/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2024 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "basic-block.h"
#include "tree.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"
46 /* Keep track of forward references to immediate-escalating functions in
47 case they become consteval. This vector contains ADDR_EXPRs and
48 PTRMEM_CSTs; it also stores FUNCTION_DECLs that had an escalating
49 function call in them, to check that they can be evaluated to a constant,
50 and immediate-escalating functions that may become consteval. */
51 static GTY(()) hash_set
<tree
> *deferred_escalating_exprs
;
54 remember_escalating_expr (tree t
)
56 if (!deferred_escalating_exprs
)
57 deferred_escalating_exprs
= hash_set
<tree
>::create_ggc (37);
58 deferred_escalating_exprs
->add (t
);
61 /* Flags for cp_fold and cp_fold_r. */
65 /* Whether we're being called from cp_fold_function. */
66 ff_genericize
= 1 << 0,
67 /* Whether we're folding a point where we know we're
68 definitely not in a manifestly constant-evaluated
70 ff_mce_false
= 1 << 1,
73 using fold_flags_t
= int;
79 cp_fold_data (fold_flags_t flags
): flags (flags
) {}
82 /* Forward declarations. */
84 static tree
cp_genericize_r (tree
*, int *, void *);
85 static tree
cp_fold_r (tree
*, int *, void *);
86 static void cp_genericize_tree (tree
*, bool);
87 static tree
cp_fold (tree
, fold_flags_t
);
88 static tree
cp_fold_immediate_r (tree
*, int *, void *);
90 /* Genericize a TRY_BLOCK. */
93 genericize_try_block (tree
*stmt_p
)
95 tree body
= TRY_STMTS (*stmt_p
);
96 tree cleanup
= TRY_HANDLERS (*stmt_p
);
98 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
101 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
104 genericize_catch_block (tree
*stmt_p
)
106 tree type
= HANDLER_TYPE (*stmt_p
);
107 tree body
= HANDLER_BODY (*stmt_p
);
109 /* FIXME should the caught type go in TREE_TYPE? */
110 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
113 /* A terser interface for building a representation of an exception
117 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
121 /* FIXME should the allowed types go in TREE_TYPE? */
122 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
123 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
125 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
126 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
131 /* Genericize an EH_SPEC_BLOCK by converting it to a
132 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
135 genericize_eh_spec_block (tree
*stmt_p
)
137 tree body
= EH_SPEC_STMTS (*stmt_p
);
138 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
139 tree failure
= build_call_n (call_unexpected_fn
, 1, build_exc_ptr ());
141 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
142 suppress_warning (*stmt_p
);
143 suppress_warning (TREE_OPERAND (*stmt_p
, 1));
146 /* Return the first non-compound statement in STMT. */
149 first_stmt (tree stmt
)
151 switch (TREE_CODE (stmt
))
154 if (tree_statement_list_node
*p
= STATEMENT_LIST_HEAD (stmt
))
155 return first_stmt (p
->stmt
);
159 return first_stmt (BIND_EXPR_BODY (stmt
));
166 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
169 genericize_if_stmt (tree
*stmt_p
)
171 tree stmt
, cond
, then_
, else_
;
172 location_t locus
= EXPR_LOCATION (*stmt_p
);
175 cond
= IF_COND (stmt
);
176 then_
= THEN_CLAUSE (stmt
);
177 else_
= ELSE_CLAUSE (stmt
);
181 tree ft
= first_stmt (then_
);
182 tree fe
= first_stmt (else_
);
184 if (TREE_CODE (ft
) == PREDICT_EXPR
185 && TREE_CODE (fe
) == PREDICT_EXPR
186 && (pr
= PREDICT_EXPR_PREDICTOR (ft
)) == PREDICT_EXPR_PREDICTOR (fe
)
187 && (pr
== PRED_HOT_LABEL
|| pr
== PRED_COLD_LABEL
))
189 gcc_rich_location
richloc (EXPR_LOC_OR_LOC (ft
, locus
));
190 richloc
.add_range (EXPR_LOC_OR_LOC (fe
, locus
));
191 warning_at (&richloc
, OPT_Wattributes
,
192 "both branches of %<if%> statement marked as %qs",
193 pr
== PRED_HOT_LABEL
? "likely" : "unlikely");
198 then_
= build_empty_stmt (locus
);
200 else_
= build_empty_stmt (locus
);
202 /* consteval if has been verified not to have the then_/else_ blocks
203 entered by gotos/case labels from elsewhere, and as then_ block
204 can contain unfolded immediate function calls, we have to discard
205 the then_ block regardless of whether else_ has side-effects or not. */
206 if (IF_STMT_CONSTEVAL_P (stmt
))
208 if (block_may_fallthru (then_
))
209 stmt
= build3 (COND_EXPR
, void_type_node
, boolean_false_node
,
214 else if (IF_STMT_CONSTEXPR_P (stmt
))
215 stmt
= integer_nonzerop (cond
) ? then_
: else_
;
216 /* ??? This optimization doesn't seem to belong here, but removing it
217 causes -Wreturn-type regressions (e.g. 107310). */
218 else if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
220 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
223 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
224 protected_set_expr_location_if_unset (stmt
, locus
);
228 /* Hook into the middle of gimplifying an OMP_FOR node. */
230 static enum gimplify_status
231 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
233 tree for_stmt
= *expr_p
;
234 gimple_seq seq
= NULL
;
236 /* Protect ourselves from recursion. */
237 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
239 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
241 gimplify_and_add (for_stmt
, &seq
);
242 gimple_seq_add_seq (pre_p
, seq
);
244 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
249 /* Gimplify an EXPR_STMT node. */
252 gimplify_expr_stmt (tree
*stmt_p
)
254 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
256 if (stmt
== error_mark_node
)
259 /* Gimplification of a statement expression will nullify the
260 statement if all its side effects are moved to *PRE_P and *POST_P.
262 In this case we will not want to emit the gimplified statement.
263 However, we may still want to emit a warning, so we do that before
265 if (stmt
&& warn_unused_value
)
267 if (!TREE_SIDE_EFFECTS (stmt
))
269 if (!IS_EMPTY_STMT (stmt
)
270 && !VOID_TYPE_P (TREE_TYPE (stmt
))
271 && !warning_suppressed_p (stmt
, OPT_Wunused_value
))
272 warning (OPT_Wunused_value
, "statement with no effect");
275 warn_if_unused_value (stmt
, input_location
);
278 if (stmt
== NULL_TREE
)
279 stmt
= alloc_stmt_list ();
284 /* Gimplify initialization from an AGGR_INIT_EXPR. */
287 cp_gimplify_init_expr (tree
*expr_p
)
289 tree from
= TREE_OPERAND (*expr_p
, 1);
290 tree to
= TREE_OPERAND (*expr_p
, 0);
293 if (TREE_CODE (from
) == TARGET_EXPR
)
294 if (tree init
= TARGET_EXPR_INITIAL (from
))
296 /* Make sure that we expected to elide this temporary. But also allow
297 gimplify_modify_expr_rhs to elide temporaries of trivial type. */
298 gcc_checking_assert (TARGET_EXPR_ELIDING_P (from
)
299 || !TREE_ADDRESSABLE (TREE_TYPE (from
)));
300 if (target_expr_needs_replace (from
))
302 /* If this was changed by cp_genericize_target_expr, we need to
303 walk into it to replace uses of the slot. */
304 replace_decl (&init
, TARGET_EXPR_SLOT (from
), to
);
312 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
313 inside the TARGET_EXPR. */
316 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
318 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
319 replace the slot operand with our target.
321 Should we add a target parm to gimplify_expr instead? No, as in this
322 case we want to replace the INIT_EXPR. */
323 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
324 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
326 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
327 AGGR_INIT_EXPR_SLOT (sub
) = to
;
329 VEC_INIT_EXPR_SLOT (sub
) = to
;
332 /* The initialization is now a side-effect, so the container can
335 TREE_TYPE (from
) = void_type_node
;
338 /* Handle aggregate NSDMI. */
339 replace_placeholders (sub
, to
);
344 t
= TREE_OPERAND (t
, 1);
349 /* Gimplify a MUST_NOT_THROW_EXPR. */
351 static enum gimplify_status
352 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
355 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
356 tree body
= TREE_OPERAND (stmt
, 0);
357 gimple_seq try_
= NULL
;
358 gimple_seq catch_
= NULL
;
361 gimplify_and_add (body
, &try_
);
362 mnt
= gimple_build_eh_must_not_throw (call_terminate_fn
);
363 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
364 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
366 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
377 /* Return TRUE if an operand (OP) of a given TYPE being copied is
378 really just an empty class copy.
380 Check that the operand has a simple form so that TARGET_EXPRs and
381 non-empty CONSTRUCTORs get reduced properly, and we leave the
382 return slot optimization alone because it isn't a copy. */
385 simple_empty_class_p (tree type
, tree op
, tree_code code
)
387 if (TREE_CODE (op
) == COMPOUND_EXPR
)
388 return simple_empty_class_p (type
, TREE_OPERAND (op
, 1), code
);
389 if (SIMPLE_TARGET_EXPR_P (op
)
390 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type
))
391 /* The TARGET_EXPR is itself a simple copy, look through it. */
392 return simple_empty_class_p (type
, TARGET_EXPR_INITIAL (op
), code
);
394 if (TREE_CODE (op
) == PARM_DECL
395 && TREE_ADDRESSABLE (TREE_TYPE (op
)))
397 tree fn
= DECL_CONTEXT (op
);
398 if (DECL_THUNK_P (fn
)
399 || lambda_static_thunk_p (fn
))
400 /* In a thunk, we pass through invisible reference parms, so this isn't
406 (TREE_CODE (op
) == EMPTY_CLASS_EXPR
407 || code
== MODIFY_EXPR
408 || is_gimple_lvalue (op
)
409 || INDIRECT_REF_P (op
)
410 || (TREE_CODE (op
) == CONSTRUCTOR
411 && CONSTRUCTOR_NELTS (op
) == 0)
412 || (TREE_CODE (op
) == CALL_EXPR
413 && !CALL_EXPR_RETURN_SLOT_OPT (op
)))
414 && !TREE_CLOBBER_P (op
)
415 && is_really_empty_class (type
, /*ignore_vptr*/true);
418 /* Returns true if evaluating E as an lvalue has side-effects;
419 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
420 have side-effects until there is a read or write through it. */
423 lvalue_has_side_effects (tree e
)
425 if (!TREE_SIDE_EFFECTS (e
))
427 while (handled_component_p (e
))
429 if (TREE_CODE (e
) == ARRAY_REF
430 && TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 1)))
432 e
= TREE_OPERAND (e
, 0);
435 /* Just naming a variable has no side-effects. */
437 else if (INDIRECT_REF_P (e
))
438 /* Similarly, indirection has no side-effects. */
439 return TREE_SIDE_EFFECTS (TREE_OPERAND (e
, 0));
441 /* For anything else, trust TREE_SIDE_EFFECTS. */
442 return TREE_SIDE_EFFECTS (e
);
445 /* Return true if FN is an immediate-escalating function. */
448 immediate_escalating_function_p (tree fn
)
450 if (!fn
|| !flag_immediate_escalation
)
453 gcc_checking_assert (TREE_CODE (fn
) == FUNCTION_DECL
);
455 if (DECL_IMMEDIATE_FUNCTION_P (fn
))
458 /* An immediate-escalating function is
459 -- the call operator of a lambda that is not declared with the consteval
461 if (LAMBDA_FUNCTION_P (fn
))
463 /* -- a defaulted special member function that is not declared with the
464 consteval specifier */
465 special_function_kind sfk
= special_memfn_p (fn
);
466 if (sfk
!= sfk_none
&& DECL_DEFAULTED_FN (fn
))
468 /* -- a function that results from the instantiation of a templated entity
469 defined with the constexpr specifier. */
470 return is_instantiation_of_constexpr (fn
);
473 /* Return true if FN is an immediate-escalating function that has not been
474 checked for escalating expressions.. */
477 unchecked_immediate_escalating_function_p (tree fn
)
479 return (immediate_escalating_function_p (fn
)
480 && !DECL_ESCALATION_CHECKED_P (fn
));
483 /* Promote FN to an immediate function, including its clones. */
486 promote_function_to_consteval (tree fn
)
488 SET_DECL_IMMEDIATE_FUNCTION_P (fn
);
489 DECL_ESCALATION_CHECKED_P (fn
) = true;
491 FOR_EACH_CLONE (clone
, fn
)
493 SET_DECL_IMMEDIATE_FUNCTION_P (clone
);
494 DECL_ESCALATION_CHECKED_P (clone
) = true;
498 /* A wrapper around cp_fold_immediate_r. Return a non-null tree if
499 we found a non-constant immediate function, or taking the address
500 of an immediate function. */
503 cp_fold_immediate (tree
*tp
, mce_value manifestly_const_eval
,
504 tree decl
/*= current_function_decl*/)
506 if (cxx_dialect
<= cxx17
)
509 temp_override
<tree
> cfd (current_function_decl
, decl
);
511 fold_flags_t flags
= ff_none
;
512 if (manifestly_const_eval
== mce_false
)
513 flags
|= ff_mce_false
;
515 cp_fold_data
data (flags
);
516 int save_errorcount
= errorcount
;
517 tree r
= cp_walk_tree_without_duplicates (tp
, cp_fold_immediate_r
, &data
);
518 if (errorcount
> save_errorcount
)
519 return integer_one_node
;
523 /* Maybe say that FN (a function decl with DECL_IMMEDIATE_FUNCTION_P set)
524 was initially not an immediate function, but was promoted to one because
525 its body contained an immediate-escalating expression or conversion. */
528 maybe_explain_promoted_consteval (location_t loc
, tree fn
)
530 if (DECL_ESCALATION_CHECKED_P (fn
))
532 /* See if we can figure out what made the function consteval. */
533 tree x
= cp_fold_immediate (&DECL_SAVED_TREE (fn
), mce_unknown
, NULL_TREE
);
535 inform (cp_expr_loc_or_loc (x
, loc
),
536 "%qD was promoted to an immediate function because its "
537 "body contains an immediate-escalating expression %qE", fn
, x
);
539 inform (loc
, "%qD was promoted to an immediate function", fn
);
543 /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
544 by expressions with side-effects in other operands. */
546 static enum gimplify_status
547 gimplify_to_rvalue (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
548 bool (*gimple_test_f
) (tree
))
550 enum gimplify_status t
551 = gimplify_expr (expr_p
, pre_p
, post_p
, gimple_test_f
, fb_rvalue
);
554 else if (is_gimple_variable (*expr_p
) && TREE_CODE (*expr_p
) != SSA_NAME
)
555 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
);
559 /* Like gimplify_arg, but if ORDERED is set (which should be set if
560 any of the arguments this argument is sequenced before has
561 TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
562 are gimplified into SSA_NAME or a fresh temporary and for
563 non-is_gimple_reg_type we don't optimize away TARGET_EXPRs. */
565 static enum gimplify_status
566 cp_gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
569 enum gimplify_status t
;
571 && !is_gimple_reg_type (TREE_TYPE (*arg_p
))
572 && TREE_CODE (*arg_p
) == TARGET_EXPR
)
574 /* gimplify_arg would strip away the TARGET_EXPR, but
575 that can mean we don't copy the argument and some following
576 argument with side-effect could modify it. */
577 protected_set_expr_location (*arg_p
, call_location
);
578 return gimplify_expr (arg_p
, pre_p
, NULL
, is_gimple_lvalue
, fb_either
);
582 t
= gimplify_arg (arg_p
, pre_p
, call_location
);
586 && is_gimple_reg_type (TREE_TYPE (*arg_p
))
587 && is_gimple_variable (*arg_p
)
588 && TREE_CODE (*arg_p
) != SSA_NAME
589 /* No need to force references into register, references
590 can't be modified. */
591 && !TYPE_REF_P (TREE_TYPE (*arg_p
))
592 /* And this can't be modified either. */
593 && *arg_p
!= current_class_ptr
)
594 *arg_p
= get_initialized_tmp_var (*arg_p
, pre_p
);
600 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
603 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
605 int saved_stmts_are_full_exprs_p
= 0;
606 location_t loc
= cp_expr_loc_or_input_loc (*expr_p
);
607 enum tree_code code
= TREE_CODE (*expr_p
);
608 enum gimplify_status ret
;
610 if (STATEMENT_CODE_P (code
))
612 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
613 current_stmt_tree ()->stmts_are_full_exprs_p
614 = STMT_IS_FULL_EXPR_P (*expr_p
);
620 simplify_aggr_init_expr (expr_p
);
626 *expr_p
= expand_vec_init_expr (NULL_TREE
, *expr_p
,
627 tf_warning_or_error
);
629 cp_fold_data
data (ff_genericize
| ff_mce_false
);
630 cp_walk_tree (expr_p
, cp_fold_r
, &data
, NULL
);
631 cp_genericize_tree (expr_p
, false);
632 copy_if_shared (expr_p
);
638 /* FIXME communicate throw type to back end, probably by moving
639 THROW_EXPR into ../tree.def. */
640 *expr_p
= TREE_OPERAND (*expr_p
, 0);
644 case MUST_NOT_THROW_EXPR
:
645 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
648 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
649 LHS of an assignment might also be involved in the RHS, as in bug
652 cp_gimplify_init_expr (expr_p
);
653 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
659 /* If the back end isn't clever enough to know that the lhs and rhs
660 types are the same, add an explicit conversion. */
661 tree op0
= TREE_OPERAND (*expr_p
, 0);
662 tree op1
= TREE_OPERAND (*expr_p
, 1);
664 if (!error_operand_p (op0
)
665 && !error_operand_p (op1
)
666 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
667 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
668 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
669 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
670 TREE_TYPE (op0
), op1
);
672 else if (simple_empty_class_p (TREE_TYPE (op0
), op1
, code
))
674 while (TREE_CODE (op1
) == TARGET_EXPR
)
675 /* We're disconnecting the initializer from its target,
676 don't create a temporary. */
677 op1
= TARGET_EXPR_INITIAL (op1
);
679 /* Remove any copies of empty classes. Also drop volatile
680 variables on the RHS to avoid infinite recursion from
681 gimplify_expr trying to load the value. */
682 if (TREE_SIDE_EFFECTS (op1
))
684 if (TREE_THIS_VOLATILE (op1
)
685 && (REFERENCE_CLASS_P (op1
) || DECL_P (op1
)))
686 op1
= build_fold_addr_expr (op1
);
688 gimplify_and_add (op1
, pre_p
);
690 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
691 is_gimple_lvalue
, fb_lvalue
);
692 *expr_p
= TREE_OPERAND (*expr_p
, 0);
693 if (code
== RETURN_EXPR
&& REFERENCE_CLASS_P (*expr_p
))
694 /* Avoid 'return *<retval>;' */
695 *expr_p
= TREE_OPERAND (*expr_p
, 0);
697 /* P0145 says that the RHS is sequenced before the LHS.
698 gimplify_modify_expr gimplifies the RHS before the LHS, but that
699 isn't quite strong enough in two cases:
701 1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
702 mean it's evaluated after the LHS.
704 2) the value calculation of the RHS is also sequenced before the
705 LHS, so for scalar assignment we need to preevaluate if the
706 RHS could be affected by LHS side-effects even if it has no
707 side-effects of its own. We don't need this for classes because
708 class assignment takes its RHS by reference. */
709 else if (flag_strong_eval_order
> 1
710 && TREE_CODE (*expr_p
) == MODIFY_EXPR
711 && lvalue_has_side_effects (op0
)
712 && (TREE_CODE (op1
) == CALL_EXPR
713 || (SCALAR_TYPE_P (TREE_TYPE (op1
))
714 && !TREE_CONSTANT (op1
))))
715 TREE_OPERAND (*expr_p
, 1) = get_initialized_tmp_var (op1
, pre_p
);
720 case EMPTY_CLASS_EXPR
:
721 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
722 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
727 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
732 genericize_try_block (expr_p
);
737 genericize_catch_block (expr_p
);
742 genericize_eh_spec_block (expr_p
);
762 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
766 gimplify_expr_stmt (expr_p
);
770 case UNARY_PLUS_EXPR
:
772 tree arg
= TREE_OPERAND (*expr_p
, 0);
773 tree type
= TREE_TYPE (*expr_p
);
774 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
782 if (flag_strong_eval_order
== 2
783 && CALL_EXPR_FN (*expr_p
)
784 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
)
785 && cp_get_callee_fndecl_nofold (*expr_p
) == NULL_TREE
)
787 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
788 enum gimplify_status t
789 = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
790 is_gimple_call_addr
);
793 /* GIMPLE considers most pointer conversion useless, but for
794 calls we actually care about the exact function pointer type. */
795 else if (TREE_TYPE (CALL_EXPR_FN (*expr_p
)) != fnptrtype
)
796 CALL_EXPR_FN (*expr_p
)
797 = build1 (NOP_EXPR
, fnptrtype
, CALL_EXPR_FN (*expr_p
));
799 if (!CALL_EXPR_FN (*expr_p
))
800 /* Internal function call. */;
801 else if (CALL_EXPR_REVERSE_ARGS (*expr_p
))
803 /* This is a call to a (compound) assignment operator that used
804 the operator syntax; gimplify the RHS first. */
805 gcc_assert (call_expr_nargs (*expr_p
) == 2);
806 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p
));
807 enum gimplify_status t
808 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 1), pre_p
, loc
,
809 TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, 0)));
813 else if (CALL_EXPR_ORDERED_ARGS (*expr_p
))
815 /* Leave the last argument for gimplify_call_expr, to avoid problems
816 with __builtin_va_arg_pack(). */
817 int nargs
= call_expr_nargs (*expr_p
) - 1;
818 int last_side_effects_arg
= -1;
819 for (int i
= nargs
; i
> 0; --i
)
820 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, i
)))
822 last_side_effects_arg
= i
;
825 for (int i
= 0; i
< nargs
; ++i
)
827 enum gimplify_status t
828 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
, loc
,
829 i
< last_side_effects_arg
);
834 else if (flag_strong_eval_order
835 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p
))
837 /* If flag_strong_eval_order, evaluate the object argument first. */
838 tree fntype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
839 if (INDIRECT_TYPE_P (fntype
))
840 fntype
= TREE_TYPE (fntype
);
841 if (TREE_CODE (fntype
) == METHOD_TYPE
)
843 int nargs
= call_expr_nargs (*expr_p
);
844 bool side_effects
= false;
845 for (int i
= 1; i
< nargs
; ++i
)
846 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p
, i
)))
851 enum gimplify_status t
852 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p
, 0), pre_p
, loc
,
860 tree decl
= cp_get_callee_fndecl_nofold (*expr_p
);
863 if (fndecl_built_in_p (decl
, BUILT_IN_FRONTEND
))
864 switch (DECL_FE_FUNCTION_CODE (decl
))
866 case CP_BUILT_IN_IS_CONSTANT_EVALUATED
:
867 *expr_p
= boolean_false_node
;
869 case CP_BUILT_IN_SOURCE_LOCATION
:
871 = fold_builtin_source_location (*expr_p
);
873 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER
:
875 = fold_builtin_is_corresponding_member
876 (EXPR_LOCATION (*expr_p
), call_expr_nargs (*expr_p
),
877 &CALL_EXPR_ARG (*expr_p
, 0));
879 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS
:
881 = fold_builtin_is_pointer_inverconvertible_with_class
882 (EXPR_LOCATION (*expr_p
), call_expr_nargs (*expr_p
),
883 &CALL_EXPR_ARG (*expr_p
, 0));
888 else if (fndecl_built_in_p (decl
, BUILT_IN_CLZG
, BUILT_IN_CTZG
))
889 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
,
892 /* All consteval functions should have been processed by now. */
893 gcc_checking_assert (!immediate_invocation_p (decl
));
898 /* A TARGET_EXPR that expresses direct-initialization should have been
899 elided by cp_gimplify_init_expr. */
900 gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p
));
901 /* Likewise, but allow extra temps of trivial type so that
902 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
903 on the rhs of an assignment, as in constexpr-aggr1.C. */
904 gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p
)
905 || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)));
910 *expr_p
= cplus_expand_constant (*expr_p
);
911 if (TREE_CODE (*expr_p
) == PTRMEM_CST
)
918 if (TREE_OPERAND (*expr_p
, 0)
919 && (TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == INIT_EXPR
920 || TREE_CODE (TREE_OPERAND (*expr_p
, 0)) == MODIFY_EXPR
))
922 expr_p
= &TREE_OPERAND (*expr_p
, 0);
923 /* Avoid going through the INIT_EXPR case, which can
924 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
925 goto modify_expr_case
;
930 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
934 /* Restore saved state. */
935 if (STATEMENT_CODE_P (code
))
936 current_stmt_tree ()->stmts_are_full_exprs_p
937 = saved_stmts_are_full_exprs_p
;
943 is_invisiref_parm (const_tree t
)
945 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
946 && DECL_BY_REFERENCE (t
));
949 /* A stable comparison routine for use with splay trees and DECLs. */
952 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
957 return DECL_UID (a
) - DECL_UID (b
);
960 /* OpenMP context during genericization. */
962 struct cp_genericize_omp_taskreg
966 struct cp_genericize_omp_taskreg
*outer
;
967 splay_tree variables
;
970 /* Return true if genericization should try to determine if
971 DECL is firstprivate or shared within task regions. */
974 omp_var_to_track (tree decl
)
976 tree type
= TREE_TYPE (decl
);
977 if (is_invisiref_parm (decl
))
978 type
= TREE_TYPE (type
);
979 else if (TYPE_REF_P (type
))
980 type
= TREE_TYPE (type
);
981 while (TREE_CODE (type
) == ARRAY_TYPE
)
982 type
= TREE_TYPE (type
);
983 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
985 if (VAR_P (decl
) && CP_DECL_THREAD_LOCAL_P (decl
))
987 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
992 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
995 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
997 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
998 (splay_tree_key
) decl
);
1001 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
1003 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
1004 if (!omp_ctx
->default_shared
)
1006 struct cp_genericize_omp_taskreg
*octx
;
1008 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
1010 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
1011 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
1013 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
1016 if (octx
->is_parallel
)
1020 && (TREE_CODE (decl
) == PARM_DECL
1021 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
1022 && DECL_CONTEXT (decl
) == current_function_decl
)))
1023 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
1024 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
1026 /* DECL is implicitly determined firstprivate in
1027 the current task construct. Ensure copy ctor and
1028 dtor are instantiated, because during gimplification
1029 it will be already too late. */
1030 tree type
= TREE_TYPE (decl
);
1031 if (is_invisiref_parm (decl
))
1032 type
= TREE_TYPE (type
);
1033 else if (TYPE_REF_P (type
))
1034 type
= TREE_TYPE (type
);
1035 while (TREE_CODE (type
) == ARRAY_TYPE
)
1036 type
= TREE_TYPE (type
);
1037 get_copy_ctor (type
, tf_none
);
1038 get_dtor (type
, tf_none
);
1041 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
1045 /* True if any of the element initializers in CTOR are TARGET_EXPRs that are
1046 not expected to elide, e.g. because unsafe_copy_elision_p is true. */
1049 any_non_eliding_target_exprs (tree ctor
)
1051 for (const constructor_elt
&e
: *CONSTRUCTOR_ELTS (ctor
))
1053 if (TREE_CODE (e
.value
) == TARGET_EXPR
1054 && !TARGET_EXPR_ELIDING_P (e
.value
))
1060 /* If we might need to clean up a partially constructed object, break down the
1061 CONSTRUCTOR with split_nonconstant_init. Also expand VEC_INIT_EXPR at this
1062 point. If initializing TO with FROM is non-trivial, overwrite *REPLACE with
1066 cp_genericize_init (tree
*replace
, tree from
, tree to
)
1068 tree init
= NULL_TREE
;
1069 if (TREE_CODE (from
) == VEC_INIT_EXPR
)
1070 init
= expand_vec_init_expr (to
, from
, tf_warning_or_error
);
1071 else if (TREE_CODE (from
) == CONSTRUCTOR
1072 && TREE_SIDE_EFFECTS (from
)
1073 && ((flag_exceptions
1074 && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from
)))
1075 || any_non_eliding_target_exprs (from
)))
1077 to
= cp_stabilize_reference (to
);
1078 replace_placeholders (from
, to
);
1079 init
= split_nonconstant_init (to
, from
);
1084 if (*replace
== from
)
1085 /* Make cp_gimplify_init_expr call replace_decl on this
1086 TARGET_EXPR_INITIAL. */
1087 init
= fold_convert (void_type_node
, init
);
1092 /* For an INIT_EXPR, replace the INIT_EXPR itself. */
1095 cp_genericize_init_expr (tree
*stmt_p
)
1097 iloc_sentinel ils
= EXPR_LOCATION (*stmt_p
);
1098 tree to
= TREE_OPERAND (*stmt_p
, 0);
1099 tree from
= TREE_OPERAND (*stmt_p
, 1);
1100 if (SIMPLE_TARGET_EXPR_P (from
)
1101 /* Return gets confused if we clobber its INIT_EXPR this soon. */
1102 && TREE_CODE (to
) != RESULT_DECL
)
1103 from
= TARGET_EXPR_INITIAL (from
);
1104 cp_genericize_init (stmt_p
, from
, to
);
1107 /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
1108 replace_decl later when we know what we're initializing. */
1111 cp_genericize_target_expr (tree
*stmt_p
)
1113 iloc_sentinel ils
= EXPR_LOCATION (*stmt_p
);
1114 tree slot
= TARGET_EXPR_SLOT (*stmt_p
);
1115 cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p
),
1116 TARGET_EXPR_INITIAL (*stmt_p
), slot
);
1117 gcc_assert (!DECL_INITIAL (slot
));
1120 /* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
1121 TARGET_EXPR_INITIAL, and this also updates *_SLOT. We need this extra
1122 replacement when cp_folding TARGET_EXPR to preserve the invariant that
1123 AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT. */
1126 maybe_replace_decl (tree
*tp
, tree decl
, tree replacement
)
1128 if (!*tp
|| !VOID_TYPE_P (TREE_TYPE (*tp
)))
1131 while (TREE_CODE (t
) == COMPOUND_EXPR
)
1132 t
= TREE_OPERAND (t
, 1);
1133 if (TREE_CODE (t
) == AGGR_INIT_EXPR
)
1134 replace_decl (&AGGR_INIT_EXPR_SLOT (t
), decl
, replacement
);
1135 else if (TREE_CODE (t
) == VEC_INIT_EXPR
)
1136 replace_decl (&VEC_INIT_EXPR_SLOT (t
), decl
, replacement
);
1138 replace_decl (tp
, decl
, replacement
);
1142 /* Genericization context. */
1144 struct cp_genericize_data
1146 hash_set
<tree
> *p_set
;
1147 auto_vec
<tree
> bind_expr_stack
;
1148 struct cp_genericize_omp_taskreg
*omp_ctx
;
1151 bool handle_invisiref_parm_p
;
1154 /* Emit an error about taking the address of an immediate function.
1155 EXPR is the whole expression; DECL is the immediate function. */
1158 taking_address_of_imm_fn_error (tree expr
, tree decl
)
1160 auto_diagnostic_group d
;
1161 const location_t loc
= (TREE_CODE (expr
) == PTRMEM_CST
1162 ? PTRMEM_CST_LOCATION (expr
)
1163 : EXPR_LOCATION (expr
));
1164 error_at (loc
, "taking address of an immediate function %qD", decl
);
1165 maybe_explain_promoted_consteval (loc
, decl
);
1168 /* A subroutine of cp_fold_r to handle immediate functions. */
1171 cp_fold_immediate_r (tree
*stmt_p
, int *walk_subtrees
, void *data_
)
1173 auto data
= static_cast<cp_fold_data
*>(data_
);
1174 tree stmt
= *stmt_p
;
1175 /* The purpose of this is not to emit errors for mce_unknown. */
1176 const tsubst_flags_t complain
= (data
->flags
& ff_mce_false
1177 ? tf_error
: tf_none
);
1178 const tree_code code
= TREE_CODE (stmt
);
1180 /* No need to look into types or unevaluated operands.
1181 NB: This affects cp_fold_r as well. */
1183 || unevaluated_p (code
)
1184 /* We do not use in_immediate_context here because it checks
1185 more than is desirable, e.g., sk_template_parms. */
1186 || cp_unevaluated_operand
1187 || (current_function_decl
1188 && DECL_IMMEDIATE_FUNCTION_P (current_function_decl
)))
1194 tree decl
= NULL_TREE
;
1195 bool call_p
= false;
1197 /* We are looking for &fn or fn(). */
1201 case AGGR_INIT_EXPR
:
1202 if (tree fn
= cp_get_callee (stmt
))
1203 if (TREE_CODE (fn
) != ADDR_EXPR
|| ADDR_EXPR_DENOTES_CALL_P (fn
))
1204 decl
= cp_get_fndecl_from_callee (fn
, /*fold*/false);
1208 decl
= PTRMEM_CST_MEMBER (stmt
);
1211 if (!ADDR_EXPR_DENOTES_CALL_P (stmt
))
1212 decl
= TREE_OPERAND (stmt
, 0);
1218 if (!decl
|| TREE_CODE (decl
) != FUNCTION_DECL
)
1221 /* Fully escalate once all templates have been instantiated. What we're
1222 calling is not a consteval function but it may become one. This
1223 requires recursing; DECL may be promoted to consteval because it
1224 contains an escalating expression E, but E itself may have to be
1225 promoted first, etc. */
1226 if (at_eof
> 1 && unchecked_immediate_escalating_function_p (decl
))
1228 /* Set before the actual walk to avoid endless recursion. */
1229 DECL_ESCALATION_CHECKED_P (decl
) = true;
1230 /* We're only looking for the first escalating expression. Let us not
1231 walk more trees than necessary, hence mce_unknown. */
1232 cp_fold_immediate (&DECL_SAVED_TREE (decl
), mce_unknown
, decl
);
1235 /* [expr.const]p16 "An expression or conversion is immediate-escalating if
1236 it is not initially in an immediate function context and it is either
1237 -- an immediate invocation that is not a constant expression and is not
1238 a subexpression of an immediate invocation."
1240 If we are in an immediate-escalating function, the immediate-escalating
1241 expression or conversion makes it an immediate function. So STMT does
1242 not need to produce a constant expression. */
1243 if (DECL_IMMEDIATE_FUNCTION_P (decl
))
1245 tree e
= cxx_constant_value (stmt
, tf_none
);
1246 if (e
== error_mark_node
)
1248 /* This takes care of, e.g.,
1249 template <typename T>
1250 constexpr int f(T t)
1254 where id (consteval) causes f<int> to be promoted. */
1255 if (immediate_escalating_function_p (current_function_decl
))
1256 promote_function_to_consteval (current_function_decl
);
1257 else if (complain
& tf_error
)
1261 auto_diagnostic_group d
;
1262 location_t loc
= cp_expr_loc_or_input_loc (stmt
);
1263 error_at (loc
, "call to consteval function %qE is "
1264 "not a constant expression", stmt
);
1265 /* Explain why it's not a constant expression. */
1266 *stmt_p
= cxx_constant_value (stmt
, complain
);
1267 maybe_explain_promoted_consteval (loc
, decl
);
1269 else if (!data
->pset
.add (stmt
))
1271 taking_address_of_imm_fn_error (stmt
, decl
);
1272 *stmt_p
= build_zero_cst (TREE_TYPE (stmt
));
1274 /* If we're giving hard errors, continue the walk rather than
1275 bailing out after the first error. */
1281 /* We've evaluated the consteval function call. */
1285 /* We've encountered a function call that may turn out to be consteval
1286 later. Store its caller so that we can ensure that the call is
1287 a constant expression. */
1288 else if (unchecked_immediate_escalating_function_p (decl
))
1290 /* Make sure we're not inserting new elements while walking
1291 the deferred_escalating_exprs hash table; if we are, it's
1292 likely that a function wasn't properly marked checked for
1294 gcc_checking_assert (at_eof
<= 1);
1295 if (current_function_decl
)
1296 remember_escalating_expr (current_function_decl
);
1297 /* auto p = &f<int>; in the global scope won't be ensconced in
1298 a function we could store for later at this point. (If there's
1299 no c_f_d at this point and we're dealing with a call, we should
1300 see the call when cp_fold_function __static_i_and_d.) */
1302 remember_escalating_expr (stmt
);
1308 /* Perform any pre-gimplification folding of C++ front end trees to
1310 Note: The folding of non-omp cases is something to move into
1311 the middle-end. As for now we have most foldings only on GENERIC
1312 in fold-const, we need to perform this before transformation to
1315 ??? This is algorithmically weird because walk_tree works in pre-order, so
1316 we see outer expressions before inner expressions. This isn't as much of an
1317 issue because cp_fold recurses into subexpressions in many cases, but then
1318 walk_tree walks back into those subexpressions again. We avoid the
1319 resulting complexity problem by caching the result of cp_fold, but it's
1323 cp_fold_r (tree
*stmt_p
, int *walk_subtrees
, void *data_
)
1325 cp_fold_data
*data
= (cp_fold_data
*)data_
;
1326 tree stmt
= *stmt_p
;
1327 enum tree_code code
= TREE_CODE (stmt
);
1329 if (cxx_dialect
>= cxx20
)
1331 /* Unfortunately we must handle code like
1333 where we have to check bar too. The cp_fold call below could
1334 fold the ?: into a constant before we've checked it. */
1335 if (code
== COND_EXPR
)
1337 auto then_fn
= cp_fold_r
, else_fn
= cp_fold_r
;
1338 /* See if we can figure out if either of the branches is dead. If it
1339 is, we don't need to do everything that cp_fold_r does. */
1340 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_fold_r
, data
, nullptr);
1341 if (integer_zerop (TREE_OPERAND (stmt
, 0)))
1342 then_fn
= cp_fold_immediate_r
;
1343 else if (integer_nonzerop (TREE_OPERAND (stmt
, 0)))
1344 else_fn
= cp_fold_immediate_r
;
1346 if (TREE_OPERAND (stmt
, 1))
1347 cp_walk_tree (&TREE_OPERAND (stmt
, 1), then_fn
, data
,
1349 if (TREE_OPERAND (stmt
, 2))
1350 cp_walk_tree (&TREE_OPERAND (stmt
, 2), else_fn
, data
,
1353 /* Don't return yet, still need the cp_fold below. */
1356 cp_fold_immediate_r (stmt_p
, walk_subtrees
, data
);
1359 *stmt_p
= stmt
= cp_fold (*stmt_p
, data
->flags
);
1361 /* For certain trees, like +foo(), the cp_fold above will remove the +,
1362 and the subsequent tree walk would go straight down to the CALL_EXPR's
1363 operands, meaning that cp_fold_immediate_r would never see the
1364 CALL_EXPR. Ew :(. */
1365 if (TREE_CODE (stmt
) == CALL_EXPR
&& code
!= CALL_EXPR
)
1366 cp_fold_immediate_r (stmt_p
, walk_subtrees
, data
);
1368 if (data
->pset
.add (stmt
))
1370 /* Don't walk subtrees of stmts we've already walked once, otherwise
1371 we can have exponential complexity with e.g. lots of nested
1372 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
1373 always the same tree, which the first time cp_fold_r has been
1374 called on it had the subtrees walked. */
1379 code
= TREE_CODE (stmt
);
1386 case OMP_DISTRIBUTE
:
1390 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_fold_r
, data
, NULL
);
1391 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_fold_r
, data
, NULL
);
1392 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_fold_r
, data
, NULL
);
1393 x
= OMP_FOR_COND (stmt
);
1394 if (x
&& TREE_CODE_CLASS (TREE_CODE (x
)) == tcc_comparison
)
1396 cp_walk_tree (&TREE_OPERAND (x
, 0), cp_fold_r
, data
, NULL
);
1397 cp_walk_tree (&TREE_OPERAND (x
, 1), cp_fold_r
, data
, NULL
);
1399 else if (x
&& TREE_CODE (x
) == TREE_VEC
)
1401 n
= TREE_VEC_LENGTH (x
);
1402 for (i
= 0; i
< n
; i
++)
1404 tree o
= TREE_VEC_ELT (x
, i
);
1405 if (o
&& TREE_CODE_CLASS (TREE_CODE (o
)) == tcc_comparison
)
1406 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1409 x
= OMP_FOR_INCR (stmt
);
1410 if (x
&& TREE_CODE (x
) == TREE_VEC
)
1412 n
= TREE_VEC_LENGTH (x
);
1413 for (i
= 0; i
< n
; i
++)
1415 tree o
= TREE_VEC_ELT (x
, i
);
1416 if (o
&& TREE_CODE (o
) == MODIFY_EXPR
)
1417 o
= TREE_OPERAND (o
, 1);
1418 if (o
&& (TREE_CODE (o
) == PLUS_EXPR
|| TREE_CODE (o
) == MINUS_EXPR
1419 || TREE_CODE (o
) == POINTER_PLUS_EXPR
))
1421 cp_walk_tree (&TREE_OPERAND (o
, 0), cp_fold_r
, data
, NULL
);
1422 cp_walk_tree (&TREE_OPERAND (o
, 1), cp_fold_r
, data
, NULL
);
1426 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_fold_r
, data
, NULL
);
1431 if (IF_STMT_CONSTEVAL_P (stmt
))
1433 /* Don't walk THEN_CLAUSE (stmt) for consteval if. IF_COND is always
1434 boolean_false_node. */
1435 cp_walk_tree (&ELSE_CLAUSE (stmt
), cp_fold_r
, data
, NULL
);
1436 cp_walk_tree (&IF_SCOPE (stmt
), cp_fold_r
, data
, NULL
);
1442 /* cp_genericize_{init,target}_expr are only for genericize time; they're
1443 here rather than in cp_genericize to avoid problems with the invisible
1444 reference transition. */
1446 if (data
->flags
& ff_genericize
)
1447 cp_genericize_init_expr (stmt_p
);
1451 if (data
->flags
& ff_genericize
)
1452 cp_genericize_target_expr (stmt_p
);
1454 /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
1455 that case, strip it in favor of this one. */
1456 if (tree
&init
= TARGET_EXPR_INITIAL (stmt
))
1458 cp_walk_tree (&init
, cp_fold_r
, data
, NULL
);
1459 cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt
), cp_fold_r
, data
, NULL
);
1461 if (TREE_CODE (init
) == TARGET_EXPR
)
1463 tree sub
= TARGET_EXPR_INITIAL (init
);
1464 maybe_replace_decl (&sub
, TARGET_EXPR_SLOT (init
),
1465 TARGET_EXPR_SLOT (stmt
));
1478 /* Fold ALL the trees! FIXME we should be able to remove this, but
1479 apparently that still causes optimization regressions. */
1482 cp_fold_function (tree fndecl
)
1484 /* By now all manifestly-constant-evaluated expressions will have
1485 been constant-evaluated already if possible, so we can safely
1486 pass ff_mce_false. */
1487 cp_fold_data
data (ff_genericize
| ff_mce_false
);
1488 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_fold_r
, &data
, NULL
);
1490 /* This is merely an optimization: if FNDECL has no i-e expressions,
1491 we'll not save c_f_d, and we can safely say that FNDECL will not
1492 be promoted to consteval. */
1493 if (deferred_escalating_exprs
1494 && !deferred_escalating_exprs
->contains (current_function_decl
))
1495 DECL_ESCALATION_CHECKED_P (fndecl
) = true;
1498 /* We've stashed immediate-escalating functions. Now see if they indeed
1499 ought to be promoted to consteval. */
1502 process_and_check_pending_immediate_escalating_fns ()
1504 /* This will be null for -fno-immediate-escalation. */
1505 if (!deferred_escalating_exprs
)
1508 for (auto e
: *deferred_escalating_exprs
)
1509 if (TREE_CODE (e
) == FUNCTION_DECL
&& !DECL_ESCALATION_CHECKED_P (e
))
1510 cp_fold_immediate (&DECL_SAVED_TREE (e
), mce_false
, e
);
1512 /* We've escalated every function that could have been promoted to
1513 consteval. Check that we are not taking the address of a consteval
1515 for (auto e
: *deferred_escalating_exprs
)
1517 if (TREE_CODE (e
) == FUNCTION_DECL
)
1519 tree decl
= (TREE_CODE (e
) == PTRMEM_CST
1520 ? PTRMEM_CST_MEMBER (e
)
1521 : TREE_OPERAND (e
, 0));
1522 if (DECL_IMMEDIATE_FUNCTION_P (decl
))
1523 taking_address_of_imm_fn_error (e
, decl
);
1526 deferred_escalating_exprs
= nullptr;
1529 /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1531 static tree
genericize_spaceship (tree expr
)
1533 iloc_sentinel
s (cp_expr_location (expr
));
1534 tree type
= TREE_TYPE (expr
);
1535 tree op0
= TREE_OPERAND (expr
, 0);
1536 tree op1
= TREE_OPERAND (expr
, 1);
1537 return genericize_spaceship (input_location
, type
, op0
, op1
);
1540 /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1541 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1542 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1543 NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
1546 predeclare_vla (tree expr
)
1548 tree type
= TREE_TYPE (expr
);
1549 if (type
== error_mark_node
)
1551 if (is_typedef_decl (expr
))
1552 type
= DECL_ORIGINAL_TYPE (expr
);
1554 /* We need to strip pointers for gimplify_type_sizes. */
1556 while (POINTER_TYPE_P (vla
))
1558 if (TYPE_NAME (vla
))
1560 vla
= TREE_TYPE (vla
);
1562 if (vla
== type
|| TYPE_NAME (vla
)
1563 || !variably_modified_type_p (vla
, NULL_TREE
))
1566 tree decl
= build_decl (input_location
, TYPE_DECL
, NULL_TREE
, vla
);
1567 DECL_ARTIFICIAL (decl
) = 1;
1568 TYPE_NAME (vla
) = decl
;
1569 tree dexp
= build_stmt (input_location
, DECL_EXPR
, decl
);
1577 expr
= build2 (COMPOUND_EXPR
, type
, dexp
, expr
);
1582 /* Perform any pre-gimplification lowering of C++ front end trees to
1586 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
1588 tree stmt
= *stmt_p
;
1589 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
1590 hash_set
<tree
> *p_set
= wtd
->p_set
;
1592 /* If in an OpenMP context, note var uses. */
1593 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1595 || TREE_CODE (stmt
) == PARM_DECL
1596 || TREE_CODE (stmt
) == RESULT_DECL
)
1597 && omp_var_to_track (stmt
))
1598 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
1600 /* Don't dereference parms in a thunk, pass the references through. */
1601 if ((TREE_CODE (stmt
) == CALL_EXPR
&& call_from_lambda_thunk_p (stmt
))
1602 || (TREE_CODE (stmt
) == AGGR_INIT_EXPR
&& AGGR_INIT_FROM_THUNK_P (stmt
)))
1608 /* Dereference invisible reference parms. */
1609 if (wtd
->handle_invisiref_parm_p
&& is_invisiref_parm (stmt
))
1611 *stmt_p
= convert_from_reference (stmt
);
1612 p_set
->add (*stmt_p
);
1617 /* Map block scope extern declarations to visible declarations with the
1618 same name and type in outer scopes if any. */
1619 if (VAR_OR_FUNCTION_DECL_P (stmt
) && DECL_LOCAL_DECL_P (stmt
))
1620 if (tree alias
= DECL_LOCAL_DECL_ALIAS (stmt
))
1622 if (alias
!= error_mark_node
)
1625 TREE_USED (alias
) |= TREE_USED (stmt
);
1631 if (TREE_CODE (stmt
) == INTEGER_CST
1632 && TYPE_REF_P (TREE_TYPE (stmt
))
1633 && (flag_sanitize
& (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
1634 && !wtd
->no_sanitize_p
)
1636 ubsan_maybe_instrument_reference (stmt_p
);
1637 if (*stmt_p
!= stmt
)
1644 /* Other than invisiref parms, don't walk the same tree twice. */
1645 if (p_set
->contains (stmt
))
1651 switch (TREE_CODE (stmt
))
1654 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1656 /* If in an OpenMP context, note var uses. */
1657 if (UNLIKELY (wtd
->omp_ctx
!= NULL
)
1658 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
1659 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
1660 *stmt_p
= fold_convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
1666 if (TREE_OPERAND (stmt
, 0))
1668 if (is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
1669 /* Don't dereference an invisiref RESULT_DECL inside a
1672 if (RETURN_EXPR_LOCAL_ADDR_P (stmt
))
1674 /* Don't return the address of a local variable. */
1675 tree
*p
= &TREE_OPERAND (stmt
, 0);
1676 while (TREE_CODE (*p
) == COMPOUND_EXPR
)
1677 p
= &TREE_OPERAND (*p
, 0);
1678 if (TREE_CODE (*p
) == INIT_EXPR
)
1680 tree op
= TREE_OPERAND (*p
, 1);
1681 tree new_op
= build2 (COMPOUND_EXPR
, TREE_TYPE (op
), op
,
1682 build_zero_cst (TREE_TYPE (op
)));
1683 TREE_OPERAND (*p
, 1) = new_op
;
1690 switch (OMP_CLAUSE_CODE (stmt
))
1692 case OMP_CLAUSE_LASTPRIVATE
:
1693 /* Don't dereference an invisiref in OpenMP clauses. */
1694 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1697 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
1698 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
1699 cp_genericize_r
, data
, NULL
);
1702 case OMP_CLAUSE_PRIVATE
:
1703 /* Don't dereference an invisiref in OpenMP clauses. */
1704 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1706 else if (wtd
->omp_ctx
!= NULL
)
1708 /* Private clause doesn't cause any references to the
1709 var in outer contexts, avoid calling
1710 omp_cxx_notice_variable for it. */
1711 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
1712 wtd
->omp_ctx
= NULL
;
1713 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
1719 case OMP_CLAUSE_SHARED
:
1720 case OMP_CLAUSE_FIRSTPRIVATE
:
1721 case OMP_CLAUSE_COPYIN
:
1722 case OMP_CLAUSE_COPYPRIVATE
:
1723 case OMP_CLAUSE_INCLUSIVE
:
1724 case OMP_CLAUSE_EXCLUSIVE
:
1725 /* Don't dereference an invisiref in OpenMP clauses. */
1726 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1729 case OMP_CLAUSE_REDUCTION
:
1730 case OMP_CLAUSE_IN_REDUCTION
:
1731 case OMP_CLAUSE_TASK_REDUCTION
:
1732 /* Don't dereference an invisiref in reduction clause's
1733 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1734 still needs to be genericized. */
1735 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
1738 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
1739 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
1740 cp_genericize_r
, data
, NULL
);
1741 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
1742 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
1743 cp_genericize_r
, data
, NULL
);
1751 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1752 to lower this construct before scanning it, so we need to lower these
1753 before doing anything else. */
1755 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
1756 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
1759 CLEANUP_BODY (stmt
),
1760 CLEANUP_EXPR (stmt
));
1764 genericize_if_stmt (stmt_p
);
1765 /* *stmt_p has changed, tail recurse to handle it again. */
1766 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1768 /* COND_EXPR might have incompatible types in branches if one or both
1769 arms are bitfields. Fix it up now. */
1773 = (TREE_OPERAND (stmt
, 1)
1774 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
1777 = (TREE_OPERAND (stmt
, 2)
1778 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
1781 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1782 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
1784 TREE_OPERAND (stmt
, 1)
1785 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
1786 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1790 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1791 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1793 TREE_OPERAND (stmt
, 2)
1794 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1795 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1802 if (UNLIKELY (wtd
->omp_ctx
!= NULL
))
1805 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1807 && !DECL_EXTERNAL (decl
)
1808 && omp_var_to_track (decl
))
1811 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1812 (splay_tree_key
) decl
);
1814 splay_tree_insert (wtd
->omp_ctx
->variables
,
1815 (splay_tree_key
) decl
,
1817 ? OMP_CLAUSE_DEFAULT_SHARED
1818 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1821 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
| SANITIZE_VPTR
))
1823 /* The point here is to not sanitize static initializers. */
1824 bool no_sanitize_p
= wtd
->no_sanitize_p
;
1825 wtd
->no_sanitize_p
= true;
1826 for (tree decl
= BIND_EXPR_VARS (stmt
);
1828 decl
= DECL_CHAIN (decl
))
1830 && TREE_STATIC (decl
)
1831 && DECL_INITIAL (decl
))
1832 cp_walk_tree (&DECL_INITIAL (decl
), cp_genericize_r
, data
, NULL
);
1833 wtd
->no_sanitize_p
= no_sanitize_p
;
1835 wtd
->bind_expr_stack
.safe_push (stmt
);
1836 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1837 cp_genericize_r
, data
, NULL
);
1838 wtd
->bind_expr_stack
.pop ();
1841 case ASSERTION_STMT
:
1842 case PRECONDITION_STMT
:
1843 case POSTCONDITION_STMT
:
1845 if (tree check
= build_contract_check (stmt
))
1848 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
1851 /* If we didn't build a check, replace it with void_node so we don't
1852 leak contracts into GENERIC. */
1853 *stmt_p
= void_node
;
1860 tree block
= NULL_TREE
;
1862 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1863 BLOCK, and append an IMPORTED_DECL to its
1864 BLOCK_VARS chained list. */
1865 if (wtd
->bind_expr_stack
.exists ())
1868 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1869 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1874 tree decl
= TREE_OPERAND (stmt
, 0);
1877 if (undeduced_auto_decl (decl
))
1878 /* Omit from the GENERIC, the back-end can't handle it. */;
1881 tree using_directive
= make_node (IMPORTED_DECL
);
1882 TREE_TYPE (using_directive
) = void_type_node
;
1883 DECL_CONTEXT (using_directive
) = current_function_decl
;
1884 DECL_SOURCE_LOCATION (using_directive
)
1885 = cp_expr_loc_or_input_loc (stmt
);
1887 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
) = decl
;
1888 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1889 BLOCK_VARS (block
) = using_directive
;
1892 /* The USING_STMT won't appear in GENERIC. */
1893 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1899 if (TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1901 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1902 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1907 tree d
= DECL_EXPR_DECL (stmt
);
1909 gcc_assert (CP_DECL_THREAD_LOCAL_P (d
) == DECL_THREAD_LOCAL_P (d
));
1917 struct cp_genericize_omp_taskreg omp_ctx
;
1922 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1923 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1924 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1925 omp_ctx
.outer
= wtd
->omp_ctx
;
1926 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1927 wtd
->omp_ctx
= &omp_ctx
;
1928 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1929 switch (OMP_CLAUSE_CODE (c
))
1931 case OMP_CLAUSE_SHARED
:
1932 case OMP_CLAUSE_PRIVATE
:
1933 case OMP_CLAUSE_FIRSTPRIVATE
:
1934 case OMP_CLAUSE_LASTPRIVATE
:
1935 decl
= OMP_CLAUSE_DECL (c
);
1936 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1938 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1941 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1942 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1943 ? OMP_CLAUSE_DEFAULT_SHARED
1944 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1945 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
&& omp_ctx
.outer
)
1946 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1948 case OMP_CLAUSE_DEFAULT
:
1949 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1950 omp_ctx
.default_shared
= true;
1954 if (TREE_CODE (stmt
) == OMP_TASKLOOP
)
1955 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
1956 cp_genericize_r
, cp_walk_subtrees
);
1958 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1959 wtd
->omp_ctx
= omp_ctx
.outer
;
1960 splay_tree_delete (omp_ctx
.variables
);
1965 cfun
->has_omp_target
= true;
1971 tree try_block
= wtd
->try_block
;
1972 wtd
->try_block
= stmt
;
1973 cp_walk_tree (&TRY_STMTS (stmt
), cp_genericize_r
, data
, NULL
);
1974 wtd
->try_block
= try_block
;
1975 cp_walk_tree (&TRY_HANDLERS (stmt
), cp_genericize_r
, data
, NULL
);
1979 case MUST_NOT_THROW_EXPR
:
1980 /* MUST_NOT_THROW_COND might be something else with TM. */
1981 if (MUST_NOT_THROW_COND (stmt
) == NULL_TREE
)
1984 tree try_block
= wtd
->try_block
;
1985 wtd
->try_block
= stmt
;
1986 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
1987 wtd
->try_block
= try_block
;
1993 location_t loc
= location_of (stmt
);
1994 if (warning_suppressed_p (stmt
/* What warning? */))
1996 else if (wtd
->try_block
)
1998 if (TREE_CODE (wtd
->try_block
) == MUST_NOT_THROW_EXPR
)
2000 auto_diagnostic_group d
;
2001 if (warning_at (loc
, OPT_Wterminate
,
2002 "%<throw%> will always call %<terminate%>")
2003 && cxx_dialect
>= cxx11
2004 && DECL_DESTRUCTOR_P (current_function_decl
))
2005 inform (loc
, "in C++11 destructors default to %<noexcept%>");
2010 if (warn_cxx11_compat
&& cxx_dialect
< cxx11
2011 && DECL_DESTRUCTOR_P (current_function_decl
)
2012 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl
))
2014 && (get_defaulted_eh_spec (current_function_decl
)
2015 == empty_except_spec
))
2016 warning_at (loc
, OPT_Wc__11_compat
,
2017 "in C++11 this %<throw%> will call %<terminate%> "
2018 "because destructors default to %<noexcept%>");
2024 gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt
)));
2025 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
2028 case SPACESHIP_EXPR
:
2029 *stmt_p
= genericize_spaceship (*stmt_p
);
2033 /* By the time we get here we're handing off to the back end, so we don't
2034 need or want to preserve PTRMEM_CST anymore. */
2035 *stmt_p
= cplus_expand_constant (stmt
);
2040 /* For MEM_REF, make sure not to sanitize the second operand even
2041 if it has reference type. It is just an offset with a type
2042 holding other information. There is no other processing we
2043 need to do for INTEGER_CSTs, so just ignore the second argument
2045 cp_walk_tree (&TREE_OPERAND (stmt
, 0), cp_genericize_r
, data
, NULL
);
2050 *stmt_p
= predeclare_vla (*stmt_p
);
2052 /* Warn of new allocations that are not big enough for the target
2055 && TREE_CODE (TREE_OPERAND (stmt
, 0)) == CALL_EXPR
2056 && POINTER_TYPE_P (TREE_TYPE (stmt
)))
2058 if (tree fndecl
= get_callee_fndecl (TREE_OPERAND (stmt
, 0)))
2059 if (DECL_IS_MALLOC (fndecl
))
2061 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (fndecl
));
2062 tree alloc_size
= lookup_attribute ("alloc_size", attrs
);
2064 warn_for_alloc_size (EXPR_LOCATION (stmt
),
2065 TREE_TYPE (TREE_TYPE (stmt
)),
2066 TREE_OPERAND (stmt
, 0), alloc_size
);
2070 if (!wtd
->no_sanitize_p
2071 && sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
)
2072 && TYPE_REF_P (TREE_TYPE (stmt
)))
2073 ubsan_maybe_instrument_reference (stmt_p
);
2077 /* Evaluate function concept checks instead of treating them as
2078 normal functions. */
2079 if (concept_check_p (stmt
))
2081 *stmt_p
= evaluate_concept_check (stmt
);
2082 * walk_subtrees
= 0;
2086 if (!wtd
->no_sanitize_p
2087 && sanitize_flags_p ((SANITIZE_NULL
2088 | SANITIZE_ALIGNMENT
| SANITIZE_VPTR
)))
2090 tree fn
= CALL_EXPR_FN (stmt
);
2092 && !error_operand_p (fn
)
2093 && INDIRECT_TYPE_P (TREE_TYPE (fn
))
2094 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn
))) == METHOD_TYPE
)
2097 = TREE_CODE (fn
) == ADDR_EXPR
2098 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
2099 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn
, 0));
2100 if (sanitize_flags_p (SANITIZE_NULL
| SANITIZE_ALIGNMENT
))
2101 ubsan_maybe_instrument_member_call (stmt
, is_ctor
);
2102 if (sanitize_flags_p (SANITIZE_VPTR
) && !is_ctor
)
2103 cp_ubsan_maybe_instrument_member_call (stmt
);
2105 else if (fn
== NULL_TREE
2106 && CALL_EXPR_IFN (stmt
) == IFN_UBSAN_NULL
2107 && TREE_CODE (CALL_EXPR_ARG (stmt
, 0)) == INTEGER_CST
2108 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt
, 0))))
2112 case AGGR_INIT_EXPR
:
2113 /* For calls to a multi-versioned function, overload resolution
2114 returns the function with the highest target priority, that is,
2115 the version that will checked for dispatching first. If this
2116 version is inlinable, a direct call to this version can be made
2117 otherwise the call should go through the dispatcher. */
2119 tree fn
= cp_get_callee_fndecl_nofold (stmt
);
2120 if (fn
&& DECL_FUNCTION_VERSIONED (fn
)
2121 && (current_function_decl
== NULL
2122 || !targetm
.target_option
.can_inline_p (current_function_decl
,
2124 if (tree dis
= get_function_version_dispatcher (fn
))
2126 mark_versions_used (dis
);
2127 dis
= build_address (dis
);
2128 if (TREE_CODE (stmt
) == CALL_EXPR
)
2129 CALL_EXPR_FN (stmt
) = dis
;
2131 AGGR_INIT_EXPR_FN (stmt
) = dis
;
2137 if (TARGET_EXPR_INITIAL (stmt
)
2138 && TREE_CODE (TARGET_EXPR_INITIAL (stmt
)) == CONSTRUCTOR
2139 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt
)))
2140 TARGET_EXPR_NO_ELIDE (stmt
) = 1;
2143 case TEMPLATE_ID_EXPR
:
2144 gcc_assert (concept_check_p (stmt
));
2145 /* Emit the value of the concept check. */
2146 *stmt_p
= evaluate_concept_check (stmt
);
2150 case OMP_DISTRIBUTE
:
2151 /* Need to explicitly instantiate copy ctors on class iterators of
2152 composite distribute parallel for. */
2153 if (OMP_FOR_INIT (*stmt_p
) == NULL_TREE
)
2155 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
2156 tree inner
= walk_tree (&OMP_FOR_BODY (*stmt_p
),
2157 find_combined_omp_for
, data
, NULL
);
2158 if (inner
!= NULL_TREE
2159 && TREE_CODE (inner
) == OMP_FOR
)
2161 for (int i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner
)); i
++)
2162 if (OMP_FOR_ORIG_DECLS (inner
)
2163 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
),
2165 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
),
2168 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner
), i
);
2169 /* Class iterators aren't allowed on OMP_SIMD, so the only
2170 case we need to solve is distribute parallel for. */
2171 gcc_assert (TREE_CODE (inner
) == OMP_FOR
2173 tree orig_decl
= TREE_PURPOSE (orig
);
2174 tree c
, cl
= NULL_TREE
;
2175 for (c
= OMP_FOR_CLAUSES (inner
);
2176 c
; c
= OMP_CLAUSE_CHAIN (c
))
2177 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
2178 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
2179 && OMP_CLAUSE_DECL (c
) == orig_decl
)
2184 if (cl
== NULL_TREE
)
2186 for (c
= OMP_PARALLEL_CLAUSES (*data
[1]);
2187 c
; c
= OMP_CLAUSE_CHAIN (c
))
2188 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_PRIVATE
2189 && OMP_CLAUSE_DECL (c
) == orig_decl
)
2197 orig_decl
= require_complete_type (orig_decl
);
2198 tree inner_type
= TREE_TYPE (orig_decl
);
2199 if (orig_decl
== error_mark_node
)
2201 if (TYPE_REF_P (TREE_TYPE (orig_decl
)))
2202 inner_type
= TREE_TYPE (inner_type
);
2204 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
2205 inner_type
= TREE_TYPE (inner_type
);
2206 get_copy_ctor (inner_type
, tf_warning_or_error
);
2223 case STATEMENT_LIST
:
2224 /* These cases are handled by shared code. */
2225 c_genericize_control_stmt (stmt_p
, walk_subtrees
, data
,
2226 cp_genericize_r
, cp_walk_subtrees
);
2230 *stmt_p
= build1_loc (EXPR_LOCATION (stmt
), VIEW_CONVERT_EXPR
,
2231 TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
2235 if (IS_TYPE_OR_DECL_P (stmt
))
2240 p_set
->add (*stmt_p
);
2245 /* Lower C++ front end trees to GENERIC in T_P. */
2248 cp_genericize_tree (tree
* t_p
, bool handle_invisiref_parm_p
)
2250 struct cp_genericize_data wtd
;
2252 wtd
.p_set
= new hash_set
<tree
>;
2253 wtd
.bind_expr_stack
.create (0);
2255 wtd
.try_block
= NULL_TREE
;
2256 wtd
.no_sanitize_p
= false;
2257 wtd
.handle_invisiref_parm_p
= handle_invisiref_parm_p
;
2258 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
2260 if (sanitize_flags_p (SANITIZE_VPTR
))
2261 cp_ubsan_instrument_member_accesses (t_p
);
2264 /* If a function that should end with a return in non-void
2265 function doesn't obviously end with return, add ubsan
2266 instrumentation code to verify it at runtime. If -fsanitize=return
2267 is not enabled, instrument __builtin_unreachable. */
2270 cp_maybe_instrument_return (tree fndecl
)
2272 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
2273 || DECL_CONSTRUCTOR_P (fndecl
)
2274 || DECL_DESTRUCTOR_P (fndecl
)
2275 || !targetm
.warn_func_return (fndecl
))
2278 if (!sanitize_flags_p (SANITIZE_RETURN
, fndecl
)
2279 /* Don't add __builtin_unreachable () if not optimizing, it will not
2280 improve any optimizations in that case, just break UB code.
2281 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
2282 UBSan covers this with ubsan_instrument_return above where sufficient
2283 information is provided, while the __builtin_unreachable () below
2284 if return sanitization is disabled will just result in hard to
2285 understand runtime error without location. */
2286 && ((!optimize
&& !flag_unreachable_traps
)
2287 || sanitize_flags_p (SANITIZE_UNREACHABLE
, fndecl
)))
2290 tree t
= DECL_SAVED_TREE (fndecl
);
2293 switch (TREE_CODE (t
))
2296 t
= BIND_EXPR_BODY (t
);
2298 case TRY_FINALLY_EXPR
:
2299 case CLEANUP_POINT_EXPR
:
2300 t
= TREE_OPERAND (t
, 0);
2302 case STATEMENT_LIST
:
2304 tree_stmt_iterator i
= tsi_last (t
);
2305 while (!tsi_end_p (i
))
2307 tree p
= tsi_stmt (i
);
2308 if (TREE_CODE (p
) != DEBUG_BEGIN_STMT
)
2328 tree
*p
= &DECL_SAVED_TREE (fndecl
);
2329 if (TREE_CODE (*p
) == BIND_EXPR
)
2330 p
= &BIND_EXPR_BODY (*p
);
2332 location_t loc
= DECL_SOURCE_LOCATION (fndecl
);
2333 if (sanitize_flags_p (SANITIZE_RETURN
, fndecl
))
2334 t
= ubsan_instrument_return (loc
);
2336 t
= build_builtin_unreachable (BUILTINS_LOCATION
);
2338 append_to_statement_list (t
, p
);
2342 cp_genericize (tree fndecl
)
2346 /* Fix up the types of parms passed by invisible reference. */
2347 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= DECL_CHAIN (t
))
2348 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
2350 /* If a function's arguments are copied to create a thunk,
2351 then DECL_BY_REFERENCE will be set -- but the type of the
2352 argument will be a pointer type, so we will never get
2354 gcc_assert (!DECL_BY_REFERENCE (t
));
2355 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
2356 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
2357 DECL_BY_REFERENCE (t
) = 1;
2358 TREE_ADDRESSABLE (t
) = 0;
2362 /* Do the same for the return value. */
2363 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
2365 t
= DECL_RESULT (fndecl
);
2366 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
2367 DECL_BY_REFERENCE (t
) = 1;
2368 TREE_ADDRESSABLE (t
) = 0;
2372 /* Adjust DECL_VALUE_EXPR of the original var. */
2373 tree outer
= outer_curly_brace_block (current_function_decl
);
2377 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
2379 && DECL_NAME (t
) == DECL_NAME (var
)
2380 && DECL_HAS_VALUE_EXPR_P (var
)
2381 && DECL_VALUE_EXPR (var
) == t
)
2383 tree val
= convert_from_reference (t
);
2384 SET_DECL_VALUE_EXPR (var
, val
);
2390 /* If we're a clone, the body is already GIMPLE. */
2391 if (DECL_CLONED_FUNCTION_P (fndecl
))
2394 /* Allow cp_genericize calls to be nested. */
2395 bc_state_t save_state
;
2396 save_bc_state (&save_state
);
2398 /* We do want to see every occurrence of the parms, so we can't just use
2399 walk_tree's hash functionality. */
2400 cp_genericize_tree (&DECL_SAVED_TREE (fndecl
), true);
2402 cp_maybe_instrument_return (fndecl
);
2404 /* Do everything else. */
2405 c_genericize (fndecl
);
2406 restore_bc_state (&save_state
);
2409 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2410 NULL if there is in fact nothing to do. ARG2 may be null if FN
2411 actually only takes one argument. */
2414 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
2416 tree defparm
, parm
, t
;
2424 nargs
= list_length (DECL_ARGUMENTS (fn
));
2425 argarray
= XALLOCAVEC (tree
, nargs
);
2427 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
2429 defparm
= TREE_CHAIN (defparm
);
2431 bool is_method
= TREE_CODE (TREE_TYPE (fn
)) == METHOD_TYPE
;
2432 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
2434 tree inner_type
= TREE_TYPE (arg1
);
2435 tree start1
, end1
, p1
;
2436 tree start2
= NULL
, p2
= NULL
;
2437 tree ret
= NULL
, lab
;
2443 inner_type
= TREE_TYPE (inner_type
);
2444 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
2445 size_zero_node
, NULL
, NULL
);
2447 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
2448 size_zero_node
, NULL
, NULL
);
2450 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
2451 start1
= build_fold_addr_expr_loc (input_location
, start1
);
2453 start2
= build_fold_addr_expr_loc (input_location
, start2
);
2455 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
2456 end1
= fold_build_pointer_plus (start1
, end1
);
2458 p1
= create_tmp_var (TREE_TYPE (start1
));
2459 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
2460 append_to_statement_list (t
, &ret
);
2464 p2
= create_tmp_var (TREE_TYPE (start2
));
2465 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
2466 append_to_statement_list (t
, &ret
);
2469 lab
= create_artificial_label (input_location
);
2470 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
2471 append_to_statement_list (t
, &ret
);
2476 /* Handle default arguments. */
2477 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
2478 parm
= TREE_CHAIN (parm
), i
++)
2479 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
2480 TREE_PURPOSE (parm
), fn
,
2481 i
- is_method
, tf_warning_or_error
);
2482 t
= build_call_a (fn
, i
, argarray
);
2483 t
= fold_convert (void_type_node
, t
);
2484 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
2485 append_to_statement_list (t
, &ret
);
2487 t
= fold_build_pointer_plus (p1
, TYPE_SIZE_UNIT (inner_type
));
2488 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
2489 append_to_statement_list (t
, &ret
);
2493 t
= fold_build_pointer_plus (p2
, TYPE_SIZE_UNIT (inner_type
));
2494 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
2495 append_to_statement_list (t
, &ret
);
2498 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
2499 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
2500 append_to_statement_list (t
, &ret
);
2506 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
2508 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
2509 /* Handle default arguments. */
2510 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
2511 parm
= TREE_CHAIN (parm
), i
++)
2512 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
2513 TREE_PURPOSE (parm
), fn
,
2514 i
- is_method
, tf_warning_or_error
);
2515 t
= build_call_a (fn
, i
, argarray
);
2516 t
= fold_convert (void_type_node
, t
);
2517 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
2521 /* Return code to initialize DECL with its default constructor, or
2522 NULL if there's nothing to do. */
2525 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
2527 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2531 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
2536 /* Return code to initialize DST with a copy constructor from SRC. */
2539 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
2541 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2545 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
2547 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
2552 /* Similarly, except use an assignment operator instead. */
2555 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
2557 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2561 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
2563 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
2568 /* Return code to destroy DECL. */
2571 cxx_omp_clause_dtor (tree clause
, tree decl
)
2573 tree info
= CP_OMP_CLAUSE_INFO (clause
);
2577 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
2582 /* True if OpenMP should privatize what this DECL points to rather
2583 than the DECL itself. */
2586 cxx_omp_privatize_by_reference (const_tree decl
)
2588 return (TYPE_REF_P (TREE_TYPE (decl
))
2589 || is_invisiref_parm (decl
));
2592 /* Return true if DECL is const qualified var having no mutable member. */
2594 cxx_omp_const_qual_no_mutable (tree decl
)
2596 tree type
= TREE_TYPE (decl
);
2597 if (TYPE_REF_P (type
))
2599 if (!is_invisiref_parm (decl
))
2601 type
= TREE_TYPE (type
);
2603 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
2605 /* NVR doesn't preserve const qualification of the
2607 tree outer
= outer_curly_brace_block (current_function_decl
);
2611 for (var
= BLOCK_VARS (outer
); var
; var
= DECL_CHAIN (var
))
2613 && DECL_NAME (decl
) == DECL_NAME (var
)
2614 && (TYPE_MAIN_VARIANT (type
)
2615 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
2617 if (TYPE_READONLY (TREE_TYPE (var
)))
2618 type
= TREE_TYPE (var
);
2624 if (type
== error_mark_node
)
2627 /* Variables with const-qualified type having no mutable member
2628 are predetermined shared. */
2629 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
2635 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2636 of DECL is predetermined. */
2638 enum omp_clause_default_kind
2639 cxx_omp_predetermined_sharing_1 (tree decl
)
2641 /* Static data members are predetermined shared. */
2642 if (TREE_STATIC (decl
))
2644 tree ctx
= CP_DECL_CONTEXT (decl
);
2645 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
2646 return OMP_CLAUSE_DEFAULT_SHARED
;
2648 if (c_omp_predefined_variable (decl
))
2649 return OMP_CLAUSE_DEFAULT_SHARED
;
2652 /* this may not be specified in data-sharing clauses, still we need
2653 to predetermined it firstprivate. */
2654 if (decl
== current_class_ptr
)
2655 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
2657 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2660 /* Likewise, but also include the artificial vars. We don't want to
2661 disallow the artificial vars being mentioned in explicit clauses,
2662 as we use artificial vars e.g. for loop constructs with random
2663 access iterators other than pointers, but during gimplification
2664 we want to treat them as predetermined. */
2666 enum omp_clause_default_kind
2667 cxx_omp_predetermined_sharing (tree decl
)
2669 enum omp_clause_default_kind ret
= cxx_omp_predetermined_sharing_1 (decl
);
2670 if (ret
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
2673 /* Predetermine artificial variables holding integral values, those
2674 are usually result of gimplify_one_sizepos or SAVE_EXPR
2677 && DECL_ARTIFICIAL (decl
)
2678 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
2679 && !(DECL_LANG_SPECIFIC (decl
)
2680 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
2681 return OMP_CLAUSE_DEFAULT_SHARED
;
2683 /* Similarly for typeinfo symbols. */
2684 if (VAR_P (decl
) && DECL_ARTIFICIAL (decl
) && DECL_TINFO_P (decl
))
2685 return OMP_CLAUSE_DEFAULT_SHARED
;
2687 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
2690 enum omp_clause_defaultmap_kind
2691 cxx_omp_predetermined_mapping (tree decl
)
2693 /* Predetermine artificial variables holding integral values, those
2694 are usually result of gimplify_one_sizepos or SAVE_EXPR
2697 && DECL_ARTIFICIAL (decl
)
2698 && INTEGRAL_TYPE_P (TREE_TYPE (decl
))
2699 && !(DECL_LANG_SPECIFIC (decl
)
2700 && DECL_OMP_PRIVATIZED_MEMBER (decl
)))
2701 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
;
2703 if (c_omp_predefined_variable (decl
))
2704 return OMP_CLAUSE_DEFAULTMAP_TO
;
2706 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
;
2709 /* Finalize an implicitly determined clause. */
2712 cxx_omp_finish_clause (tree c
, gimple_seq
*, bool /* openacc */)
2714 tree decl
, inner_type
;
2715 bool make_shared
= false;
2717 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
2718 && OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
2719 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_LASTPRIVATE
2720 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
)))
2723 decl
= OMP_CLAUSE_DECL (c
);
2724 decl
= require_complete_type (decl
);
2725 inner_type
= TREE_TYPE (decl
);
2726 if (decl
== error_mark_node
)
2728 else if (TYPE_REF_P (TREE_TYPE (decl
)))
2729 inner_type
= TREE_TYPE (inner_type
);
2731 /* We're interested in the base element, not arrays. */
2732 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
2733 inner_type
= TREE_TYPE (inner_type
);
2735 /* Check for special function availability by building a call to one.
2736 Save the results, because later we won't be in the right context
2737 for making these queries. */
2738 bool first
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
;
2739 bool last
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
;
2741 && CLASS_TYPE_P (inner_type
)
2742 && cxx_omp_create_clause_info (c
, inner_type
, !first
, first
, last
,
2748 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;
2749 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c
) = 0;
2750 OMP_CLAUSE_SHARED_READONLY (c
) = 0;
2754 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2755 disregarded in OpenMP construct, because it is going to be
2756 remapped during OpenMP lowering. SHARED is true if DECL
2757 is going to be shared, false if it is going to be privatized. */
2760 cxx_omp_disregard_value_expr (tree decl
, bool shared
)
2765 && DECL_HAS_VALUE_EXPR_P (decl
)
2766 && DECL_ARTIFICIAL (decl
)
2767 && DECL_LANG_SPECIFIC (decl
)
2768 && DECL_OMP_PRIVATIZED_MEMBER (decl
))
2770 if (VAR_P (decl
) && DECL_CONTEXT (decl
) && is_capture_proxy (decl
))
2775 /* Fold expression X which is used as an rvalue if RVAL is true. */
2778 cp_fold_maybe_rvalue (tree x
, bool rval
, fold_flags_t flags
)
2782 x
= cp_fold (x
, flags
);
2784 x
= mark_rvalue_use (x
);
2785 if (rval
&& DECL_P (x
)
2786 && !TYPE_REF_P (TREE_TYPE (x
)))
2788 tree v
= decl_constant_value (x
);
2789 if (v
!= x
&& v
!= error_mark_node
)
2801 cp_fold_maybe_rvalue (tree x
, bool rval
)
2803 return cp_fold_maybe_rvalue (x
, rval
, ff_none
);
2806 /* Fold expression X which is used as an rvalue. */
2809 cp_fold_rvalue (tree x
, fold_flags_t flags
)
2811 return cp_fold_maybe_rvalue (x
, true, flags
);
2815 cp_fold_rvalue (tree x
)
2817 return cp_fold_rvalue (x
, ff_none
);
2820 /* Perform folding on expression X. */
2823 cp_fully_fold (tree x
, mce_value manifestly_const_eval
)
2825 if (processing_template_decl
)
2827 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2828 have to call both. */
2829 if (cxx_dialect
>= cxx11
)
2831 x
= maybe_constant_value (x
, /*decl=*/NULL_TREE
, manifestly_const_eval
);
2832 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2833 a TARGET_EXPR; undo that here. */
2834 if (TREE_CODE (x
) == TARGET_EXPR
)
2835 x
= TARGET_EXPR_INITIAL (x
);
2836 else if (TREE_CODE (x
) == VIEW_CONVERT_EXPR
2837 && TREE_CODE (TREE_OPERAND (x
, 0)) == CONSTRUCTOR
2838 && TREE_TYPE (TREE_OPERAND (x
, 0)) == TREE_TYPE (x
))
2839 x
= TREE_OPERAND (x
, 0);
2841 fold_flags_t flags
= ff_none
;
2842 if (manifestly_const_eval
== mce_false
)
2843 flags
|= ff_mce_false
;
2844 return cp_fold_rvalue (x
, flags
);
2848 cp_fully_fold (tree x
)
2850 return cp_fully_fold (x
, mce_unknown
);
2853 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2857 cp_fully_fold_init (tree x
)
2859 if (processing_template_decl
)
2861 x
= cp_fully_fold (x
, mce_false
);
2862 cp_fold_data
data (ff_mce_false
);
2863 cp_walk_tree (&x
, cp_fold_r
, &data
, NULL
);
2867 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2868 and certain changes are made to the folding done. Or should be (FIXME). We
2869 never touch maybe_const, as it is only used for the C front-end
2870 C_MAYBE_CONST_EXPR. */
2873 c_fully_fold (tree x
, bool /*in_init*/, bool */
*maybe_const*/
, bool lval
)
2875 return cp_fold_maybe_rvalue (x
, !lval
);
2878 static GTY((deletable
)) hash_map
<tree
, tree
> *fold_caches
[2];
2880 /* Subroutine of cp_fold. Returns which fold cache to use according
2881 to the given flags. We need multiple caches since the result of
2882 folding may depend on which flags are used. */
2884 static hash_map
<tree
, tree
> *&
2885 get_fold_cache (fold_flags_t flags
)
2887 if (flags
& ff_mce_false
)
2888 return fold_caches
[1];
2890 return fold_caches
[0];
2893 /* Dispose of the whole FOLD_CACHE. */
2896 clear_fold_cache (void)
2898 for (auto& fold_cache
: fold_caches
)
2899 if (fold_cache
!= NULL
)
2900 fold_cache
->empty ();
2903 /* This function tries to fold an expression X.
2904 To avoid combinatorial explosion, folding results are kept in fold_cache.
2905 If X is invalid, we don't fold at all.
2906 For performance reasons we don't cache expressions representing a
2907 declaration or constant.
2908 Function returns X or its folded variant. */
2911 cp_fold (tree x
, fold_flags_t flags
)
2913 tree op0
, op1
, op2
, op3
;
2914 tree org_x
= x
, r
= NULL_TREE
;
2915 enum tree_code code
;
2917 bool rval_ops
= true;
2919 if (!x
|| x
== error_mark_node
)
2922 if (EXPR_P (x
) && (!TREE_TYPE (x
) || TREE_TYPE (x
) == error_mark_node
))
2925 /* Don't bother to cache DECLs or constants. */
2926 if (DECL_P (x
) || CONSTANT_CLASS_P (x
))
2929 auto& fold_cache
= get_fold_cache (flags
);
2930 if (fold_cache
== NULL
)
2931 fold_cache
= hash_map
<tree
, tree
>::create_ggc (101);
2933 if (tree
*cached
= fold_cache
->get (x
))
2935 /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
2936 argument has been folded into a tree invariant, make sure it is
2937 unshared. See PR112727. */
2938 if (TREE_CODE (x
) == SAVE_EXPR
&& *cached
!= x
)
2939 return unshare_expr (*cached
);
2943 uid_sensitive_constexpr_evaluation_checker c
;
2945 code
= TREE_CODE (x
);
2948 case CLEANUP_POINT_EXPR
:
2949 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2951 r
= cp_fold_rvalue (TREE_OPERAND (x
, 0), flags
);
2952 if (!TREE_SIDE_EFFECTS (r
))
2957 x
= fold_sizeof_expr (x
);
2960 case VIEW_CONVERT_EXPR
:
2963 case NON_LVALUE_EXPR
:
2966 if (VOID_TYPE_P (TREE_TYPE (x
)))
2968 /* This is just to make sure we don't end up with casts to
2969 void from error_mark_node. If we just return x, then
2970 cp_fold_r might fold the operand into error_mark_node and
2971 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2972 during gimplification doesn't like such casts.
2973 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2974 folding of the operand should be in the caches and if in cp_fold_r
2975 it will modify it in place. */
2976 op0
= cp_fold (TREE_OPERAND (x
, 0), flags
);
2977 if (op0
== error_mark_node
)
2978 x
= error_mark_node
;
2982 loc
= EXPR_LOCATION (x
);
2983 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
2985 if (code
== CONVERT_EXPR
2986 && SCALAR_TYPE_P (TREE_TYPE (x
))
2987 && op0
!= void_node
)
2988 /* During parsing we used convert_to_*_nofold; re-convert now using the
2989 folding variants, since fold() doesn't do those transformations. */
2990 x
= fold (convert (TREE_TYPE (x
), op0
));
2991 else if (op0
!= TREE_OPERAND (x
, 0))
2993 if (op0
== error_mark_node
)
2994 x
= error_mark_node
;
2996 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
3001 /* Conversion of an out-of-range value has implementation-defined
3002 behavior; the language considers it different from arithmetic
3003 overflow, which is undefined. */
3004 if (TREE_CODE (op0
) == INTEGER_CST
3005 && TREE_OVERFLOW_P (x
) && !TREE_OVERFLOW_P (op0
))
3006 TREE_OVERFLOW (x
) = false;
3010 case EXCESS_PRECISION_EXPR
:
3011 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3012 x
= fold_convert_loc (EXPR_LOCATION (x
), TREE_TYPE (x
), op0
);
3016 /* We don't need the decltype(auto) obfuscation anymore. */
3017 if (REF_PARENTHESIZED_P (x
))
3019 tree p
= maybe_undo_parenthesized_ref (x
);
3021 return cp_fold (p
, flags
);
3026 loc
= EXPR_LOCATION (x
);
3027 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), false, flags
);
3029 /* Cope with user tricks that amount to offsetof. */
3030 if (op0
!= error_mark_node
3031 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0
)))
3033 tree val
= get_base_address (op0
);
3035 && INDIRECT_REF_P (val
)
3036 && COMPLETE_TYPE_P (TREE_TYPE (val
))
3037 && TREE_CONSTANT (TREE_OPERAND (val
, 0)))
3039 val
= TREE_OPERAND (val
, 0);
3041 val
= maybe_constant_value (val
);
3042 if (TREE_CODE (val
) == INTEGER_CST
)
3043 return fold_offsetof (op0
, TREE_TYPE (x
));
3053 case FIX_TRUNC_EXPR
:
3059 case TRUTH_NOT_EXPR
:
3060 case FIXED_CONVERT_EXPR
:
3063 loc
= EXPR_LOCATION (x
);
3064 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3067 if (op0
!= TREE_OPERAND (x
, 0))
3069 if (op0
== error_mark_node
)
3070 x
= error_mark_node
;
3073 x
= fold_build1_loc (loc
, code
, TREE_TYPE (x
), op0
);
3074 if (code
== INDIRECT_REF
3075 && (INDIRECT_REF_P (x
) || TREE_CODE (x
) == MEM_REF
))
3077 TREE_READONLY (x
) = TREE_READONLY (org_x
);
3078 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
3079 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3086 gcc_assert (TREE_CODE (x
) != COND_EXPR
3087 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x
, 0))));
3090 case UNARY_PLUS_EXPR
:
3091 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0), flags
);
3092 if (op0
== error_mark_node
)
3093 x
= error_mark_node
;
3095 x
= fold_convert (TREE_TYPE (x
), op0
);
3098 case POSTDECREMENT_EXPR
:
3099 case POSTINCREMENT_EXPR
:
3101 case PREDECREMENT_EXPR
:
3102 case PREINCREMENT_EXPR
:
3107 case POINTER_PLUS_EXPR
:
3109 case POINTER_DIFF_EXPR
:
3112 case TRUNC_DIV_EXPR
:
3114 case FLOOR_DIV_EXPR
:
3115 case ROUND_DIV_EXPR
:
3116 case TRUNC_MOD_EXPR
:
3118 case ROUND_MOD_EXPR
:
3120 case EXACT_DIV_EXPR
:
3130 case TRUTH_AND_EXPR
:
3131 case TRUTH_ANDIF_EXPR
:
3133 case TRUTH_ORIF_EXPR
:
3134 case TRUTH_XOR_EXPR
:
3135 case LT_EXPR
: case LE_EXPR
:
3136 case GT_EXPR
: case GE_EXPR
:
3137 case EQ_EXPR
: case NE_EXPR
:
3138 case UNORDERED_EXPR
: case ORDERED_EXPR
:
3139 case UNLT_EXPR
: case UNLE_EXPR
:
3140 case UNGT_EXPR
: case UNGE_EXPR
:
3141 case UNEQ_EXPR
: case LTGT_EXPR
:
3142 case RANGE_EXPR
: case COMPLEX_EXPR
:
3144 loc
= EXPR_LOCATION (x
);
3145 op0
= cp_fold_maybe_rvalue (TREE_OPERAND (x
, 0), rval_ops
, flags
);
3146 op1
= cp_fold_rvalue (TREE_OPERAND (x
, 1), flags
);
3148 /* decltype(nullptr) has only one value, so optimize away all comparisons
3149 with that type right away, keeping them in the IL causes troubles for
3150 various optimizations. */
3151 if (COMPARISON_CLASS_P (org_x
)
3152 && TREE_CODE (TREE_TYPE (op0
)) == NULLPTR_TYPE
3153 && TREE_CODE (TREE_TYPE (op1
)) == NULLPTR_TYPE
)
3158 x
= constant_boolean_node (true, TREE_TYPE (x
));
3161 x
= constant_boolean_node (false, TREE_TYPE (x
));
3166 return omit_two_operands_loc (loc
, TREE_TYPE (x
), x
,
3170 if (op0
!= TREE_OPERAND (x
, 0) || op1
!= TREE_OPERAND (x
, 1))
3172 if (op0
== error_mark_node
|| op1
== error_mark_node
)
3173 x
= error_mark_node
;
3175 x
= fold_build2_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
);
3180 /* This is only needed for -Wnonnull-compare and only if
3181 TREE_NO_WARNING (org_x), but to avoid that option affecting code
3182 generation, we do it always. */
3183 if (COMPARISON_CLASS_P (org_x
))
3185 if (x
== error_mark_node
|| TREE_CODE (x
) == INTEGER_CST
)
3187 else if (COMPARISON_CLASS_P (x
))
3189 if (warn_nonnull_compare
3190 && warning_suppressed_p (org_x
, OPT_Wnonnull_compare
))
3191 suppress_warning (x
, OPT_Wnonnull_compare
);
3193 /* Otherwise give up on optimizing these, let GIMPLE folders
3194 optimize those later on. */
3195 else if (op0
!= TREE_OPERAND (org_x
, 0)
3196 || op1
!= TREE_OPERAND (org_x
, 1))
3198 x
= build2_loc (loc
, code
, TREE_TYPE (org_x
), op0
, op1
);
3199 if (warn_nonnull_compare
3200 && warning_suppressed_p (org_x
, OPT_Wnonnull_compare
))
3201 suppress_warning (x
, OPT_Wnonnull_compare
);
3211 loc
= EXPR_LOCATION (x
);
3212 op0
= cp_fold_rvalue (TREE_OPERAND (x
, 0), flags
);
3213 op1
= cp_fold (TREE_OPERAND (x
, 1), flags
);
3214 op2
= cp_fold (TREE_OPERAND (x
, 2), flags
);
3216 if (TREE_CODE (TREE_TYPE (x
)) == BOOLEAN_TYPE
)
3218 warning_sentinel
s (warn_int_in_bool_context
);
3219 if (!VOID_TYPE_P (TREE_TYPE (op1
)))
3220 op1
= cp_truthvalue_conversion (op1
, tf_warning_or_error
);
3221 if (!VOID_TYPE_P (TREE_TYPE (op2
)))
3222 op2
= cp_truthvalue_conversion (op2
, tf_warning_or_error
);
3224 else if (VOID_TYPE_P (TREE_TYPE (x
)))
3226 if (TREE_CODE (op0
) == INTEGER_CST
)
3228 /* If the condition is constant, fold can fold away
3229 the COND_EXPR. If some statement-level uses of COND_EXPR
3230 have one of the branches NULL, avoid folding crash. */
3232 op1
= build_empty_stmt (loc
);
3234 op2
= build_empty_stmt (loc
);
3238 /* Otherwise, don't bother folding a void condition, since
3239 it can't produce a constant value. */
3240 if (op0
!= TREE_OPERAND (x
, 0)
3241 || op1
!= TREE_OPERAND (x
, 1)
3242 || op2
!= TREE_OPERAND (x
, 2))
3243 x
= build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
3248 if (op0
!= TREE_OPERAND (x
, 0)
3249 || op1
!= TREE_OPERAND (x
, 1)
3250 || op2
!= TREE_OPERAND (x
, 2))
3252 if (op0
== error_mark_node
3253 || op1
== error_mark_node
3254 || op2
== error_mark_node
)
3255 x
= error_mark_node
;
3257 x
= fold_build3_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
);
3262 /* A COND_EXPR might have incompatible types in branches if one or both
3263 arms are bitfields. If folding exposed such a branch, fix it up. */
3264 if (TREE_CODE (x
) != code
3265 && x
!= error_mark_node
3266 && !useless_type_conversion_p (TREE_TYPE (org_x
), TREE_TYPE (x
)))
3267 x
= fold_convert (TREE_TYPE (org_x
), x
);
3273 tree callee
= get_callee_fndecl (x
);
3275 /* "Inline" calls to std::move/forward and other cast-like functions
3276 by simply folding them into a corresponding cast to their return
3277 type. This is cheaper than relying on the middle end to do so, and
3278 also means we avoid generating useless debug info for them at all.
3280 At this point the argument has already been converted into a
3281 reference, so it suffices to use a NOP_EXPR to express the
3283 if ((OPTION_SET_P (flag_fold_simple_inlines
)
3284 ? flag_fold_simple_inlines
3286 && call_expr_nargs (x
) == 1
3287 && decl_in_std_namespace_p (callee
)
3288 && DECL_NAME (callee
) != NULL_TREE
3289 && (id_equal (DECL_NAME (callee
), "move")
3290 || id_equal (DECL_NAME (callee
), "forward")
3291 || id_equal (DECL_NAME (callee
), "addressof")
3292 /* This addressof equivalent is used heavily in libstdc++. */
3293 || id_equal (DECL_NAME (callee
), "__addressof")
3294 || id_equal (DECL_NAME (callee
), "as_const")))
3296 r
= CALL_EXPR_ARG (x
, 0);
3297 /* Check that the return and argument types are sane before
3299 if (INDIRECT_TYPE_P (TREE_TYPE (x
))
3300 && INDIRECT_TYPE_P (TREE_TYPE (r
)))
3302 if (!same_type_p (TREE_TYPE (x
), TREE_TYPE (r
)))
3303 r
= build_nop (TREE_TYPE (x
), r
);
3304 x
= cp_fold (r
, flags
);
3309 int sv
= optimize
, nw
= sv
;
3311 /* Some built-in function calls will be evaluated at compile-time in
3312 fold (). Set optimize to 1 when folding __builtin_constant_p inside
3313 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3314 if (callee
&& fndecl_built_in_p (callee
) && !optimize
3315 && DECL_IS_BUILTIN_CONSTANT_P (callee
)
3316 && current_function_decl
3317 && DECL_DECLARED_CONSTEXPR_P (current_function_decl
))
3320 if (callee
&& fndecl_built_in_p (callee
, BUILT_IN_FRONTEND
))
3322 iloc_sentinel
ils (EXPR_LOCATION (x
));
3323 switch (DECL_FE_FUNCTION_CODE (callee
))
3325 case CP_BUILT_IN_IS_CONSTANT_EVALUATED
:
3326 /* Defer folding __builtin_is_constant_evaluated unless
3327 we know this isn't a manifestly constant-evaluated
3329 if (flags
& ff_mce_false
)
3330 x
= boolean_false_node
;
3332 case CP_BUILT_IN_SOURCE_LOCATION
:
3333 x
= fold_builtin_source_location (x
);
3335 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER
:
3336 x
= fold_builtin_is_corresponding_member
3337 (EXPR_LOCATION (x
), call_expr_nargs (x
),
3338 &CALL_EXPR_ARG (x
, 0));
3340 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS
:
3341 x
= fold_builtin_is_pointer_inverconvertible_with_class
3342 (EXPR_LOCATION (x
), call_expr_nargs (x
),
3343 &CALL_EXPR_ARG (x
, 0));
3352 && fndecl_built_in_p (callee
, CP_BUILT_IN_SOURCE_LOCATION
,
3355 x
= fold_builtin_source_location (x
);
3359 bool changed
= false;
3360 int m
= call_expr_nargs (x
);
3361 for (int i
= 0; i
< m
; i
++)
3363 r
= cp_fold (CALL_EXPR_ARG (x
, i
), flags
);
3364 if (r
!= CALL_EXPR_ARG (x
, i
))
3366 if (r
== error_mark_node
)
3368 x
= error_mark_node
;
3373 CALL_EXPR_ARG (x
, i
) = r
;
3377 if (x
== error_mark_node
)
3384 if (TREE_CODE (r
) != CALL_EXPR
)
3386 x
= cp_fold (r
, flags
);
3392 /* Invoke maybe_constant_value for functions declared
3393 constexpr and not called with AGGR_INIT_EXPRs.
3395 Do constexpr expansion of expressions where the call itself is not
3396 constant, but the call followed by an INDIRECT_REF is. */
3397 if (callee
&& DECL_DECLARED_CONSTEXPR_P (callee
)
3400 mce_value manifestly_const_eval
= mce_unknown
;
3401 if (flags
& ff_mce_false
)
3402 /* Allow folding __builtin_is_constant_evaluated to false during
3403 constexpr evaluation of this call. */
3404 manifestly_const_eval
= mce_false
;
3405 r
= maybe_constant_value (x
, /*decl=*/NULL_TREE
,
3406 manifestly_const_eval
);
3410 if (TREE_CODE (r
) != CALL_EXPR
)
3412 if (DECL_CONSTRUCTOR_P (callee
))
3414 loc
= EXPR_LOCATION (x
);
3415 tree s
= build_fold_indirect_ref_loc (loc
,
3416 CALL_EXPR_ARG (x
, 0));
3417 r
= cp_build_init_expr (s
, r
);
3430 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (x
);
3431 vec
<constructor_elt
, va_gc
> *nelts
= NULL
;
3432 FOR_EACH_VEC_SAFE_ELT (elts
, i
, p
)
3434 tree op
= cp_fold (p
->value
, flags
);
3437 if (op
== error_mark_node
)
3439 x
= error_mark_node
;
3444 nelts
= elts
->copy ();
3445 (*nelts
)[i
].value
= op
;
3450 x
= build_constructor (TREE_TYPE (x
), nelts
);
3451 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x
)
3452 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x
);
3453 CONSTRUCTOR_MUTABLE_POISON (x
)
3454 = CONSTRUCTOR_MUTABLE_POISON (org_x
);
3456 if (VECTOR_TYPE_P (TREE_TYPE (x
)))
3462 bool changed
= false;
3463 int n
= TREE_VEC_LENGTH (x
);
3465 for (int i
= 0; i
< n
; i
++)
3467 tree op
= cp_fold (TREE_VEC_ELT (x
, i
), flags
);
3468 if (op
!= TREE_VEC_ELT (x
, i
))
3472 TREE_VEC_ELT (x
, i
) = op
;
3481 case ARRAY_RANGE_REF
:
3483 loc
= EXPR_LOCATION (x
);
3484 op0
= cp_fold (TREE_OPERAND (x
, 0), flags
);
3485 op1
= cp_fold (TREE_OPERAND (x
, 1), flags
);
3486 op2
= cp_fold (TREE_OPERAND (x
, 2), flags
);
3487 op3
= cp_fold (TREE_OPERAND (x
, 3), flags
);
3489 if (op0
!= TREE_OPERAND (x
, 0)
3490 || op1
!= TREE_OPERAND (x
, 1)
3491 || op2
!= TREE_OPERAND (x
, 2)
3492 || op3
!= TREE_OPERAND (x
, 3))
3494 if (op0
== error_mark_node
3495 || op1
== error_mark_node
3496 || op2
== error_mark_node
3497 || op3
== error_mark_node
)
3498 x
= error_mark_node
;
3501 x
= build4_loc (loc
, code
, TREE_TYPE (x
), op0
, op1
, op2
, op3
);
3502 TREE_READONLY (x
) = TREE_READONLY (org_x
);
3503 TREE_SIDE_EFFECTS (x
) = TREE_SIDE_EFFECTS (org_x
);
3504 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3512 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3513 folding, evaluates to an invariant. In that case no need to wrap
3514 this folded tree with a SAVE_EXPR. */
3515 r
= cp_fold (TREE_OPERAND (x
, 0), flags
);
3516 if (tree_invariant_p (r
))
3521 x
= evaluate_requires_expr (x
);
3528 if (EXPR_P (x
) && TREE_CODE (x
) == code
)
3530 TREE_THIS_VOLATILE (x
) = TREE_THIS_VOLATILE (org_x
);
3531 copy_warning (x
, org_x
);
3534 if (!c
.evaluation_restricted_p ())
3536 fold_cache
->put (org_x
, x
);
3537 /* Prevent that we try to fold an already folded result again. */
3539 fold_cache
->put (x
, x
);
3545 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3548 lookup_hotness_attribute (tree list
)
3550 for (; list
; list
= TREE_CHAIN (list
))
3552 tree name
= get_attribute_name (list
);
3553 if ((is_attribute_p ("hot", name
)
3554 || is_attribute_p ("cold", name
)
3555 || is_attribute_p ("likely", name
)
3556 || is_attribute_p ("unlikely", name
))
3557 && is_attribute_namespace_p ("", list
))
3563 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3566 remove_hotness_attribute (tree list
)
3568 for (tree
*p
= &list
; *p
; )
3571 tree name
= get_attribute_name (l
);
3572 if ((is_attribute_p ("hot", name
)
3573 || is_attribute_p ("cold", name
)
3574 || is_attribute_p ("likely", name
)
3575 || is_attribute_p ("unlikely", name
))
3576 && is_attribute_namespace_p ("", l
))
3578 *p
= TREE_CHAIN (l
);
3581 p
= &TREE_CHAIN (l
);
3586 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3590 process_stmt_hotness_attribute (tree std_attrs
, location_t attrs_loc
)
3592 if (std_attrs
== error_mark_node
)
3594 if (tree attr
= lookup_hotness_attribute (std_attrs
))
3596 tree name
= get_attribute_name (attr
);
3597 bool hot
= (is_attribute_p ("hot", name
)
3598 || is_attribute_p ("likely", name
));
3599 tree pred
= build_predict_expr (hot
? PRED_HOT_LABEL
: PRED_COLD_LABEL
,
3600 hot
? TAKEN
: NOT_TAKEN
);
3601 SET_EXPR_LOCATION (pred
, attrs_loc
);
3603 if (tree other
= lookup_hotness_attribute (TREE_CHAIN (attr
)))
3604 warning (OPT_Wattributes
, "ignoring attribute %qE after earlier %qE",
3605 get_attribute_name (other
), name
);
3606 std_attrs
= remove_hotness_attribute (std_attrs
);
3611 /* Build IFN_ASSUME internal call for assume condition ARG. */
3614 build_assume_call (location_t loc
, tree arg
)
3616 if (!processing_template_decl
)
3617 arg
= fold_build_cleanup_point_expr (TREE_TYPE (arg
), arg
);
3618 return build_call_expr_internal_loc (loc
, IFN_ASSUME
, void_type_node
,
3622 /* If [[assume (cond)]] appears on this statement, handle it. */
3625 process_stmt_assume_attribute (tree std_attrs
, tree statement
,
3626 location_t attrs_loc
)
3628 if (std_attrs
== error_mark_node
)
3630 tree attr
= lookup_attribute ("gnu", "assume", std_attrs
);
3633 /* The next token after the assume attribute is not ';'. */
3636 warning_at (attrs_loc
, OPT_Wattributes
,
3637 "%<assume%> attribute not followed by %<;%>");
3640 for (; attr
; attr
= lookup_attribute ("gnu", "assume", TREE_CHAIN (attr
)))
3642 tree args
= TREE_VALUE (attr
);
3643 if (args
&& PACK_EXPANSION_P (args
))
3645 auto_diagnostic_group d
;
3646 error_at (attrs_loc
, "pack expansion of %qE attribute",
3647 get_attribute_name (attr
));
3648 if (cxx_dialect
>= cxx17
)
3649 inform (attrs_loc
, "use fold expression in the attribute "
3650 "argument instead");
3653 int nargs
= list_length (args
);
3656 auto_diagnostic_group d
;
3657 error_at (attrs_loc
, "wrong number of arguments specified for "
3658 "%qE attribute", get_attribute_name (attr
));
3659 inform (attrs_loc
, "expected %i, found %i", 1, nargs
);
3663 tree arg
= TREE_VALUE (args
);
3664 if (!type_dependent_expression_p (arg
))
3665 arg
= contextual_conv_bool (arg
, tf_warning_or_error
);
3666 if (error_operand_p (arg
))
3668 finish_expr_stmt (build_assume_call (attrs_loc
, arg
));
3671 return remove_attribute ("gnu", "assume", std_attrs
);
3674 /* Return the type std::source_location::__impl after performing
3675 verification on it. */
3678 get_source_location_impl_type ()
3680 tree name
= get_identifier ("source_location");
3681 tree decl
= lookup_qualified_name (std_node
, name
);
3682 if (TREE_CODE (decl
) != TYPE_DECL
)
3684 auto_diagnostic_group d
;
3685 if (decl
== error_mark_node
|| TREE_CODE (decl
) == TREE_LIST
)
3686 qualified_name_lookup_error (std_node
, name
, decl
, input_location
);
3688 error ("%qD is not a type", decl
);
3689 return error_mark_node
;
3691 name
= get_identifier ("__impl");
3692 tree type
= TREE_TYPE (decl
);
3693 decl
= lookup_qualified_name (type
, name
);
3694 if (TREE_CODE (decl
) != TYPE_DECL
)
3696 auto_diagnostic_group d
;
3697 if (decl
== error_mark_node
|| TREE_CODE (decl
) == TREE_LIST
)
3698 qualified_name_lookup_error (type
, name
, decl
, input_location
);
3700 error ("%qD is not a type", decl
);
3701 return error_mark_node
;
3703 type
= TREE_TYPE (decl
);
3704 if (TREE_CODE (type
) != RECORD_TYPE
)
3706 error ("%qD is not a class type", decl
);
3707 return error_mark_node
;
3711 for (tree field
= TYPE_FIELDS (type
);
3712 (field
= next_aggregate_field (field
)) != NULL_TREE
;
3713 field
= DECL_CHAIN (field
))
3715 if (DECL_NAME (field
) != NULL_TREE
)
3717 const char *n
= IDENTIFIER_POINTER (DECL_NAME (field
));
3718 if (strcmp (n
, "_M_file_name") == 0
3719 || strcmp (n
, "_M_function_name") == 0)
3721 if (TREE_TYPE (field
) != const_string_type_node
)
3723 error ("%qD does not have %<const char *%> type", field
);
3724 return error_mark_node
;
3729 else if (strcmp (n
, "_M_line") == 0 || strcmp (n
, "_M_column") == 0)
3731 if (TREE_CODE (TREE_TYPE (field
)) != INTEGER_TYPE
)
3733 error ("%qD does not have integral type", field
);
3734 return error_mark_node
;
3745 error ("%<std::source_location::__impl%> does not contain only "
3746 "non-static data members %<_M_file_name%>, "
3747 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3748 return error_mark_node
;
3750 return build_qualified_type (type
, TYPE_QUAL_CONST
);
3753 /* Type for source_location_table hash_set. */
3754 struct GTY((for_user
)) source_location_table_entry
{
3760 /* Traits class for function start hash maps below. */
3762 struct source_location_table_entry_hash
3763 : ggc_remove
<source_location_table_entry
>
3765 typedef source_location_table_entry value_type
;
3766 typedef source_location_table_entry compare_type
;
3769 hash (const source_location_table_entry
&ref
)
3771 inchash::hash
hstate (0);
3772 hstate
.add_int (ref
.loc
);
3773 hstate
.add_int (ref
.uid
);
3774 return hstate
.end ();
3778 equal (const source_location_table_entry
&ref1
,
3779 const source_location_table_entry
&ref2
)
3781 return ref1
.loc
== ref2
.loc
&& ref1
.uid
== ref2
.uid
;
3785 mark_deleted (source_location_table_entry
&ref
)
3787 ref
.loc
= UNKNOWN_LOCATION
;
3789 ref
.var
= NULL_TREE
;
3792 static const bool empty_zero_p
= true;
3795 mark_empty (source_location_table_entry
&ref
)
3797 ref
.loc
= UNKNOWN_LOCATION
;
3799 ref
.var
= NULL_TREE
;
3803 is_deleted (const source_location_table_entry
&ref
)
3805 return (ref
.loc
== UNKNOWN_LOCATION
3807 && ref
.var
== NULL_TREE
);
3811 is_empty (const source_location_table_entry
&ref
)
3813 return (ref
.loc
== UNKNOWN_LOCATION
3815 && ref
.var
== NULL_TREE
);
3819 pch_nx (source_location_table_entry
&p
)
3821 extern void gt_pch_nx (source_location_table_entry
&);
3826 pch_nx (source_location_table_entry
&p
, gt_pointer_operator op
, void *cookie
)
3828 extern void gt_pch_nx (source_location_table_entry
*, gt_pointer_operator
,
3830 gt_pch_nx (&p
, op
, cookie
);
3834 static GTY(()) hash_table
<source_location_table_entry_hash
>
3835 *source_location_table
;
3836 static GTY(()) unsigned int source_location_id
;
3838 /* Fold the __builtin_source_location () call T. */
3841 fold_builtin_source_location (const_tree t
)
3843 gcc_assert (TREE_CODE (t
) == CALL_EXPR
);
3844 /* TREE_TYPE (t) is const std::source_location::__impl* */
3845 tree source_location_impl
= TREE_TYPE (TREE_TYPE (t
));
3846 if (source_location_impl
== error_mark_node
)
3847 return build_zero_cst (const_ptr_type_node
);
3848 gcc_assert (CLASS_TYPE_P (source_location_impl
)
3849 && id_equal (TYPE_IDENTIFIER (source_location_impl
), "__impl"));
3851 location_t loc
= EXPR_LOCATION (t
);
3852 if (source_location_table
== NULL
)
3853 source_location_table
3854 = hash_table
<source_location_table_entry_hash
>::create_ggc (64);
3855 const line_map_ordinary
*map
;
3856 source_location_table_entry entry
;
3858 = linemap_resolve_location (line_table
, loc
, LRK_MACRO_EXPANSION_POINT
,
3860 entry
.uid
= current_function_decl
? DECL_UID (current_function_decl
) : -1;
3861 entry
.var
= error_mark_node
;
3862 source_location_table_entry
*entryp
3863 = source_location_table
->find_slot (entry
, INSERT
);
3870 ASM_GENERATE_INTERNAL_LABEL (tmp_name
, "Lsrc_loc", source_location_id
++);
3871 var
= build_decl (loc
, VAR_DECL
, get_identifier (tmp_name
),
3872 source_location_impl
);
3873 TREE_STATIC (var
) = 1;
3874 TREE_PUBLIC (var
) = 0;
3875 DECL_ARTIFICIAL (var
) = 1;
3876 DECL_IGNORED_P (var
) = 1;
3877 DECL_EXTERNAL (var
) = 0;
3878 DECL_DECLARED_CONSTEXPR_P (var
) = 1;
3879 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var
) = 1;
3880 layout_decl (var
, 0);
3882 vec
<constructor_elt
, va_gc
> *v
= NULL
;
3884 for (tree field
= TYPE_FIELDS (source_location_impl
);
3885 (field
= next_aggregate_field (field
)) != NULL_TREE
;
3886 field
= DECL_CHAIN (field
))
3888 const char *n
= IDENTIFIER_POINTER (DECL_NAME (field
));
3889 tree val
= NULL_TREE
;
3890 if (strcmp (n
, "_M_file_name") == 0)
3892 if (const char *fname
= LOCATION_FILE (loc
))
3894 fname
= remap_macro_filename (fname
);
3895 val
= build_string_literal (fname
);
3898 val
= build_string_literal ("");
3900 else if (strcmp (n
, "_M_function_name") == 0)
3902 const char *name
= "";
3904 if (current_function_decl
)
3905 name
= cxx_printable_name (current_function_decl
, 2);
3907 val
= build_string_literal (name
);
3909 else if (strcmp (n
, "_M_line") == 0)
3910 val
= build_int_cst (TREE_TYPE (field
), LOCATION_LINE (loc
));
3911 else if (strcmp (n
, "_M_column") == 0)
3912 val
= build_int_cst (TREE_TYPE (field
), LOCATION_COLUMN (loc
));
3915 CONSTRUCTOR_APPEND_ELT (v
, field
, val
);
3918 tree ctor
= build_constructor (source_location_impl
, v
);
3919 TREE_CONSTANT (ctor
) = 1;
3920 TREE_STATIC (ctor
) = 1;
3921 DECL_INITIAL (var
) = ctor
;
3922 varpool_node::finalize_decl (var
);
3927 return build_fold_addr_expr_with_type_loc (loc
, var
, TREE_TYPE (t
));
3930 #include "gt-cp-cp-gimplify.h"