/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree *);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

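/* An illustrative sketch of the protocol (not from the original source):
   lowering "while (c) { ... break; ... }" does roughly

     blab = begin_bc_block (bc_break, loc);
     ... genericize the body; each "break" becomes GOTO_EXPR <blab>,
         marking the label TREE_USED ...
     finish_bc_block (&stmts, bc_break, blab);

   so the LABEL_EXPR is emitted only if some break (or continue)
   actually jumped to it.  */
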
/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
                                            gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
                     is_gimple_reg, fb_rvalue);
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

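/* Illustrative shape of the result (a sketch, not emitted verbatim):
   for "void f () throw (E) { body }" the function body becomes roughly

     TRY_CATCH_EXPR
       body
       EH_FILTER_EXPR <allowed-types = E>
         EH_FILTER_FAILURE: call the unexpected handler with the
         current exception pointer.  */
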
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

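/* For example (illustrative):

     if (cond) f (); else g ();   =>   COND_EXPR <cond, f (), g ()>

   and when COND is a constant whose dead arm has no side effects, the
   whole statement reduces to the live arm alone.  */
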
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

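/* Illustrative result for "while (cond) body" (a sketch; the labels
   appear only when actually used):

     LOOP_EXPR {
       COND_EXPR <cond, {}, GOTO_EXPR <break label>>
       body
       continue label:
     }
     break label:

   For a do-while loop the exit test is appended after the body instead
   of before it.  */
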
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

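/* Illustrative result (sketch):

     SWITCH_EXPR <type, cond, body>
     break label:

   where the break label is emitted only if some "break" targeted it.  */
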
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

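/* Illustratively, "continue" lowers to a not-taken PREDICT_EXPR
   followed by GOTO_EXPR <continue label>, while "break" lowers to a
   bare GOTO_EXPR <break label>.  */
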
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
        /* Handle aggregate NSDMI.  */
        replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

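/* For example (illustrative): "X x = X (1);" arrives here as

     INIT_EXPR <x, AGGR_INIT_EXPR <X::X, <slot>, 1>>

   and leaves as the AGGR_INIT_EXPR alone, with its slot operand
   replaced by "x", so the constructor initializes "x" directly.  */
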
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

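/* E.g. (illustrative): for "struct empty {}; e1 = e2;" the copy moves
   no data, so cp_gimplify_expr can reduce the assignment to an lvalue
   evaluation of "e1" plus any side effects of the RHS.  */
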
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_detect_spawn_and_unwrap (expr_p))
            {
              cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
                                                          pre_p, post_p);
              return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
            }
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          {
            cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          }

        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
                  && cilk_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      ret = GS_OK;
      /* DR 1030 says that we need to evaluate the elements of an
         initializer-list in forward order even when it's used as arguments to
         a constructor.  So if the target wants to evaluate them in reverse
         order and there's more than one argument other than 'this', gimplify
         them in order.  */
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
          && call_expr_nargs (*expr_p) > 2)
        {
          int nargs = call_expr_nargs (*expr_p);
          location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
          for (int i = 1; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a PARM_DECL or RESULT_DECL that is passed by
   invisible reference.  */

static bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the uid in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

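/* Illustrative: given

     S s;                       // S is a class type
     #pragma omp task           // no default(shared)
       use (s);

   the use of "s" inside the task is noted as implicitly firstprivate,
   so S's copy constructor and destructor are instantiated here, before
   gimplification would need them.  */
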
/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, NULL, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, do dereference invisible reference parms.  */
  if (is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (TREE_CODE (d) == VAR_DECL)
        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
        /* Never mind.  */;
      else if (wtd->try_block)
        {
          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
              && warning_at (loc, OPT_Wterminate,
                             "throw will always call terminate()")
              && cxx_dialect >= cxx11
              && DECL_DESTRUCTOR_P (current_function_decl))
            inform (loc, "in C++11 destructors default to noexcept");
        }
      else
        {
          if (warn_cxx11_compat && cxx_dialect < cxx11
              && DECL_DESTRUCTOR_P (current_function_decl)
              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                  == NULL_TREE)
              && (get_defaulted_eh_spec (current_function_decl)
                  == empty_except_spec))
            warning_at (loc, OPT_Wc__11_compat,
                        "in C++11 this throw will terminate because "
                        "destructors default to noexcept");
        }
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE
           || TREE_CODE (stmt) == OMP_TASKLOOP)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if ((flag_sanitize
            & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
           && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

1422 cp_genericize_tree (tree
* t_p
)
1424 struct cp_genericize_data wtd
;
1426 wtd
.p_set
= new hash_set
<tree
>;
1427 wtd
.bind_expr_stack
.create (0);
1429 wtd
.try_block
= NULL_TREE
;
1430 wtd
.no_sanitize_p
= false;
1431 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1433 wtd
.bind_expr_stack
.release ();
1434 if (flag_sanitize
& SANITIZE_VPTR
)
1435 cp_ubsan_instrument_member_accesses (t_p
);
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

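/* Illustrative: for "void f (NonTrivial a)" the parameter is really
   passed by invisible reference, so the fixup above gives "a" the
   reference-like DECL_ARG_TYPE and sets DECL_BY_REFERENCE, while
   cp_genericize_r rewrote each use of "a" in the body as a dereference
   via convert_from_reference.  */
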
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

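/* Illustrative sketch of the code built for array operands:

     p1 = &arg1[0]...[0];
     end1 = p1 + sizeof (arg1);
     lab:
       fn (p1 [, p2] [, default args]);
       p1 = p1 + sizeof (element);   [p2 likewise]
     if (p1 != end1) goto lab;  */
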
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is const qualified var having no mutable member.  */
static bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  return !shared
         && VAR_P (decl)
         && DECL_HAS_VALUE_EXPR_P (decl)
         && DECL_ARTIFICIAL (decl)
         && DECL_LANG_SPECIFIC (decl)
         && DECL_OMP_PRIVATIZED_MEMBER (decl);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  return cp_fold (x);
}

/* Fold expression X which is used as an rvalue if RVAL is true.  */

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  if (rval && DECL_P (x))
    {
      tree v = decl_constant_value (x);
      if (v != error_mark_node)
        x = v;
    }
  return cp_fold (x);
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
{
  /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
     INTEGER_CST.  */
  return cp_fold_rvalue (x);
}

static GTY((cache, deletable)) cache_map fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  gt_cleare_cache (fold_cache);
}

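/* Illustrative: cp_fold below memoizes its results here, so refolding
   an already-processed subexpression costs one hash lookup; both the
   original tree and its folded form are mapped to the folded form.  */
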
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If we are processing a template or X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Function returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (processing_template_decl
      || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (tree cached = fold_cache.get (x))
    return cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
          && SCALAR_TYPE_P (TREE_TYPE (x))
          && op0 != void_node)
        /* During parsing we used convert_to_*_nofold; re-convert now using the
           folding variants, since fold() doesn't do those transformations.  */
        x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
         behavior; the language considers it different from arithmetic
         overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
          && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
        TREE_OVERFLOW (x) = false;

      break;

    case ADDR_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    case INDIRECT_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (op0 != TREE_OPERAND (x, 0))
        {
          if (op0 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
        }
      else
        x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
                  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
        x = error_mark_node;
      else
        x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
        {
          if (op0 == error_mark_node || op1 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
        }
      else
        x = fold (x);

      if (TREE_NO_WARNING (org_x)
          && warn_nonnull_compare
          && COMPARISON_CLASS_P (org_x))
        {
          if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
            ;
          else if (COMPARISON_CLASS_P (x))
            TREE_NO_WARNING (x) = 1;
          /* Otherwise give up on optimizing these, let GIMPLE folders
             optimize those later on.  */
          else if (op0 != TREE_OPERAND (org_x, 0)
                   || op1 != TREE_OPERAND (org_x, 1))
            {
              x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
              TREE_NO_WARNING (x) = 1;
            }
          else
            x = org_x;
        }
      break;

    case VEC_COND_EXPR:
    case COND_EXPR:

      /* Don't bother folding a void condition, since it can't produce a
         constant value.  Also, some statement-level uses of COND_EXPR leave
         one of the branches NULL, so folding would crash.  */
      if (VOID_TYPE_P (TREE_TYPE (x)))
        return x;

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node)
            x = error_mark_node;
          else
            x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
        }
      else
        x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code)
        if (tree type = is_bitfield_expr_with_lowered_type (x))
          x = fold_convert (type, x);

      break;

    case CALL_EXPR:
      {
        int i, m, sv = optimize, nw = sv, changed = 0;
        tree callee = get_callee_fndecl (x);

        /* Some built-in function calls will be evaluated at compile-time in
           fold ().  Set optimize to 1 when folding __builtin_constant_p inside
           a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
        if (callee && DECL_BUILT_IN (callee) && !optimize
            && DECL_IS_BUILTIN_CONSTANT_P (callee)
            && current_function_decl
            && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
          nw = 1;

        x = copy_node (x);

        m = call_expr_nargs (x);
        for (i = 0; i < m; i++)
          {
            r = cp_fold (CALL_EXPR_ARG (x, i));
            if (r != CALL_EXPR_ARG (x, i))
              {
                if (r == error_mark_node)
                  {
                    x = error_mark_node;
                    break;
                  }
                changed = 1;
              }
            CALL_EXPR_ARG (x, i) = r;
          }
        if (x == error_mark_node)
          break;

        optimize = nw;
        r = fold (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = cp_fold (r);
            break;
          }

        optimize = nw;

        /* Invoke maybe_constant_value for functions declared
           constexpr and not called with AGGR_INIT_EXPRs.
           TODO:
           Do constexpr expansion of expressions where the call itself is not
           constant, but the call followed by an INDIRECT_REF is.  */
        if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
            && !flag_no_inline)
          r = maybe_constant_value (x);
        optimize = sv;

        if (TREE_CODE (r) != CALL_EXPR)
          {
            x = r;
            break;
          }

        if (!changed)
          x = org_x;
        break;
      }

    case CONSTRUCTOR:
      {
        unsigned i;
        constructor_elt *p;
        bool changed = false;
        vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
        vec<constructor_elt, va_gc> *nelts = NULL;
        vec_safe_reserve (nelts, vec_safe_length (elts));
        FOR_EACH_VEC_SAFE_ELT (elts, i, p)
          {
            tree op = cp_fold (p->value);
            constructor_elt e = { p->index, op };
            nelts->quick_push (e);
            if (op != p->value)
              {
                if (op == error_mark_node)
                  {
                    x = error_mark_node;
                    changed = false;
                    break;
                  }
                changed = true;
              }
          }
        if (changed)
          x = build_constructor (TREE_TYPE (x), nelts);
        else
          vec_free (nelts);
        break;
      }
    case TREE_VEC:
      {
        bool changed = false;
        vec<tree, va_gc> *vec = make_tree_vector ();
        int i, n = TREE_VEC_LENGTH (x);
        vec_safe_reserve (vec, n);

        for (i = 0; i < n; i++)
          {
            tree op = cp_fold (TREE_VEC_ELT (x, i));
            vec->quick_push (op);
            if (op != TREE_VEC_ELT (x, i))
              changed = true;
          }

        if (changed)
          {
            r = copy_node (x);
            for (i = 0; i < n; i++)
              TREE_VEC_ELT (r, i) = (*vec)[i];
            x = r;
          }

        release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
          || op1 != TREE_OPERAND (x, 1)
          || op2 != TREE_OPERAND (x, 2)
          || op3 != TREE_OPERAND (x, 3))
        {
          if (op0 == error_mark_node
              || op1 == error_mark_node
              || op2 == error_mark_node
              || op3 == error_mark_node)
            x = error_mark_node;
          else
            x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
        }
      else
        x = fold (x);

      break;

    default:
      return org_x;
    }

  fold_cache.put (org_x, x);
  /* Prevent that we try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache.put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"