/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
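
/* Example (an illustrative sketch; the exact GENERIC dump spelling may
   differ):  a handler such as

     try { f (); } catch (int e) { g (e); }

   is lowered by the two functions above to roughly

     TRY_CATCH_EXPR <<f ()>, CATCH_EXPR <int, <g (e)>>>

   i.e. the TRY_BLOCK becomes a TRY_CATCH_EXPR whose handler sequence
   holds a CATCH_EXPR keyed on the caught type.  */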

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
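
/* Example (a sketch, not the literal dump):  for

     void f () throw (int) { g (); }

   the body is wrapped so that an escaping exception of a disallowed
   type funnels into the failure expression built above, roughly

     TRY_CATCH_EXPR <<g ()>,
                     EH_FILTER_EXPR <<int>, <call_unexpected_fn (...)>>>

   where the failure call ultimately reaches the unexpected-exception
   runtime handler.  */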

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      predictor_name (pr));
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
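
/* Example (illustrative):  the warning path above fires when both arms
   begin with the same hot/cold prediction, e.g. both marked with the
   C++20 [[likely]] attribute, and the constant-condition paths turn

     if (1) f (); else g ();

   directly into the then-arm with no COND_EXPR left behind (assuming
   the dead arm has no side effects).  */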

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
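
/* Example (a rough sketch of the resulting GENERIC):

     while (cond) { body; }

   becomes

     LOOP_EXPR <
       if (cond) ; else goto break_lab;
       body;
       continue_lab:;>
     break_lab:;

   with the two labels supplied by begin_bc_block, so break and continue
   statements inside BODY can be rewritten into plain gotos.  */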

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}
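
/* Example (illustrative):

     switch (c) { case 0: f (); break; default: g (); }

   is rewritten into roughly

     SWITCH_EXPR <c,
                  case 0:; f (); goto break_lab;
                  default:; g ();
                  break_lab:;>

   with each break lowered to a goto to the label pushed by
   begin_bc_block above.  */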

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
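
/* Example (illustrative):  with -Wunused-value, the branch above fires
   on a statement whose value is computed and then discarded:

     int x = 1;
     x == 2;    // warning: statement with no effect

   while a statement that does have side effects is handed to
   warn_if_unused_value for the finer-grained diagnostics instead.  */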

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
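
/* Example (sketch):  for

     T t = T (1, 2);

   the front end builds roughly INIT_EXPR <t, AGGR_INIT_EXPR <T::T, 1, 2,
   slot>>; the loop above redirects the AGGR_INIT_EXPR's slot operand to
   t itself so the constructor writes directly into the variable, and the
   INIT_EXPR wrapper disappears instead of constructing a temporary and
   copying it.  */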

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
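
/* Example (illustrative):  MUST_NOT_THROW_EXPR wraps regions that must
   not propagate exceptions, such as a noexcept function body; the
   gimplifier above produces a GIMPLE_TRY whose handler is
   eh_must_not_throw (terminate_fn), so an exception escaping the region
   calls std::terminate rather than unwinding further.  */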

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
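
/* Example (illustrative):  for

     struct E {};   // no data members, really empty
     E a, b;
     a = b;

   the assignment satisfies simple_empty_class_p, so the MODIFY_EXPR
   handling in cp_gimplify_expr below can drop the copy entirely;
   nothing needs to move for an empty class.  */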

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
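
/* Example (illustrative):  under the C++17 (P0145) ordering used in the
   MODIFY_EXPR case below,

     a[i++] = f ();

   has an lvalue with side effects (the i++ in the ARRAY_REF index), so
   the call on the RHS is preevaluated into a temporary to keep it
   sequenced before the LHS; plain a[i] = f () needs no such
   preevaluation.  */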

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl
              && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
                                    BUILT_IN_FRONTEND))
            *expr_p = boolean_false_node;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
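
/* Example (illustrative):  the CALL_EXPR handling above folds the
   front-end builtin behind std::is_constant_evaluated () to false once
   gimplification is reached, since a call surviving to this point is
   not in a constant-evaluation context:

     int f () { return std::is_constant_evaluated () ? 1 : 2; }  // 2

   constant-evaluated uses were already folded to true earlier.  */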

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs of both cxx_int_tree_map entries are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
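
/* Example (illustrative):  in

     struct S { S (); S (const S &); ~S (); void g (); };
     void f ()
     {
       S s;
       #pragma omp task
       s.g ();
     }

   s is implicitly determined firstprivate in the task, so the tracking
   above instantiates S's copy constructor and destructor now; by the
   time gimplification needs them it would be too late to instantiate
   them.  */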

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle-end.  For now we have most foldings only on GENERIC
   in fold-const, so we need to perform this before the transformation
   to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          TREE_USED (h->to) |= TREE_USED (stmt);
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT
                            | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "throw will always call terminate()")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to noexcept");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this throw will terminate because "
                          "destructors default to noexcept");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
         improve any optimizations in that case, just break UB code.
         Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
         UBSan covers this with ubsan_instrument_return above where sufficient
         information is provided, while the __builtin_unreachable () below
         if return sanitization is disabled will just result in hard to
         understand runtime error without location.  */
      && (!optimize
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            while (!tsi_end_p (i))
              {
                tree p = tsi_stmt (i);
                if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
                  break;
                tsi_prev (&i);
              }
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
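
/* Example (illustrative):  for a function that can fall off its end,

     int f (int x) { if (x) return 1; }

   the code above appends either a UBSan missing-return check (with
   -fsanitize=return) or a __builtin_unreachable () call after the body,
   since flowing off the end of a value-returning function is undefined
   behavior.  */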
1715
dddab69e 1716void
1717cp_genericize (tree fndecl)
1718{
dddcebdc 1719 tree t;
dddcebdc 1720
1721 /* Fix up the types of parms passed by invisible reference. */
1767a056 1722 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1fe46df1 1723 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1724 {
1725 /* If a function's arguments are copied to create a thunk,
1726 then DECL_BY_REFERENCE will be set -- but the type of the
1727 argument will be a pointer type, so we will never get
1728 here. */
1729 gcc_assert (!DECL_BY_REFERENCE (t));
1730 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1731 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1732 DECL_BY_REFERENCE (t) = 1;
1733 TREE_ADDRESSABLE (t) = 0;
1734 relayout_decl (t);
1735 }
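
  /* Illustrative (an assumption about a typical ABI case): for

       void f (std::string s);

     the parameter has a non-trivially-copyable type, so it is passed by
     invisible reference and its PARM_DECL is rewritten above to the
     reference type recorded in DECL_ARG_TYPE, with DECL_BY_REFERENCE
     set.  */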
dddcebdc 1736
806e4c12 1737 /* Do the same for the return value. */
1738 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1739 {
1740 t = DECL_RESULT (fndecl);
1741 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1742 DECL_BY_REFERENCE (t) = 1;
1743 TREE_ADDRESSABLE (t) = 0;
1744 relayout_decl (t);
ae294470 1745 if (DECL_NAME (t))
1746 {
1747 /* Adjust DECL_VALUE_EXPR of the original var. */
1748 tree outer = outer_curly_brace_block (current_function_decl);
1749 tree var;
1750
1751 if (outer)
1752 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1753 if (VAR_P (var)
1754 && DECL_NAME (t) == DECL_NAME (var)
ae294470 1755 && DECL_HAS_VALUE_EXPR_P (var)
1756 && DECL_VALUE_EXPR (var) == t)
1757 {
1758 tree val = convert_from_reference (t);
1759 SET_DECL_VALUE_EXPR (var, val);
1760 break;
1761 }
1762 }
806e4c12 1763 }
1764
dddcebdc 1765 /* If we're a clone, the body is already GIMPLE. */
1766 if (DECL_CLONED_FUNCTION_P (fndecl))
1767 return;
1768
df0c563f 1769 /* Allow cp_genericize calls to be nested. */
1770 tree save_bc_label[2];
1771 save_bc_label[bc_break] = bc_label[bc_break];
1772 save_bc_label[bc_continue] = bc_label[bc_continue];
1773 bc_label[bc_break] = NULL_TREE;
1774 bc_label[bc_continue] = NULL_TREE;
1775
dddcebdc 1776 /* We do want to see every occurrence of the parms, so we can't just use
1777 walk_tree's hash functionality. */
a0168bf5 1778 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
dddab69e 1779
2fb20ba2 1780 cp_maybe_instrument_return (fndecl);
020bc656 1781
dddab69e 1782 /* Do everything else. */
1783 c_genericize (fndecl);
8487df40 1784
1785 gcc_assert (bc_label[bc_break] == NULL);
1786 gcc_assert (bc_label[bc_continue] == NULL);
df0c563f 1787 bc_label[bc_break] = save_bc_label[bc_break];
1788 bc_label[bc_continue] = save_bc_label[bc_continue];
8487df40 1789}
1790\f
1791/* Build code to apply FN to each member of ARG1 and ARG2. FN may be
 1792 NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
 1793 actually only takes one argument. */
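
/* A rough sketch (illustrative, not the exact GENERIC built) of the
   array case below, applying FN elementwise over ARG1 and ARG2:

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];
   lab:
     fn (p1, p2, <converted default args>);
     p1 += sizeof (*p1);  p2 += sizeof (*p2);
     if (p1 != end1) goto lab;  */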
1794
1795static tree
1796cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1797{
c1be427d 1798 tree defparm, parm, t;
d01f58f9 1799 int i = 0;
1800 int nargs;
1801 tree *argarray;
2f2c591f 1802
8487df40 1803 if (fn == NULL)
1804 return NULL;
1805
d01f58f9 1806 nargs = list_length (DECL_ARGUMENTS (fn));
fd70b918 1807 argarray = XALLOCAVEC (tree, nargs);
d01f58f9 1808
2f2c591f 1809 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1810 if (arg2)
1811 defparm = TREE_CHAIN (defparm);
1812
c06d7bdd 1813 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
8487df40 1814 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1815 {
1816 tree inner_type = TREE_TYPE (arg1);
1817 tree start1, end1, p1;
1818 tree start2 = NULL, p2 = NULL;
c1be427d 1819 tree ret = NULL, lab;
8487df40 1820
1821 start1 = arg1;
1822 start2 = arg2;
1823 do
1824 {
1825 inner_type = TREE_TYPE (inner_type);
1826 start1 = build4 (ARRAY_REF, inner_type, start1,
1827 size_zero_node, NULL, NULL);
1828 if (arg2)
1829 start2 = build4 (ARRAY_REF, inner_type, start2,
1830 size_zero_node, NULL, NULL);
1831 }
1832 while (TREE_CODE (inner_type) == ARRAY_TYPE);
389dd41b 1833 start1 = build_fold_addr_expr_loc (input_location, start1);
8487df40 1834 if (arg2)
389dd41b 1835 start2 = build_fold_addr_expr_loc (input_location, start2);
8487df40 1836
1837 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2cc66f2a 1838 end1 = fold_build_pointer_plus (start1, end1);
8487df40 1839
f9e245b2 1840 p1 = create_tmp_var (TREE_TYPE (start1));
75a70cf9 1841 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
8487df40 1842 append_to_statement_list (t, &ret);
1843
1844 if (arg2)
1845 {
f9e245b2 1846 p2 = create_tmp_var (TREE_TYPE (start2));
75a70cf9 1847 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
8487df40 1848 append_to_statement_list (t, &ret);
1849 }
1850
e60a6f7b 1851 lab = create_artificial_label (input_location);
8487df40 1852 t = build1 (LABEL_EXPR, void_type_node, lab);
1853 append_to_statement_list (t, &ret);
1854
d01f58f9 1855 argarray[i++] = p1;
8487df40 1856 if (arg2)
d01f58f9 1857 argarray[i++] = p2;
2f2c591f 1858 /* Handle default arguments. */
93bb78b6 1859 for (parm = defparm; parm && parm != void_list_node;
1860 parm = TREE_CHAIN (parm), i++)
d01f58f9 1861 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1862 TREE_PURPOSE (parm), fn,
1863 i - is_method, tf_warning_or_error);
d01f58f9 1864 t = build_call_a (fn, i, argarray);
c1be427d 1865 t = fold_convert (void_type_node, t);
1866 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1867 append_to_statement_list (t, &ret);
1868
2cc66f2a 1869 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1870 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
8487df40 1871 append_to_statement_list (t, &ret);
1872
1873 if (arg2)
1874 {
2cc66f2a 1875 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1876 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
8487df40 1877 append_to_statement_list (t, &ret);
1878 }
1879
1880 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1881 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1882 append_to_statement_list (t, &ret);
1883
1884 return ret;
1885 }
1886 else
1887 {
389dd41b 1888 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
8487df40 1889 if (arg2)
389dd41b 1890 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2f2c591f 1891 /* Handle default arguments. */
93bb78b6 1892 for (parm = defparm; parm && parm != void_list_node;
d01f58f9 1893 parm = TREE_CHAIN (parm), i++)
1894 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1895 TREE_PURPOSE (parm), fn,
1896 i - is_method, tf_warning_or_error);
c1be427d 1897 t = build_call_a (fn, i, argarray);
1898 t = fold_convert (void_type_node, t);
1899 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1900 }
1901}
1902
1903/* Return code to initialize DECL with its default constructor, or
1904 NULL if there's nothing to do. */
1905
1906tree
a49c5913 1907cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
8487df40 1908{
1909 tree info = CP_OMP_CLAUSE_INFO (clause);
1910 tree ret = NULL;
1911
1912 if (info)
1913 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1914
1915 return ret;
1916}
1917
1918/* Return code to initialize DST with a copy constructor from SRC. */
1919
1920tree
1921cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1922{
1923 tree info = CP_OMP_CLAUSE_INFO (clause);
1924 tree ret = NULL;
1925
1926 if (info)
1927 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1928 if (ret == NULL)
75a70cf9 1929 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1930
1931 return ret;
1932}
1933
1934/* Similarly, except use an assignment operator instead. */
1935
1936tree
1937cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1938{
1939 tree info = CP_OMP_CLAUSE_INFO (clause);
1940 tree ret = NULL;
1941
1942 if (info)
1943 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1944 if (ret == NULL)
75a70cf9 1945 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1946
1947 return ret;
1948}
1949
1950/* Return code to destroy DECL. */
1951
1952tree
1953cxx_omp_clause_dtor (tree clause, tree decl)
1954{
1955 tree info = CP_OMP_CLAUSE_INFO (clause);
1956 tree ret = NULL;
1957
1958 if (info)
1959 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1960
1961 return ret;
dddab69e 1962}
df2c34fc 1963
1964/* True if OpenMP should privatize what this DECL points to rather
1965 than the DECL itself. */
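
/* E.g. (illustrative user code):

     int &r = i;
     #pragma omp parallel firstprivate (r)

   privatizes the int that 'r' refers to, not the reference itself; the
   same holds for parms passed by invisible reference.  */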
1966
1967bool
9f627b1a 1968cxx_omp_privatize_by_reference (const_tree decl)
df2c34fc 1969{
90ad495b 1970 return (TYPE_REF_P (TREE_TYPE (decl))
bc7bff74 1971 || is_invisiref_parm (decl));
df2c34fc 1972}
fd6481cf 1973
2169f33b 1974/* Return true if DECL is a const-qualified variable having no mutable member. */
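/* For example (illustrative):

     struct S { mutable int m; };
     const S s;        // has a mutable member, so not covered
     const int c = 1;  // const with no mutable member, so covered

   Only variables like 'c' make the predicate below true.  */
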
1975bool
1976cxx_omp_const_qual_no_mutable (tree decl)
fd6481cf 1977{
2169f33b 1978 tree type = TREE_TYPE (decl);
90ad495b 1979 if (TYPE_REF_P (type))
fd6481cf 1980 {
1981 if (!is_invisiref_parm (decl))
2169f33b 1982 return false;
fd6481cf 1983 type = TREE_TYPE (type);
1984
1985 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1986 {
1987 /* NVR doesn't preserve const qualification of the
1988 variable's type. */
1989 tree outer = outer_curly_brace_block (current_function_decl);
1990 tree var;
1991
1992 if (outer)
1767a056 1993 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1994 if (VAR_P (var)
1995 && DECL_NAME (decl) == DECL_NAME (var)
fd6481cf 1996 && (TYPE_MAIN_VARIANT (type)
1997 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1998 {
1999 if (TYPE_READONLY (TREE_TYPE (var)))
2000 type = TREE_TYPE (var);
2001 break;
2002 }
2003 }
2004 }
2005
2006 if (type == error_mark_node)
2169f33b 2007 return false;
fd6481cf 2008
2009 /* Variables with const-qualified type having no mutable member
2010 are predetermined shared. */
2011 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2169f33b 2012 return true;
2013
2014 return false;
2015}
2016
2017/* True if OpenMP sharing attribute of DECL is predetermined. */
2018
2019enum omp_clause_default_kind
b16a5119 2020cxx_omp_predetermined_sharing_1 (tree decl)
2169f33b 2021{
2022 /* Static data members are predetermined shared. */
2023 if (TREE_STATIC (decl))
2024 {
2025 tree ctx = CP_DECL_CONTEXT (decl);
2026 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2027 return OMP_CLAUSE_DEFAULT_SHARED;
2028 }
2029
7e5a76c8 2030 /* 'this' may not be specified in data-sharing clauses, but we
 2031 still need to predetermine it as firstprivate.  */
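  /* E.g. (illustrative): in a member function, uses of 'this' inside

       #pragma omp parallel

     are predetermined firstprivate even though 'this' cannot be listed
     in a data-sharing clause.  */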
2032 if (decl == current_class_ptr)
2033 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
fd6481cf 2034
2035 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2036}
2037
b16a5119 2038/* Likewise, but also include the artificial vars.  We don't want to
 2039 disallow the artificial vars from being mentioned in explicit clauses,
 2040 as we use artificial vars e.g. for loop constructs with random
 2041 access iterators other than pointers, but during gimplification
 2042 we want to treat them as predetermined. */
2043
2044enum omp_clause_default_kind
2045cxx_omp_predetermined_sharing (tree decl)
2046{
2047 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2048 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2049 return ret;
2050
2051 /* Predetermine artificial variables holding integral values, those
2052 are usually result of gimplify_one_sizepos or SAVE_EXPR
2053 gimplification. */
2054 if (VAR_P (decl)
2055 && DECL_ARTIFICIAL (decl)
2056 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2057 && !(DECL_LANG_SPECIFIC (decl)
2058 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2059 return OMP_CLAUSE_DEFAULT_SHARED;
2060
2061 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2062}
2063
fd6481cf 2064/* Finalize an implicitly determined clause. */
2065
2066void
691447ab 2067cxx_omp_finish_clause (tree c, gimple_seq *)
fd6481cf 2068{
2069 tree decl, inner_type;
2070 bool make_shared = false;
2071
2072 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2073 return;
2074
2075 decl = OMP_CLAUSE_DECL (c);
2076 decl = require_complete_type (decl);
2077 inner_type = TREE_TYPE (decl);
2078 if (decl == error_mark_node)
2079 make_shared = true;
90ad495b 2080 else if (TYPE_REF_P (TREE_TYPE (decl)))
43895be5 2081 inner_type = TREE_TYPE (inner_type);
fd6481cf 2082
2083 /* We're interested in the base element, not arrays. */
2084 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2085 inner_type = TREE_TYPE (inner_type);
2086
2087 /* Check for special function availability by building a call to one.
2088 Save the results, because later we won't be in the right context
2089 for making these queries. */
2090 if (!make_shared
2091 && CLASS_TYPE_P (inner_type)
bc7bff74 2092 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
fd6481cf 2093 make_shared = true;
2094
2095 if (make_shared)
1c3f8c56 2096 {
2097 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2098 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2099 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2100 }
fd6481cf 2101}
43895be5 2102
2103/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 2104 disregarded in an OpenMP construct, because it is going to be
2105 remapped during OpenMP lowering. SHARED is true if DECL
2106 is going to be shared, false if it is going to be privatized. */
2107
2108bool
2109cxx_omp_disregard_value_expr (tree decl, bool shared)
2110{
2111 return !shared
2112 && VAR_P (decl)
2113 && DECL_HAS_VALUE_EXPR_P (decl)
2114 && DECL_ARTIFICIAL (decl)
2115 && DECL_LANG_SPECIFIC (decl)
2116 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2117}
d2c63826 2118
69f54cf5 2119/* Fold expression X which is used as an rvalue if RVAL is true. */
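
/* For instance (illustrative): given  const int n = 4;  an rvalue use
   of 'n' is replaced with 4 via decl_constant_value in the loop below,
   while an lvalue use such as  &n  is left alone (RVAL is false
   there).  */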
2120
2121static tree
2122cp_fold_maybe_rvalue (tree x, bool rval)
2123{
e71bb662 2124 while (true)
69f54cf5 2125 {
e71bb662 2126 x = cp_fold (x);
ac6641ca 2127 if (rval && DECL_P (x)
90ad495b 2128 && !TYPE_REF_P (TREE_TYPE (x)))
e71bb662 2129 {
2130 tree v = decl_constant_value (x);
2131 if (v != x && v != error_mark_node)
2132 {
2133 x = v;
2134 continue;
2135 }
2136 }
2137 break;
69f54cf5 2138 }
e71bb662 2139 return x;
69f54cf5 2140}
2141
2142/* Fold expression X which is used as an rvalue. */
2143
2144static tree
2145cp_fold_rvalue (tree x)
2146{
2147 return cp_fold_maybe_rvalue (x, true);
2148}
2149
d93ee6f8 2150/* Perform folding on expression X. */
2151
2152tree
2153cp_fully_fold (tree x)
2154{
2155 if (processing_template_decl)
2156 return x;
2157 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2158 have to call both. */
2159 if (cxx_dialect >= cxx11)
5f9e77dd 2160 {
2161 x = maybe_constant_value (x);
2162 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2163 a TARGET_EXPR; undo that here. */
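      /* Illustrative (an assumption about a typical case): for an
	 aggregate initializer such as  S{1, 2}  with constant elements,
	 the CONSTRUCTOR can come back wrapped in a TARGET_EXPR, which
	 the checks below strip.  */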
2164 if (TREE_CODE (x) == TARGET_EXPR)
2165 x = TARGET_EXPR_INITIAL (x);
2166 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2167 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2168 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2169 x = TREE_OPERAND (x, 0);
2170 }
d93ee6f8 2171 return cp_fold_rvalue (x);
2172}
2173
da562e32 2174/* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2175 and certain changes are made to the folding done. Or should be (FIXME). We
2176 never touch maybe_const, as it is only used for the C front-end
2177 C_MAYBE_CONST_EXPR. */
2178
2179tree
69cd03b2 2180c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
da562e32 2181{
69cd03b2 2182 return cp_fold_maybe_rvalue (x, !lval);
da562e32 2183}
2184
2a655a4c 2185static GTY((deletable)) hash_map<tree, tree> *fold_cache;
d2c63826 2186
a0c919f7 2187/* Dispose of the whole FOLD_CACHE. */
2188
2189void
2190clear_fold_cache (void)
2191{
2a655a4c 2192 if (fold_cache != NULL)
2193 fold_cache->empty ();
a0c919f7 2194}
2195
d2c63826 2196/* This function tries to fold an expression X.
2197 To avoid combinatorial explosion, folding results are kept in fold_cache.
d76863c8 2198 If X is invalid, we don't fold at all.
d2c63826 2199 For performance reasons we don't cache expressions representing a
2200 declaration or constant.
 2201 Returns X or its folded variant. */
2202
2203static tree
2204cp_fold (tree x)
2205{
2206 tree op0, op1, op2, op3;
2207 tree org_x = x, r = NULL_TREE;
2208 enum tree_code code;
2209 location_t loc;
69f54cf5 2210 bool rval_ops = true;
d2c63826 2211
8f559c6e 2212 if (!x || x == error_mark_node)
d2c63826 2213 return x;
2214
d76863c8 2215 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
d2c63826 2216 return x;
2217
2218 /* Don't bother to cache DECLs or constants. */
2219 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2220 return x;
2221
2a655a4c 2222 if (fold_cache == NULL)
2223 fold_cache = hash_map<tree, tree>::create_ggc (101);
2224
2225 if (tree *cached = fold_cache->get (x))
2226 return *cached;
d2c63826 2227
2228 code = TREE_CODE (x);
2229 switch (code)
2230 {
d1cd4a64 2231 case CLEANUP_POINT_EXPR:
2232 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2233 effects. */
2234 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2235 if (!TREE_SIDE_EFFECTS (r))
2236 x = r;
2237 break;
2238
d2c63826 2239 case SIZEOF_EXPR:
2240 x = fold_sizeof_expr (x);
2241 break;
2242
2243 case VIEW_CONVERT_EXPR:
69f54cf5 2244 rval_ops = false;
e3533433 2245 /* FALLTHRU */
d2c63826 2246 case CONVERT_EXPR:
2247 case NOP_EXPR:
2248 case NON_LVALUE_EXPR:
2249
2250 if (VOID_TYPE_P (TREE_TYPE (x)))
ca29c574 2251 {
2252 /* This is just to make sure we don't end up with casts to
2253 void from error_mark_node. If we just return x, then
2254 cp_fold_r might fold the operand into error_mark_node and
2255 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2256 during gimplification doesn't like such casts.
 2257 Don't create a new tree even if op0 != TREE_OPERAND (x, 0);
 2258 the folded operand is already in the caches, and if needed
 2259 cp_fold_r will modify it in place. */
2260 op0 = cp_fold (TREE_OPERAND (x, 0));
2261 if (op0 == error_mark_node)
2262 x = error_mark_node;
2263 break;
2264 }
d2c63826 2265
d2c63826 2266 loc = EXPR_LOCATION (x);
f7d61b1e 2267 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2268
b981525c 2269 if (code == CONVERT_EXPR
2270 && SCALAR_TYPE_P (TREE_TYPE (x))
2271 && op0 != void_node)
2272 /* During parsing we used convert_to_*_nofold; re-convert now using the
2273 folding variants, since fold() doesn't do those transformations. */
2274 x = fold (convert (TREE_TYPE (x), op0));
2275 else if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2276 {
2277 if (op0 == error_mark_node)
2278 x = error_mark_node;
2279 else
2280 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2281 }
111e415b 2282 else
2283 x = fold (x);
d2c63826 2284
2285 /* Conversion of an out-of-range value has implementation-defined
2286 behavior; the language considers it different from arithmetic
2287 overflow, which is undefined. */
2288 if (TREE_CODE (op0) == INTEGER_CST
2289 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2290 TREE_OVERFLOW (x) = false;
2291
2292 break;
2293
bcb45dab 2294 case INDIRECT_REF:
2295 /* We don't need the decltype(auto) obfuscation anymore. */
2296 if (REF_PARENTHESIZED_P (x))
2297 {
2298 tree p = maybe_undo_parenthesized_ref (x);
e71bb662 2299 return cp_fold (p);
bcb45dab 2300 }
2301 goto unary;
2302
d2c63826 2303 case ADDR_EXPR:
e885b147 2304 loc = EXPR_LOCATION (x);
2305 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2306
2307 /* Cope with user tricks that amount to offsetof. */
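      /* E.g. (illustrative user code):  &((struct S *) 0)->b  has a
	 constant base, so the address can be folded to a constant via
	 fold_offsetof below rather than left as a runtime
	 computation.  */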
2308 if (op0 != error_mark_node
2309 && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2310 && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2311 {
2312 tree val = get_base_address (op0);
2313 if (val
2314 && INDIRECT_REF_P (val)
2315 && COMPLETE_TYPE_P (TREE_TYPE (val))
2316 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2317 {
2318 val = TREE_OPERAND (val, 0);
2319 STRIP_NOPS (val);
26364f3e 2320 val = maybe_constant_value (val);
e885b147 2321 if (TREE_CODE (val) == INTEGER_CST)
3c43ed34 2322 return fold_offsetof (op0, TREE_TYPE (x));
e885b147 2323 }
2324 }
2325 goto finish_unary;
2326
d2c63826 2327 case REALPART_EXPR:
2328 case IMAGPART_EXPR:
69f54cf5 2329 rval_ops = false;
e3533433 2330 /* FALLTHRU */
d2c63826 2331 case CONJ_EXPR:
2332 case FIX_TRUNC_EXPR:
2333 case FLOAT_EXPR:
2334 case NEGATE_EXPR:
2335 case ABS_EXPR:
1c67942e 2336 case ABSU_EXPR:
d2c63826 2337 case BIT_NOT_EXPR:
2338 case TRUTH_NOT_EXPR:
2339 case FIXED_CONVERT_EXPR:
bcb45dab 2340 unary:
d2c63826 2341
2342 loc = EXPR_LOCATION (x);
69f54cf5 2343 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2344
e885b147 2345 finish_unary:
d2c63826 2346 if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2347 {
2348 if (op0 == error_mark_node)
2349 x = error_mark_node;
2350 else
89f17a65 2351 {
2352 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2353 if (code == INDIRECT_REF
2354 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2355 {
2356 TREE_READONLY (x) = TREE_READONLY (org_x);
2357 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2358 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2359 }
2360 }
1b8c43ab 2361 }
111e415b 2362 else
2363 x = fold (x);
d2c63826 2364
2365 gcc_assert (TREE_CODE (x) != COND_EXPR
2366 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2367 break;
2368
a2eb1271 2369 case UNARY_PLUS_EXPR:
2370 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2371 if (op0 == error_mark_node)
2372 x = error_mark_node;
2373 else
2374 x = fold_convert (TREE_TYPE (x), op0);
2375 break;
2376
d2c63826 2377 case POSTDECREMENT_EXPR:
2378 case POSTINCREMENT_EXPR:
2379 case INIT_EXPR:
d2c63826 2380 case PREDECREMENT_EXPR:
2381 case PREINCREMENT_EXPR:
2382 case COMPOUND_EXPR:
69f54cf5 2383 case MODIFY_EXPR:
2384 rval_ops = false;
e3533433 2385 /* FALLTHRU */
d2c63826 2386 case POINTER_PLUS_EXPR:
2387 case PLUS_EXPR:
57e83b58 2388 case POINTER_DIFF_EXPR:
d2c63826 2389 case MINUS_EXPR:
2390 case MULT_EXPR:
2391 case TRUNC_DIV_EXPR:
2392 case CEIL_DIV_EXPR:
2393 case FLOOR_DIV_EXPR:
2394 case ROUND_DIV_EXPR:
2395 case TRUNC_MOD_EXPR:
2396 case CEIL_MOD_EXPR:
2397 case ROUND_MOD_EXPR:
2398 case RDIV_EXPR:
2399 case EXACT_DIV_EXPR:
2400 case MIN_EXPR:
2401 case MAX_EXPR:
2402 case LSHIFT_EXPR:
2403 case RSHIFT_EXPR:
2404 case LROTATE_EXPR:
2405 case RROTATE_EXPR:
2406 case BIT_AND_EXPR:
2407 case BIT_IOR_EXPR:
2408 case BIT_XOR_EXPR:
2409 case TRUTH_AND_EXPR:
2410 case TRUTH_ANDIF_EXPR:
2411 case TRUTH_OR_EXPR:
2412 case TRUTH_ORIF_EXPR:
2413 case TRUTH_XOR_EXPR:
2414 case LT_EXPR: case LE_EXPR:
2415 case GT_EXPR: case GE_EXPR:
2416 case EQ_EXPR: case NE_EXPR:
2417 case UNORDERED_EXPR: case ORDERED_EXPR:
2418 case UNLT_EXPR: case UNLE_EXPR:
2419 case UNGT_EXPR: case UNGE_EXPR:
2420 case UNEQ_EXPR: case LTGT_EXPR:
2421 case RANGE_EXPR: case COMPLEX_EXPR:
d2c63826 2422
2423 loc = EXPR_LOCATION (x);
69f54cf5 2424 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2425 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
d2c63826 2426
2427 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
1b8c43ab 2428 {
2429 if (op0 == error_mark_node || op1 == error_mark_node)
2430 x = error_mark_node;
2431 else
2432 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2433 }
111e415b 2434 else
2435 x = fold (x);
d2c63826 2436
4d984926 2437 /* This is only needed for -Wnonnull-compare and only if
2438 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2439 generation, we do it always. */
2440 if (COMPARISON_CLASS_P (org_x))
2cde02ad 2441 {
2442 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2443 ;
2444 else if (COMPARISON_CLASS_P (x))
4d984926 2445 {
2446 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2447 TREE_NO_WARNING (x) = 1;
2448 }
2cde02ad 2449 /* Otherwise give up on optimizing these, let GIMPLE folders
2450 optimize those later on. */
2451 else if (op0 != TREE_OPERAND (org_x, 0)
2452 || op1 != TREE_OPERAND (org_x, 1))
2453 {
2454 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
4d984926 2455 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2456 TREE_NO_WARNING (x) = 1;
2cde02ad 2457 }
2458 else
2459 x = org_x;
2460 }
d2c63826 2461 break;
2462
2463 case VEC_COND_EXPR:
2464 case COND_EXPR:
d2c63826 2465 loc = EXPR_LOCATION (x);
69f54cf5 2466 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
d2c63826 2467 op1 = cp_fold (TREE_OPERAND (x, 1));
2468 op2 = cp_fold (TREE_OPERAND (x, 2));
2469
7a7ca07c 2470 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2471 {
f9542e61 2472 warning_sentinel s (warn_int_in_bool_context);
7a7ca07c 2473 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2474 op1 = cp_truthvalue_conversion (op1);
2475 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2476 op2 = cp_truthvalue_conversion (op2);
2477 }
3c382e05 2478 else if (VOID_TYPE_P (TREE_TYPE (x)))
2479 {
2480 if (TREE_CODE (op0) == INTEGER_CST)
2481 {
2482 /* If the condition is constant, fold can fold away
2483 the COND_EXPR. If some statement-level uses of COND_EXPR
 2484 have one of the branches NULL, avoid a crash when folding. */
2485 if (!op1)
2486 op1 = build_empty_stmt (loc);
2487 if (!op2)
2488 op2 = build_empty_stmt (loc);
2489 }
2490 else
2491 {
2492 /* Otherwise, don't bother folding a void condition, since
2493 it can't produce a constant value. */
2494 if (op0 != TREE_OPERAND (x, 0)
2495 || op1 != TREE_OPERAND (x, 1)
2496 || op2 != TREE_OPERAND (x, 2))
2497 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2498 break;
2499 }
2500 }
7a7ca07c 2501
f6dfb86a 2502 if (op0 != TREE_OPERAND (x, 0)
2503 || op1 != TREE_OPERAND (x, 1)
2504 || op2 != TREE_OPERAND (x, 2))
1b8c43ab 2505 {
2506 if (op0 == error_mark_node
2507 || op1 == error_mark_node
2508 || op2 == error_mark_node)
2509 x = error_mark_node;
2510 else
2511 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2512 }
f6dfb86a 2513 else
d2c63826 2514 x = fold (x);
2515
bf64d98a 2516 /* A COND_EXPR might have incompatible types in branches if one or both
2517 arms are bitfields. If folding exposed such a branch, fix it up. */
ec72e2f7 2518 if (TREE_CODE (x) != code
6fa371d1 2519 && x != error_mark_node
ec72e2f7 2520 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2521 x = fold_convert (TREE_TYPE (org_x), x);
bf64d98a 2522
d2c63826 2523 break;
2524
2525 case CALL_EXPR:
2526 {
2527 int i, m, sv = optimize, nw = sv, changed = 0;
2528 tree callee = get_callee_fndecl (x);
2529
efe6a40a 2530 /* Some built-in function calls will be evaluated at compile-time in
2531 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2532 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
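	/* Illustrative user code (an assumption, not from this file):

	     constexpr int f (int i) { return __builtin_constant_p (i); }

	   at -O0 the temporary bump of 'optimize' below keeps
	   fold_builtin_1 from folding the call to 0 prematurely.  */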
a0e9bfbb 2533 if (callee && fndecl_built_in_p (callee) && !optimize
d2c63826 2534 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2535 && current_function_decl
2536 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2537 nw = 1;
d2c63826 2538
18d371d3 2539 /* Defer folding __builtin_is_constant_evaluated. */
2540 if (callee
a0e9bfbb 2541 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2542 BUILT_IN_FRONTEND))
18d371d3 2543 break;
2544
d2c63826 2545 x = copy_node (x);
2546
2547 m = call_expr_nargs (x);
2548 for (i = 0; i < m; i++)
2549 {
2550 r = cp_fold (CALL_EXPR_ARG (x, i));
2551 if (r != CALL_EXPR_ARG (x, i))
1b8c43ab 2552 {
2553 if (r == error_mark_node)
2554 {
2555 x = error_mark_node;
2556 break;
2557 }
2558 changed = 1;
2559 }
d2c63826 2560 CALL_EXPR_ARG (x, i) = r;
2561 }
1b8c43ab 2562 if (x == error_mark_node)
2563 break;
d2c63826 2564
2565 optimize = nw;
2566 r = fold (x);
2567 optimize = sv;
2568
2569 if (TREE_CODE (r) != CALL_EXPR)
2570 {
2571 x = cp_fold (r);
2572 break;
2573 }
2574
2575 optimize = nw;
2576
efe6a40a 2577 /* Invoke maybe_constant_value for functions declared
2578 constexpr and not called with AGGR_INIT_EXPRs.
d2c63826 2579 TODO:
efe6a40a 2580 Do constexpr expansion of expressions where the call itself is not
2581 constant, but the call followed by an INDIRECT_REF is. */
29684344 2582 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2583 && !flag_no_inline)
d9cfff22 2584 r = maybe_constant_value (x);
d2c63826 2585 optimize = sv;
2586
2587 if (TREE_CODE (r) != CALL_EXPR)
2588 {
d9cfff22 2589 if (DECL_CONSTRUCTOR_P (callee))
2590 {
2591 loc = EXPR_LOCATION (x);
2592 tree s = build_fold_indirect_ref_loc (loc,
2593 CALL_EXPR_ARG (x, 0));
2594 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2595 }
d2c63826 2596 x = r;
2597 break;
2598 }
2599
2600 if (!changed)
2601 x = org_x;
2602 break;
2603 }
2604
2605 case CONSTRUCTOR:
2606 {
2607 unsigned i;
2608 constructor_elt *p;
2609 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
41a5cb89 2610 vec<constructor_elt, va_gc> *nelts = NULL;
d2c63826 2611 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
41a5cb89 2612 {
2613 tree op = cp_fold (p->value);
41a5cb89 2614 if (op != p->value)
1b8c43ab 2615 {
2616 if (op == error_mark_node)
2617 {
2618 x = error_mark_node;
4ecaaab2 2619 vec_free (nelts);
1b8c43ab 2620 break;
2621 }
4ecaaab2 2622 if (nelts == NULL)
2623 nelts = elts->copy ();
2624 (*nelts)[i].value = op;
1b8c43ab 2625 }
41a5cb89 2626 }
4ecaaab2 2627 if (nelts)
7604a798 2628 {
2629 x = build_constructor (TREE_TYPE (x), nelts);
2630 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2631 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2632 }
f82dc839 2633 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2634 x = fold (x);
d2c63826 2635 break;
2636 }
2637 case TREE_VEC:
2638 {
2639 bool changed = false;
2640 vec<tree, va_gc> *vec = make_tree_vector ();
2641 int i, n = TREE_VEC_LENGTH (x);
2642 vec_safe_reserve (vec, n);
2643
2644 for (i = 0; i < n; i++)
2645 {
2646 tree op = cp_fold (TREE_VEC_ELT (x, i));
2647 vec->quick_push (op);
2648 if (op != TREE_VEC_ELT (x, i))
2649 changed = true;
2650 }
2651
2652 if (changed)
2653 {
2654 r = copy_node (x);
2655 for (i = 0; i < n; i++)
2656 TREE_VEC_ELT (r, i) = (*vec)[i];
2657 x = r;
2658 }
2659
2660 release_tree_vector (vec);
2661 }
2662
2663 break;
2664
2665 case ARRAY_REF:
2666 case ARRAY_RANGE_REF:
2667
2668 loc = EXPR_LOCATION (x);
2669 op0 = cp_fold (TREE_OPERAND (x, 0));
2670 op1 = cp_fold (TREE_OPERAND (x, 1));
2671 op2 = cp_fold (TREE_OPERAND (x, 2));
2672 op3 = cp_fold (TREE_OPERAND (x, 3));
2673
1b8c43ab 2674 if (op0 != TREE_OPERAND (x, 0)
2675 || op1 != TREE_OPERAND (x, 1)
2676 || op2 != TREE_OPERAND (x, 2)
2677 || op3 != TREE_OPERAND (x, 3))
2678 {
2679 if (op0 == error_mark_node
2680 || op1 == error_mark_node
2681 || op2 == error_mark_node
2682 || op3 == error_mark_node)
2683 x = error_mark_node;
2684 else
89f17a65 2685 {
2686 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2687 TREE_READONLY (x) = TREE_READONLY (org_x);
2688 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2689 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2690 }
1b8c43ab 2691 }
d2c63826 2692
2693 x = fold (x);
2694 break;
2695
2af642bf 2696 case SAVE_EXPR:
2697 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2698 folding, evaluates to an invariant. In that case no need to wrap
2699 this folded tree with a SAVE_EXPR. */
2700 r = cp_fold (TREE_OPERAND (x, 0));
2701 if (tree_invariant_p (r))
2702 x = r;
2703 break;
2704
d2c63826 2705 default:
2706 return org_x;
2707 }
2708
2a655a4c 2709 fold_cache->put (org_x, x);
d2c63826 2710 /* Make sure we don't try to fold an already folded result again. */
2711 if (x != org_x)
2a655a4c 2712 fold_cache->put (x, x);
d2c63826 2713
2714 return x;
2715}
2716
863c62e0 2717/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
2718
2719tree
2720lookup_hotness_attribute (tree list)
2721{
2722 for (; list; list = TREE_CHAIN (list))
2723 {
2724 tree name = get_attribute_name (list);
2725 if (is_attribute_p ("hot", name)
2726 || is_attribute_p ("cold", name)
2727 || is_attribute_p ("likely", name)
2728 || is_attribute_p ("unlikely", name))
2729 break;
2730 }
2731 return list;
2732}
2733
 2734/* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
2735
2736static tree
2737remove_hotness_attribute (tree list)
2738{
2739 list = remove_attribute ("hot", list);
2740 list = remove_attribute ("cold", list);
2741 list = remove_attribute ("likely", list);
2742 list = remove_attribute ("unlikely", list);
2743 return list;
2744}
2745
2746/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2747 PREDICT_EXPR. */
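
/* For example (illustrative):

     if (x)
       [[likely]] f ();

   results in a PREDICT_EXPR <hot label, taken> being added ahead of the
   statement, and the attribute is removed from the list.  */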
2748
2749tree
2750process_stmt_hotness_attribute (tree std_attrs)
2751{
2752 if (std_attrs == error_mark_node)
2753 return std_attrs;
2754 if (tree attr = lookup_hotness_attribute (std_attrs))
2755 {
2756 tree name = get_attribute_name (attr);
2757 bool hot = (is_attribute_p ("hot", name)
2758 || is_attribute_p ("likely", name));
2759 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2760 hot ? TAKEN : NOT_TAKEN);
2761 SET_EXPR_LOCATION (pred, input_location);
2762 add_stmt (pred);
2763 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2764 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2765 get_attribute_name (other), name);
2766 std_attrs = remove_hotness_attribute (std_attrs);
2767 }
2768 return std_attrs;
2769}
2770
d2c63826 2771#include "gt-cp-cp-gimplify.h"