88bce636 1/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
4ee9c684 2
fbd26352 3 Copyright (C) 2002-2019 Free Software Foundation, Inc.
4ee9c684 4 Contributed by Jason Merrill <jason@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
aa139c3f 10Software Foundation; either version 3, or (at your option) any later
4ee9c684 11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
aa139c3f 19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
4ee9c684 21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
4cba6f60 25#include "target.h"
9ef16211 26#include "basic-block.h"
4ee9c684 27#include "cp-tree.h"
9ef16211 28#include "gimple.h"
4cba6f60 29#include "predict.h"
9ef16211 30#include "stor-layout.h"
75a70cf9 31#include "tree-iterator.h"
a8783bee 32#include "gimplify.h"
020bc656 33#include "c-family/c-ubsan.h"
30a86690 34#include "stringpool.h"
35#include "attribs.h"
9917317a 36#include "asan.h"
863c62e0 37#include "gcc-rich-location.h"
4ee9c684 38
57cf061a 39/* Forward declarations. */
40
41static tree cp_genericize_r (tree *, int *, void *);
d2c63826 42static tree cp_fold_r (tree *, int *, void *);
a0168bf5 43static void cp_genericize_tree (tree*, bool);
d2c63826 44static tree cp_fold (tree);
57cf061a 45
e7911019 46/* Local declarations. */
47
48enum bc_t { bc_break = 0, bc_continue = 1 };
49
8487df40 50/* Stack of labels which are targets for "break" or "continue",
51 linked through TREE_CHAIN. */
52static tree bc_label[2];
e7911019 53
54/* Begin a scope which can be exited by a break or continue statement. BC
55 indicates which.
56
57cf061a 57 Just creates a label with location LOCATION and pushes it into the current
58 context. */
e7911019 59
60static tree
57cf061a 61begin_bc_block (enum bc_t bc, location_t location)
e7911019 62{
57cf061a 63 tree label = create_artificial_label (location);
1767a056 64 DECL_CHAIN (label) = bc_label[bc];
8487df40 65 bc_label[bc] = label;
00f21715 66 if (bc == bc_break)
67 LABEL_DECL_BREAK (label) = true;
68 else
69 LABEL_DECL_CONTINUE (label) = true;
e7911019 70 return label;
71}
72
73/* Finish a scope which can be exited by a break or continue statement.
57cf061a 74 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
e7911019 75 an expression for the contents of the scope.
76
77 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
57cf061a 78 BLOCK. Otherwise, just forget the label. */
e7911019 79
57cf061a 80static void
81finish_bc_block (tree *block, enum bc_t bc, tree label)
e7911019 82{
8487df40 83 gcc_assert (label == bc_label[bc]);
e7911019 84
85 if (TREE_USED (label))
57cf061a 86 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
87 block);
e7911019 88
1767a056 89 bc_label[bc] = DECL_CHAIN (label);
90 DECL_CHAIN (label) = NULL_TREE;
e7911019 91}
92
75a70cf9 93/* Get the LABEL_EXPR to represent a break or continue statement
94 in the current block scope. BC indicates which. */
e7911019 95
96static tree
75a70cf9 97get_bc_label (enum bc_t bc)
e7911019 98{
8487df40 99 tree label = bc_label[bc];
e7911019 100
e7911019 101 /* Mark the label used for finish_bc_block. */
102 TREE_USED (label) = 1;
75a70cf9 103 return label;
e7911019 104}
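/* For illustration: given nested loops

     while (a) { while (b) { break; } break; }

   each loop's genericization pushes its own break label with
   begin_bc_block, so the inner "break" resolves to the inner label
   and the outer "break" to the outer one; finish_bc_block then pops
   the label, restoring bc_label[bc_break] for the enclosing loop.  */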
105
4ee9c684 106/* Genericize a TRY_BLOCK. */
107
108static void
109genericize_try_block (tree *stmt_p)
110{
111 tree body = TRY_STMTS (*stmt_p);
112 tree cleanup = TRY_HANDLERS (*stmt_p);
113
831d52a2 114 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
4ee9c684 115}
116
117/* Genericize a HANDLER by converting to a CATCH_EXPR. */
118
119static void
120genericize_catch_block (tree *stmt_p)
121{
122 tree type = HANDLER_TYPE (*stmt_p);
123 tree body = HANDLER_BODY (*stmt_p);
124
4ee9c684 125 /* FIXME should the caught type go in TREE_TYPE? */
831d52a2 126 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
4ee9c684 127}
128
75a70cf9 129/* A terser interface for building a representation of an exception
130 specification. */
131
132static tree
133build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
134{
135 tree t;
136
137 /* FIXME should the allowed types go in TREE_TYPE? */
138 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
139 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
140
141 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
142 append_to_statement_list (body, &TREE_OPERAND (t, 0));
143
144 return t;
145}
146
4ee9c684 147/* Genericize an EH_SPEC_BLOCK by converting it to a
148 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
149
150static void
151genericize_eh_spec_block (tree *stmt_p)
152{
153 tree body = EH_SPEC_STMTS (*stmt_p);
154 tree allowed = EH_SPEC_RAISES (*stmt_p);
c4bac24d 155 tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
4ee9c684 156
75a70cf9 157 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
e627cda1 158 TREE_NO_WARNING (*stmt_p) = true;
159 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
4ee9c684 160}
161
863c62e0 162/* Return the first non-compound statement in STMT. */
163
164tree
165first_stmt (tree stmt)
166{
167 switch (TREE_CODE (stmt))
168 {
169 case STATEMENT_LIST:
170 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
171 return first_stmt (p->stmt);
172 return void_node;
173
174 case BIND_EXPR:
175 return first_stmt (BIND_EXPR_BODY (stmt));
176
177 default:
178 return stmt;
179 }
180}
181
dddab69e 182/* Genericize an IF_STMT by turning it into a COND_EXPR. */
183
184static void
97767aad 185genericize_if_stmt (tree *stmt_p)
dddab69e 186{
551fa2c7 187 tree stmt, cond, then_, else_;
75a70cf9 188 location_t locus = EXPR_LOCATION (*stmt_p);
dddab69e 189
190 stmt = *stmt_p;
551fa2c7 191 cond = IF_COND (stmt);
dddab69e 192 then_ = THEN_CLAUSE (stmt);
193 else_ = ELSE_CLAUSE (stmt);
194
863c62e0 195 if (then_ && else_)
196 {
197 tree ft = first_stmt (then_);
198 tree fe = first_stmt (else_);
199 br_predictor pr;
200 if (TREE_CODE (ft) == PREDICT_EXPR
201 && TREE_CODE (fe) == PREDICT_EXPR
202 && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
203 && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
204 {
205 gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
206 richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
207 warning_at (&richloc, OPT_Wattributes,
208 "both branches of %<if%> statement marked as %qs",
a1e1b603 209 pr == PRED_HOT_LABEL ? "likely" : "unlikely");
863c62e0 210 }
211 }
212
dddab69e 213 if (!then_)
e60a6f7b 214 then_ = build_empty_stmt (locus);
dddab69e 215 if (!else_)
e60a6f7b 216 else_ = build_empty_stmt (locus);
dddab69e 217
551fa2c7 218 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
219 stmt = then_;
220 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
221 stmt = else_;
222 else
223 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
ebd1f44d 224 if (!EXPR_HAS_LOCATION (stmt))
225 protected_set_expr_location (stmt, locus);
dddab69e 226 *stmt_p = stmt;
227}
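/* For illustration, in

     if (x) [[likely]] f ();
     else [[likely]] g ();

   both arms start with a PREDICT_EXPR using PRED_HOT_LABEL, so the
   code above warns that both branches are marked "likely" before
   lowering the IF_STMT to a COND_EXPR.  */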
228
e7911019 229/* Build a generic representation of one of the C loop forms. COND is the
230 loop condition or NULL_TREE. BODY is the (possibly compound) statement
231 controlled by the loop. INCR is the increment expression of a for-loop,
232 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
233 evaluated before the loop body as in while and for loops, or after the
234 loop body as in do-while loops. */
235
57cf061a 236static void
237genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
238 tree incr, bool cond_is_first, int *walk_subtrees,
239 void *data)
e7911019 240{
57cf061a 241 tree blab, clab;
e6f10d79 242 tree exit = NULL;
57cf061a 243 tree stmt_list = NULL;
244
ebd1f44d 245 protected_set_expr_location (incr, start_locus);
e7911019 246
57cf061a 247 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
57cf061a 248 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
8090d51c 249
250 blab = begin_bc_block (bc_break, start_locus);
251 clab = begin_bc_block (bc_continue, start_locus);
252
253 cp_walk_tree (&body, cp_genericize_r, data, NULL);
57cf061a 254 *walk_subtrees = 0;
e7911019 255
e6f10d79 256 if (cond && TREE_CODE (cond) != INTEGER_CST)
e7911019 257 {
e6f10d79 258 /* If COND is constant, don't bother building an exit. If it's false,
259 we won't build a loop. If it's true, any exits are in the body. */
d3a3cfb8 260 location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
e6f10d79 261 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
262 get_bc_label (bc_break));
263 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
264 build_empty_stmt (cloc), exit);
57cf061a 265 }
e7911019 266
e6f10d79 267 if (exit && cond_is_first)
268 append_to_statement_list (exit, &stmt_list);
57cf061a 269 append_to_statement_list (body, &stmt_list);
270 finish_bc_block (&stmt_list, bc_continue, clab);
271 append_to_statement_list (incr, &stmt_list);
e6f10d79 272 if (exit && !cond_is_first)
273 append_to_statement_list (exit, &stmt_list);
e7911019 274
e6f10d79 275 if (!stmt_list)
276 stmt_list = build_empty_stmt (start_locus);
277
278 tree loop;
279 if (cond && integer_zerop (cond))
280 {
281 if (cond_is_first)
282 loop = fold_build3_loc (start_locus, COND_EXPR,
283 void_type_node, cond, stmt_list,
284 build_empty_stmt (start_locus));
285 else
286 loop = stmt_list;
287 }
288 else
92765e8a 289 {
290 location_t loc = start_locus;
291 if (!cond || integer_nonzerop (cond))
292 loc = EXPR_LOCATION (expr_first (body));
293 if (loc == UNKNOWN_LOCATION)
294 loc = start_locus;
295 loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
296 }
e6f10d79 297
298 stmt_list = NULL;
299 append_to_statement_list (loop, &stmt_list);
300 finish_bc_block (&stmt_list, bc_break, blab);
301 if (!stmt_list)
302 stmt_list = build_empty_stmt (start_locus);
e7911019 303
57cf061a 304 *stmt_p = stmt_list;
e7911019 305}
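/* In sketch form, "while (cond) body" is lowered by the code above to

     LOOP_EXPR:
       if (cond) ; else goto break_label;
       body          (with "continue" lowered to goto cont_label)
       cont_label:
       incr          (for a for-loop)
     break_label:

   whereas a do-while loop places the conditional exit after the body
   instead (cond_is_first is false).  */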
306
57cf061a 307/* Genericize a FOR_STMT node *STMT_P. */
e7911019 308
309static void
57cf061a 310genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
e7911019 311{
312 tree stmt = *stmt_p;
57cf061a 313 tree expr = NULL;
314 tree loop;
315 tree init = FOR_INIT_STMT (stmt);
e7911019 316
57cf061a 317 if (init)
318 {
319 cp_walk_tree (&init, cp_genericize_r, data, NULL);
320 append_to_statement_list (init, &expr);
321 }
e7911019 322
57cf061a 323 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
324 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
325 append_to_statement_list (loop, &expr);
e6f10d79 326 if (expr == NULL_TREE)
327 expr = loop;
57cf061a 328 *stmt_p = expr;
e7911019 329}
330
57cf061a 331/* Genericize a WHILE_STMT node *STMT_P. */
e7911019 332
333static void
57cf061a 334genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
e7911019 335{
336 tree stmt = *stmt_p;
57cf061a 337 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
338 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
e7911019 339}
340
57cf061a 341/* Genericize a DO_STMT node *STMT_P. */
e7911019 342
343static void
57cf061a 344genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
e7911019 345{
346 tree stmt = *stmt_p;
57cf061a 347 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
348 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
e7911019 349}
350
57cf061a 351/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
e7911019 352
353static void
57cf061a 354genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
e7911019 355{
356 tree stmt = *stmt_p;
57cf061a 357 tree break_block, body, cond, type;
358 location_t stmt_locus = EXPR_LOCATION (stmt);
e7911019 359
e7911019 360 body = SWITCH_STMT_BODY (stmt);
361 if (!body)
e60a6f7b 362 body = build_empty_stmt (stmt_locus);
57cf061a 363 cond = SWITCH_STMT_COND (stmt);
364 type = SWITCH_STMT_TYPE (stmt);
e7911019 365
57cf061a 366 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
1f03c410 367
368 break_block = begin_bc_block (bc_break, stmt_locus);
369
370 cp_walk_tree (&body, cp_genericize_r, data, NULL);
57cf061a 371 cp_walk_tree (&type, cp_genericize_r, data, NULL);
372 *walk_subtrees = 0;
e7911019 373
17cf92d6 374 if (TREE_USED (break_block))
375 SWITCH_BREAK_LABEL_P (break_block) = 1;
376 finish_bc_block (&body, bc_break, break_block);
bd37ce3e 377 *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
3501ad33 378 SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
379 gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
380 || !TREE_USED (break_block));
57cf061a 381}
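/* I.e. a SWITCH_STMT such as "switch (cond) { ... break; ... }"
   becomes a SWITCH_EXPR over the genericized body, with the shared
   break label appended after the body so that each "break" lowers to
   a goto that jumps past the switch.  */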
382
383/* Genericize a CONTINUE_STMT node *STMT_P. */
384
385static void
386genericize_continue_stmt (tree *stmt_p)
387{
388 tree stmt_list = NULL;
389 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
390 tree label = get_bc_label (bc_continue);
391 location_t location = EXPR_LOCATION (*stmt_p);
392 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
22e029d2 393 append_to_statement_list_force (pred, &stmt_list);
57cf061a 394 append_to_statement_list (jump, &stmt_list);
395 *stmt_p = stmt_list;
e7911019 396}
397
57cf061a 398/* Genericize a BREAK_STMT node *STMT_P. */
399
400static void
401genericize_break_stmt (tree *stmt_p)
402{
403 tree label = get_bc_label (bc_break);
404 location_t location = EXPR_LOCATION (*stmt_p);
405 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
406}
407
408/* Genericize an OMP_FOR node *STMT_P. */
409
410static void
411genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
412{
413 tree stmt = *stmt_p;
414 location_t locus = EXPR_LOCATION (stmt);
415 tree clab = begin_bc_block (bc_continue, locus);
416
417 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
0dfe87da 418 if (TREE_CODE (stmt) != OMP_TASKLOOP)
419 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
57cf061a 420 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
421 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
422 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
423 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
424 *walk_subtrees = 0;
425
426 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
427}
428
429/* Hook into the middle of gimplifying an OMP_FOR node. */
8487df40 430
431static enum gimplify_status
75a70cf9 432cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
8487df40 433{
434 tree for_stmt = *expr_p;
75a70cf9 435 gimple_seq seq = NULL;
8487df40 436
437 /* Protect ourselves from recursion. */
438 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
439 return GS_UNHANDLED;
440 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
441
75a70cf9 442 gimplify_and_add (for_stmt, &seq);
75a70cf9 443 gimple_seq_add_seq (pre_p, seq);
8487df40 444
8487df40 445 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
446
447 return GS_ALL_DONE;
448}
449
7219fab5 450/* Gimplify an EXPR_STMT node. */
451
452static void
453gimplify_expr_stmt (tree *stmt_p)
454{
455 tree stmt = EXPR_STMT_EXPR (*stmt_p);
456
457 if (stmt == error_mark_node)
458 stmt = NULL;
459
460 /* Gimplification of a statement expression will nullify the
461 statement if all its side effects are moved to *PRE_P and *POST_P.
462
463 In this case we will not want to emit the gimplified statement.
464 However, we may still want to emit a warning, so we do that before
465 gimplification. */
43667bd3 466 if (stmt && warn_unused_value)
7219fab5 467 {
468 if (!TREE_SIDE_EFFECTS (stmt))
469 {
470 if (!IS_EMPTY_STMT (stmt)
471 && !VOID_TYPE_P (TREE_TYPE (stmt))
472 && !TREE_NO_WARNING (stmt))
43667bd3 473 warning (OPT_Wunused_value, "statement with no effect");
7219fab5 474 }
43667bd3 475 else
7219fab5 476 warn_if_unused_value (stmt, input_location);
477 }
478
479 if (stmt == NULL_TREE)
480 stmt = alloc_stmt_list ();
481
482 *stmt_p = stmt;
483}
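/* For example, assuming a plain int variable x, the expression
   statement

     x == 1;

   has no side effects, is not empty, and does not have void type, so
   with -Wunused-value the "statement with no effect" warning above
   fires before the statement is gimplified away.  */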
484
4ee9c684 485/* Gimplify initialization from an AGGR_INIT_EXPR. */
486
487static void
da73cc75 488cp_gimplify_init_expr (tree *expr_p)
4ee9c684 489{
490 tree from = TREE_OPERAND (*expr_p, 1);
491 tree to = TREE_OPERAND (*expr_p, 0);
930e8175 492 tree t;
4ee9c684 493
4ee9c684 494 /* What about code that pulls out the temp and uses it elsewhere? I
495 think that such code never uses the TARGET_EXPR as an initializer. If
496 I'm wrong, we'll abort because the temp won't have any RTL. In that
497 case, I guess we'll need to replace references somehow. */
498 if (TREE_CODE (from) == TARGET_EXPR)
8e9e8d76 499 from = TARGET_EXPR_INITIAL (from);
4ee9c684 500
c3d09d4d 501 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
502 inside the TARGET_EXPR. */
930e8175 503 for (t = from; t; )
504 {
505 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
4ee9c684 506
930e8175 507 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
508 replace the slot operand with our target.
4ee9c684 509
930e8175 510 Should we add a target parm to gimplify_expr instead? No, as in this
511 case we want to replace the INIT_EXPR. */
a8b75081 512 if (TREE_CODE (sub) == AGGR_INIT_EXPR
513 || TREE_CODE (sub) == VEC_INIT_EXPR)
930e8175 514 {
a8b75081 515 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
516 AGGR_INIT_EXPR_SLOT (sub) = to;
517 else
518 VEC_INIT_EXPR_SLOT (sub) = to;
930e8175 519 *expr_p = from;
520
521 /* The initialization is now a side-effect, so the container can
522 become void. */
523 if (from != sub)
524 TREE_TYPE (from) = void_type_node;
525 }
930e8175 526
ffc5ad9b 527 /* Handle aggregate NSDMI. */
528 replace_placeholders (sub, to);
cf72f34d 529
930e8175 530 if (t == sub)
531 break;
532 else
533 t = TREE_OPERAND (t, 1);
4ee9c684 534 }
930e8175 535
4ee9c684 536}
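/* A sketch of the common case: for

     S s = S (args);

   FROM is a TARGET_EXPR whose initializer is an AGGR_INIT_EXPR that
   constructs into the TARGET_EXPR's temporary; the loop above
   redirects that slot to "s", so the object is constructed in place
   and the enclosing INIT_EXPR is dropped.  */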
537
538/* Gimplify a MUST_NOT_THROW_EXPR. */
539
75a70cf9 540static enum gimplify_status
541gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
4ee9c684 542{
543 tree stmt = *expr_p;
2363ef00 544 tree temp = voidify_wrapper_expr (stmt, NULL);
4ee9c684 545 tree body = TREE_OPERAND (stmt, 0);
73bb17ce 546 gimple_seq try_ = NULL;
547 gimple_seq catch_ = NULL;
42acab1c 548 gimple *mnt;
4ee9c684 549
73bb17ce 550 gimplify_and_add (body, &try_);
c4bac24d 551 mnt = gimple_build_eh_must_not_throw (terminate_fn);
e0d98d5f 552 gimple_seq_add_stmt_without_update (&catch_, mnt);
73bb17ce 553 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
4ee9c684 554
e0d98d5f 555 gimple_seq_add_stmt_without_update (pre_p, mnt);
4ee9c684 556 if (temp)
557 {
4ee9c684 558 *expr_p = temp;
75a70cf9 559 return GS_OK;
4ee9c684 560 }
75a70cf9 561
562 *expr_p = NULL;
563 return GS_ALL_DONE;
4ee9c684 564}
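/* I.e. the MUST_NOT_THROW_EXPR is lowered to the equivalent of

     try { <body> } catch (...) { terminate (); }

   in GIMPLE: a GIMPLE_TRY_CATCH whose handler is a
   GIMPLE_EH_MUST_NOT_THROW naming terminate_fn.  */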
8495c0ca 565
6442eaae 566/* Return TRUE if an operand (OP) of a given TYPE being copied is
567 really just an empty class copy.
568
569 Check that the operand has a simple form so that TARGET_EXPRs and
570 non-empty CONSTRUCTORs get reduced properly, and we leave the
571 return slot optimization alone because it isn't a copy. */
572
573static bool
574simple_empty_class_p (tree type, tree op)
575{
576 return
577 ((TREE_CODE (op) == COMPOUND_EXPR
578 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
550c1405 579 || TREE_CODE (op) == EMPTY_CLASS_EXPR
6442eaae 580 || is_gimple_lvalue (op)
581 || INDIRECT_REF_P (op)
582 || (TREE_CODE (op) == CONSTRUCTOR
583 && CONSTRUCTOR_NELTS (op) == 0
584 && !TREE_CLOBBER_P (op))
585 || (TREE_CODE (op) == CALL_EXPR
586 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
587 && is_really_empty_class (type);
588}
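/* For example, given

     struct E {};
     E a, b;
     a = b;

   "b" is a simple lvalue and E is really empty, so this returns true
   and the MODIFY_EXPR case in cp_gimplify_expr below elides the copy
   entirely.  */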
589
a6a52a73 590/* Returns true if evaluating E as an lvalue has side-effects;
591 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
592 have side-effects until there is a read or write through it. */
593
594static bool
595lvalue_has_side_effects (tree e)
596{
597 if (!TREE_SIDE_EFFECTS (e))
598 return false;
599 while (handled_component_p (e))
600 {
601 if (TREE_CODE (e) == ARRAY_REF
602 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
603 return true;
604 e = TREE_OPERAND (e, 0);
605 }
606 if (DECL_P (e))
607 /* Just naming a variable has no side-effects. */
608 return false;
609 else if (INDIRECT_REF_P (e))
610 /* Similarly, indirection has no side-effects. */
611 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
612 else
613 /* For anything else, trust TREE_SIDE_EFFECTS. */
614 return TREE_SIDE_EFFECTS (e);
615}
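/* For example, for "volatile int v;" the lvalue "v" has
   TREE_SIDE_EFFECTS set, yet merely naming it performs no access, so
   this returns false; "a[f ()]" returns true instead, because
   evaluating the index calls f.  */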
616
8495c0ca 617/* Do C++-specific gimplification. Args are as for gimplify_expr. */
618
619int
75a70cf9 620cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
8495c0ca 621{
622 int saved_stmts_are_full_exprs_p = 0;
d3a3cfb8 623 location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
8495c0ca 624 enum tree_code code = TREE_CODE (*expr_p);
625 enum gimplify_status ret;
626
627 if (STATEMENT_CODE_P (code))
628 {
629 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
630 current_stmt_tree ()->stmts_are_full_exprs_p
631 = STMT_IS_FULL_EXPR_P (*expr_p);
632 }
633
634 switch (code)
635 {
8495c0ca 636 case AGGR_INIT_EXPR:
637 simplify_aggr_init_expr (expr_p);
638 ret = GS_OK;
639 break;
640
a8b75081 641 case VEC_INIT_EXPR:
642 {
643 location_t loc = input_location;
da73cc75 644 tree init = VEC_INIT_EXPR_INIT (*expr_p);
f66fb566 645 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
a8b75081 646 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
647 input_location = EXPR_LOCATION (*expr_p);
f66fb566 648 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
98c0a208 649 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
da73cc75 650 from_array,
a8b75081 651 tf_warning_or_error);
74fd83a9 652 hash_set<tree> pset;
653 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
a0168bf5 654 cp_genericize_tree (expr_p, false);
a8b75081 655 ret = GS_OK;
656 input_location = loc;
657 }
658 break;
659
8495c0ca 660 case THROW_EXPR:
a17c2a3a 661 /* FIXME communicate throw type to back end, probably by moving
8495c0ca 662 THROW_EXPR into ../tree.def. */
663 *expr_p = TREE_OPERAND (*expr_p, 0);
664 ret = GS_OK;
665 break;
666
667 case MUST_NOT_THROW_EXPR:
75a70cf9 668 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
8495c0ca 669 break;
670
75a70cf9 671 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
cf6b103e 672 LHS of an assignment might also be involved in the RHS, as in bug
673 25979. */
8495c0ca 674 case INIT_EXPR:
da73cc75 675 cp_gimplify_init_expr (expr_p);
4fd66866 676 if (TREE_CODE (*expr_p) != INIT_EXPR)
677 return GS_OK;
e3533433 678 /* Fall through. */
1cce6590 679 case MODIFY_EXPR:
6442eaae 680 modify_expr_case:
1cce6590 681 {
682 /* If the back end isn't clever enough to know that the lhs and rhs
683 types are the same, add an explicit conversion. */
684 tree op0 = TREE_OPERAND (*expr_p, 0);
685 tree op1 = TREE_OPERAND (*expr_p, 1);
686
97f7ff5b 687 if (!error_operand_p (op0)
688 && !error_operand_p (op1)
689 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
690 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
1cce6590 691 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
692 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
693 TREE_TYPE (op0), op1);
eda37335 694
6442eaae 695 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
eda37335 696 {
6442eaae 697 /* Remove any copies of empty classes. Also drop volatile
698 variables on the RHS to avoid infinite recursion from
699 gimplify_expr trying to load the value. */
6442eaae 700 if (TREE_SIDE_EFFECTS (op1))
701 {
702 if (TREE_THIS_VOLATILE (op1)
703 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
704 op1 = build_fold_addr_expr (op1);
705
706 gimplify_and_add (op1, pre_p);
707 }
a6a52a73 708 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
709 is_gimple_lvalue, fb_lvalue);
6442eaae 710 *expr_p = TREE_OPERAND (*expr_p, 0);
eda37335 711 }
a6a52a73 712 /* P0145 says that the RHS is sequenced before the LHS.
713 gimplify_modify_expr gimplifies the RHS before the LHS, but that
714 isn't quite strong enough in two cases:
715
716 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
717 mean it's evaluated after the LHS.
718
719 2) the value calculation of the RHS is also sequenced before the
720 LHS, so for scalar assignment we need to preevaluate if the
721 RHS could be affected by LHS side-effects even if it has no
722 side-effects of its own. We don't need this for classes because
723 class assignment takes its RHS by reference. */
724 else if (flag_strong_eval_order > 1
725 && TREE_CODE (*expr_p) == MODIFY_EXPR
726 && lvalue_has_side_effects (op0)
727 && (TREE_CODE (op1) == CALL_EXPR
728 || (SCALAR_TYPE_P (TREE_TYPE (op1))
729 && !TREE_CONSTANT (op1))))
730 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
1cce6590 731 }
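 /* For illustration, with flag_strong_eval_order > 1 (C++17) in

      a[i ()] = f ();

    the RHS is a CALL_EXPR and the LHS has side-effects, so the code
    above pre-evaluates f () into a temporary, keeping the
    RHS-before-LHS ordering required by P0145.  */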
8495c0ca 732 ret = GS_OK;
733 break;
734
735 case EMPTY_CLASS_EXPR:
ff8c638e 736 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
737 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
8495c0ca 738 ret = GS_OK;
739 break;
740
741 case BASELINK:
742 *expr_p = BASELINK_FUNCTIONS (*expr_p);
743 ret = GS_OK;
744 break;
745
746 case TRY_BLOCK:
747 genericize_try_block (expr_p);
748 ret = GS_OK;
749 break;
750
751 case HANDLER:
752 genericize_catch_block (expr_p);
753 ret = GS_OK;
754 break;
755
756 case EH_SPEC_BLOCK:
757 genericize_eh_spec_block (expr_p);
758 ret = GS_OK;
759 break;
760
761 case USING_STMT:
660c48c4 762 gcc_unreachable ();
8495c0ca 763
e7911019 764 case FOR_STMT:
e7911019 765 case WHILE_STMT:
e7911019 766 case DO_STMT:
e7911019 767 case SWITCH_STMT:
57cf061a 768 case CONTINUE_STMT:
769 case BREAK_STMT:
770 gcc_unreachable ();
e7911019 771
8487df40 772 case OMP_FOR:
bc7bff74 773 case OMP_SIMD:
774 case OMP_DISTRIBUTE:
43895be5 775 case OMP_TASKLOOP:
75a70cf9 776 ret = cp_gimplify_omp_for (expr_p, pre_p);
8487df40 777 break;
778
7219fab5 779 case EXPR_STMT:
780 gimplify_expr_stmt (expr_p);
781 ret = GS_OK;
782 break;
783
97d541d5 784 case UNARY_PLUS_EXPR:
785 {
786 tree arg = TREE_OPERAND (*expr_p, 0);
787 tree type = TREE_TYPE (*expr_p);
788 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
789 : arg;
790 ret = GS_OK;
791 }
792 break;
793
433e804e 794 case CALL_EXPR:
10621300 795 ret = GS_OK;
06c75b9a 796 if (!CALL_EXPR_FN (*expr_p))
797 /* Internal function call. */;
798 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
10621300 799 {
06c75b9a 800 /* This is a call to a (compound) assignment operator that used
801 the operator syntax; gimplify the RHS first. */
802 gcc_assert (call_expr_nargs (*expr_p) == 2);
803 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
804 enum gimplify_status t
805 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
806 if (t == GS_ERROR)
807 ret = GS_ERROR;
808 }
809 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
810 {
811 /* Leave the last argument for gimplify_call_expr, to avoid problems
812 with __builtin_va_arg_pack(). */
813 int nargs = call_expr_nargs (*expr_p) - 1;
814 for (int i = 0; i < nargs; ++i)
10621300 815 {
816 enum gimplify_status t
817 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
818 if (t == GS_ERROR)
819 ret = GS_ERROR;
820 }
821 }
e59cff35 822 else if (flag_strong_eval_order
06c75b9a 823 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
824 {
e59cff35 825 /* If flag_strong_eval_order, evaluate the object argument first. */
06c75b9a 826 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
d03fa520 827 if (INDIRECT_TYPE_P (fntype))
06c75b9a 828 fntype = TREE_TYPE (fntype);
829 if (TREE_CODE (fntype) == METHOD_TYPE)
830 {
831 enum gimplify_status t
832 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
833 if (t == GS_ERROR)
834 ret = GS_ERROR;
835 }
836 }
18d371d3 837 if (ret != GS_ERROR)
838 {
839 tree decl = cp_get_callee_fndecl_nofold (*expr_p);
840 if (decl
a0e9bfbb 841 && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
842 BUILT_IN_FRONTEND))
18d371d3 843 *expr_p = boolean_false_node;
844 }
10621300 845 break;
846
6442eaae 847 case RETURN_EXPR:
848 if (TREE_OPERAND (*expr_p, 0)
849 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
850 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
851 {
852 expr_p = &TREE_OPERAND (*expr_p, 0);
853 code = TREE_CODE (*expr_p);
854 /* Avoid going through the INIT_EXPR case, which can
855 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
856 goto modify_expr_case;
857 }
858 /* Fall through. */
859
8495c0ca 860 default:
8458f4ca 861 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
8495c0ca 862 break;
863 }
864
865 /* Restore saved state. */
866 if (STATEMENT_CODE_P (code))
867 current_stmt_tree ()->stmts_are_full_exprs_p
868 = saved_stmts_are_full_exprs_p;
869
870 return ret;
871}
dddab69e 872
dddcebdc 873static inline bool
9f627b1a 874is_invisiref_parm (const_tree t)
dddcebdc 875{
806e4c12 876 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
dddcebdc 877 && DECL_BY_REFERENCE (t));
878}
879
7db5a284 880/* Return true if the uids in both int tree maps are equal. */
881
2ef51f0e 882bool
883cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
7db5a284 884{
7db5a284 885 return (a->uid == b->uid);
886}
887
888/* Hash a UID in a cxx_int_tree_map. */
889
890unsigned int
2ef51f0e 891cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
7db5a284 892{
2ef51f0e 893 return item->uid;
7db5a284 894}
895
9b222de3 896/* A stable comparison routine for use with splay trees and DECLs. */
897
898static int
899splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
900{
901 tree a = (tree) xa;
902 tree b = (tree) xb;
903
904 return DECL_UID (a) - DECL_UID (b);
905}
906
907/* OpenMP context during genericization. */
908
909struct cp_genericize_omp_taskreg
910{
911 bool is_parallel;
912 bool default_shared;
913 struct cp_genericize_omp_taskreg *outer;
914 splay_tree variables;
915};
916
917/* Return true if genericization should try to determine if
918 DECL is firstprivate or shared within task regions. */
919
920static bool
921omp_var_to_track (tree decl)
922{
923 tree type = TREE_TYPE (decl);
924 if (is_invisiref_parm (decl))
925 type = TREE_TYPE (type);
90ad495b 926 else if (TYPE_REF_P (type))
a3ee44e4 927 type = TREE_TYPE (type);
9b222de3 928 while (TREE_CODE (type) == ARRAY_TYPE)
929 type = TREE_TYPE (type);
930 if (type == error_mark_node || !CLASS_TYPE_P (type))
931 return false;
800478e6 932 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
9b222de3 933 return false;
934 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
935 return false;
936 return true;
937}
938
939/* Note DECL use in OpenMP region OMP_CTX during genericization. */
940
941static void
942omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
943{
944 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
945 (splay_tree_key) decl);
946 if (n == NULL)
947 {
948 int flags = OMP_CLAUSE_DEFAULT_SHARED;
949 if (omp_ctx->outer)
950 omp_cxx_notice_variable (omp_ctx->outer, decl);
951 if (!omp_ctx->default_shared)
952 {
953 struct cp_genericize_omp_taskreg *octx;
954
955 for (octx = omp_ctx->outer; octx; octx = octx->outer)
956 {
957 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
958 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
959 {
960 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
961 break;
962 }
963 if (octx->is_parallel)
964 break;
965 }
966 if (octx == NULL
967 && (TREE_CODE (decl) == PARM_DECL
968 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
969 && DECL_CONTEXT (decl) == current_function_decl)))
970 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
971 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
972 {
973 /* DECL is implicitly determined firstprivate in
974 the current task construct. Ensure copy ctor and
975 dtor are instantiated, because during gimplification
976 it will already be too late. */
977 tree type = TREE_TYPE (decl);
978 if (is_invisiref_parm (decl))
979 type = TREE_TYPE (type);
90ad495b 980 else if (TYPE_REF_P (type))
a3ee44e4 981 type = TREE_TYPE (type);
9b222de3 982 while (TREE_CODE (type) == ARRAY_TYPE)
983 type = TREE_TYPE (type);
984 get_copy_ctor (type, tf_none);
985 get_dtor (type, tf_none);
986 }
987 }
988 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
989 }
990}
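/* For example, in

     S s;
     #pragma omp task
     use (s);

   with class type S, "s" is implicitly firstprivate in the task, so
   the code above instantiates S's copy constructor and destructor
   now; during gimplification it would already be too late.  */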
991
992/* Genericization context. */
993
660c48c4 994struct cp_genericize_data
995{
431205b7 996 hash_set<tree> *p_set;
f1f41a6c 997 vec<tree> bind_expr_stack;
9b222de3 998 struct cp_genericize_omp_taskreg *omp_ctx;
cb40a6f7 999 tree try_block;
d120fa25 1000 bool no_sanitize_p;
a0168bf5 1001 bool handle_invisiref_parm_p;
660c48c4 1002};
1003
d2c63826 1004/* Perform any pre-gimplification folding of C++ front end trees to
1005 GENERIC.
 1006 Note: The folding of non-OMP cases should eventually move into
 1007 the middle end. For now most folding is done only on GENERIC in
 1008 fold-const, so we need to perform this before the transformation
 1009 to GIMPLE form. */
1010
1011static tree
1012cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
1013{
1014 tree stmt;
1015 enum tree_code code;
1016
1017 *stmt_p = stmt = cp_fold (*stmt_p);
1018
0406b32f 1019 if (((hash_set<tree> *) data)->add (stmt))
1020 {
1021 /* Don't walk subtrees of stmts we've already walked once, otherwise
1022 we can have exponential complexity with e.g. lots of nested
 1023 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will always
 1024 return the same tree, whose subtrees were already walked the first
 1025 time cp_fold_r was called on it. */
1026 *walk_subtrees = 0;
1027 return NULL;
1028 }
1029
d2c63826 1030 code = TREE_CODE (stmt);
1031 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
efa02472 1032 || code == OMP_TASKLOOP || code == OACC_LOOP)
d2c63826 1033 {
1034 tree x;
1035 int i, n;
1036
1037 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1038 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1039 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1040 x = OMP_FOR_COND (stmt);
1041 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1042 {
1043 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1044 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1045 }
1046 else if (x && TREE_CODE (x) == TREE_VEC)
1047 {
1048 n = TREE_VEC_LENGTH (x);
1049 for (i = 0; i < n; i++)
1050 {
1051 tree o = TREE_VEC_ELT (x, i);
1052 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1053 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1054 }
1055 }
1056 x = OMP_FOR_INCR (stmt);
1057 if (x && TREE_CODE (x) == TREE_VEC)
1058 {
1059 n = TREE_VEC_LENGTH (x);
1060 for (i = 0; i < n; i++)
1061 {
1062 tree o = TREE_VEC_ELT (x, i);
1063 if (o && TREE_CODE (o) == MODIFY_EXPR)
1064 o = TREE_OPERAND (o, 1);
1065 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1066 || TREE_CODE (o) == POINTER_PLUS_EXPR))
1067 {
1068 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1069 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1070 }
1071 }
1072 }
1073 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1074 *walk_subtrees = 0;
1075 }
1076
1077 return NULL;
1078}
1079
8f559c6e 1080/* Fold ALL the trees! FIXME we should be able to remove this, but
1081 apparently that still causes optimization regressions. */
1082
1083void
1084cp_fold_function (tree fndecl)
1085{
0406b32f 1086 hash_set<tree> pset;
1087 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
8f559c6e 1088}
1089
dddcebdc 1090/* Perform any pre-gimplification lowering of C++ front end trees to
1091 GENERIC. */
dddab69e 1092
1093static tree
dddcebdc 1094cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
dddab69e 1095{
1096 tree stmt = *stmt_p;
660c48c4 1097 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
431205b7 1098 hash_set<tree> *p_set = wtd->p_set;
dddab69e 1099
9b222de3 1100 /* If in an OpenMP context, note var uses. */
1101 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
80a58eb0 1102 && (VAR_P (stmt)
9b222de3 1103 || TREE_CODE (stmt) == PARM_DECL
1104 || TREE_CODE (stmt) == RESULT_DECL)
1105 && omp_var_to_track (stmt))
1106 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1107
cbc3b89f 1108 /* Don't dereference parms in a thunk, pass the references through. */
1109 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1110 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1111 {
1112 *walk_subtrees = 0;
1113 return NULL;
1114 }
1115
6f0a524c 1116 /* Dereference invisible reference parms. */
a0168bf5 1117 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
dddcebdc 1118 {
806e4c12 1119 *stmt_p = convert_from_reference (stmt);
0e899ffe 1120 p_set->add (*stmt_p);
dddcebdc 1121 *walk_subtrees = 0;
1122 return NULL;
1123 }
1124
7db5a284 1125 /* Map block scope extern declarations to visible declarations with the
1126 same name and type in outer scopes if any. */
1127 if (cp_function_chain->extern_decl_map
4cace8cb 1128 && VAR_OR_FUNCTION_DECL_P (stmt)
7db5a284 1129 && DECL_EXTERNAL (stmt))
1130 {
1131 struct cxx_int_tree_map *h, in;
1132 in.uid = DECL_UID (stmt);
2ef51f0e 1133 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
7db5a284 1134 if (h)
1135 {
1136 *stmt_p = h->to;
cce77517 1137 TREE_USED (h->to) |= TREE_USED (stmt);
7db5a284 1138 *walk_subtrees = 0;
1139 return NULL;
1140 }
1141 }
1142
3ddb3278 1143 if (TREE_CODE (stmt) == INTEGER_CST
90ad495b 1144 && TYPE_REF_P (TREE_TYPE (stmt))
3ddb3278 1145 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1146 && !wtd->no_sanitize_p)
1147 {
1148 ubsan_maybe_instrument_reference (stmt_p);
1149 if (*stmt_p != stmt)
1150 {
1151 *walk_subtrees = 0;
1152 return NULL_TREE;
1153 }
1154 }
1155
dddcebdc 1156 /* Other than invisiref parms, don't walk the same tree twice. */
431205b7 1157 if (p_set->contains (stmt))
dddcebdc 1158 {
1159 *walk_subtrees = 0;
1160 return NULL_TREE;
1161 }
1162
cc9e1a64 1163 switch (TREE_CODE (stmt))
dddcebdc 1164 {
cc9e1a64 1165 case ADDR_EXPR:
1166 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1167 {
1168 /* If in an OpenMP context, note var uses. */
1169 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1170 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1171 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1172 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
9b222de3 1173 *walk_subtrees = 0;
cc9e1a64 1174 }
1175 break;
1176
1177 case RETURN_EXPR:
1178 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1179 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1180 *walk_subtrees = 0;
1181 break;
1182
1183 case OMP_CLAUSE:
1184 switch (OMP_CLAUSE_CODE (stmt))
1185 {
1186 case OMP_CLAUSE_LASTPRIVATE:
1187 /* Don't dereference an invisiref in OpenMP clauses. */
1188 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1189 {
1190 *walk_subtrees = 0;
1191 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1192 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1193 cp_genericize_r, data, NULL);
1194 }
1195 break;
1196 case OMP_CLAUSE_PRIVATE:
1197 /* Don't dereference an invisiref in OpenMP clauses. */
1198 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
9b222de3 1199 *walk_subtrees = 0;
cc9e1a64 1200 else if (wtd->omp_ctx != NULL)
1201 {
1202 /* Private clause doesn't cause any references to the
1203 var in outer contexts, avoid calling
1204 omp_cxx_notice_variable for it. */
1205 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1206 wtd->omp_ctx = NULL;
1207 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1208 data, NULL);
1209 wtd->omp_ctx = old;
1210 *walk_subtrees = 0;
1211 }
1212 break;
1213 case OMP_CLAUSE_SHARED:
1214 case OMP_CLAUSE_FIRSTPRIVATE:
1215 case OMP_CLAUSE_COPYIN:
1216 case OMP_CLAUSE_COPYPRIVATE:
1217 /* Don't dereference an invisiref in OpenMP clauses. */
1218 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
bc7bff74 1219 *walk_subtrees = 0;
cc9e1a64 1220 break;
1221 case OMP_CLAUSE_REDUCTION:
7e5a76c8 1222 case OMP_CLAUSE_IN_REDUCTION:
1223 case OMP_CLAUSE_TASK_REDUCTION:
cc9e1a64 1224 /* Don't dereference an invisiref in reduction clause's
1225 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1226 still needs to be genericized. */
1227 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1228 {
1229 *walk_subtrees = 0;
1230 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1231 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1232 cp_genericize_r, data, NULL);
1233 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1234 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1235 cp_genericize_r, data, NULL);
1236 }
1237 break;
1238 default:
1239 break;
1240 }
1241 break;
1242
1243 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1244 to lower this construct before scanning it, so we need to lower these
1245 before doing anything else. */
1246 case CLEANUP_STMT:
1247 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1248 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1249 : TRY_FINALLY_EXPR,
1250 void_type_node,
1251 CLEANUP_BODY (stmt),
1252 CLEANUP_EXPR (stmt));
1253 break;
1254
1255 case IF_STMT:
97767aad 1256 genericize_if_stmt (stmt_p);
1257 /* *stmt_p has changed, tail recurse to handle it again. */
1258 return cp_genericize_r (stmt_p, walk_subtrees, data);
97767aad 1259
cc9e1a64 1260 /* COND_EXPR might have incompatible types in branches if one or both
1261 arms are bitfields. Fix it up now. */
1262 case COND_EXPR:
1263 {
1264 tree type_left
1265 = (TREE_OPERAND (stmt, 1)
1266 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1267 : NULL_TREE);
1268 tree type_right
1269 = (TREE_OPERAND (stmt, 2)
1270 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1271 : NULL_TREE);
1272 if (type_left
1273 && !useless_type_conversion_p (TREE_TYPE (stmt),
1274 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1275 {
1276 TREE_OPERAND (stmt, 1)
1277 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1278 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1279 type_left));
1280 }
1281 if (type_right
1282 && !useless_type_conversion_p (TREE_TYPE (stmt),
1283 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1284 {
1285 TREE_OPERAND (stmt, 2)
1286 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1287 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1288 type_right));
1289 }
1290 }
1291 break;
a0a1efe3 1292
cc9e1a64 1293 case BIND_EXPR:
9b222de3 1294 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1295 {
1296 tree decl;
1297 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
80a58eb0 1298 if (VAR_P (decl)
9b222de3 1299 && !DECL_EXTERNAL (decl)
1300 && omp_var_to_track (decl))
1301 {
1302 splay_tree_node n
1303 = splay_tree_lookup (wtd->omp_ctx->variables,
1304 (splay_tree_key) decl);
1305 if (n == NULL)
1306 splay_tree_insert (wtd->omp_ctx->variables,
1307 (splay_tree_key) decl,
1308 TREE_STATIC (decl)
1309 ? OMP_CLAUSE_DEFAULT_SHARED
1310 : OMP_CLAUSE_DEFAULT_PRIVATE);
1311 }
1312 }
9917317a 1313 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
d120fa25 1314 {
1315 /* The point here is to not sanitize static initializers. */
1316 bool no_sanitize_p = wtd->no_sanitize_p;
1317 wtd->no_sanitize_p = true;
1318 for (tree decl = BIND_EXPR_VARS (stmt);
1319 decl;
1320 decl = DECL_CHAIN (decl))
1321 if (VAR_P (decl)
1322 && TREE_STATIC (decl)
1323 && DECL_INITIAL (decl))
1324 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1325 wtd->no_sanitize_p = no_sanitize_p;
1326 }
f1f41a6c 1327 wtd->bind_expr_stack.safe_push (stmt);
660c48c4 1328 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1329 cp_genericize_r, data, NULL);
f1f41a6c 1330 wtd->bind_expr_stack.pop ();
cc9e1a64 1331 break;
660c48c4 1332
cc9e1a64 1333 case USING_STMT:
1334 {
1335 tree block = NULL_TREE;
1336
1337 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1338 BLOCK, and append an IMPORTED_DECL to its
1339 BLOCK_VARS chained list. */
1340 if (wtd->bind_expr_stack.exists ())
1341 {
1342 int i;
1343 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1344 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1345 break;
1346 }
1347 if (block)
1348 {
3dd770ef 1349 tree decl = TREE_OPERAND (stmt, 0);
1350 gcc_assert (decl);
cc9e1a64 1351
3dd770ef 1352 if (undeduced_auto_decl (decl))
1353 /* Omit from the GENERIC, the back-end can't handle it. */;
1354 else
1355 {
1356 tree using_directive = make_node (IMPORTED_DECL);
1357 TREE_TYPE (using_directive) = void_type_node;
660c48c4 1358
3dd770ef 1359 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1360 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1361 BLOCK_VARS (block) = using_directive;
1362 }
cc9e1a64 1363 }
1364 /* The USING_STMT won't appear in GENERIC. */
1365 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1366 *walk_subtrees = 0;
1367 }
1368 break;
1369
1370 case DECL_EXPR:
1371 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
660c48c4 1372 {
cc9e1a64 1373 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1374 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1375 *walk_subtrees = 0;
660c48c4 1376 }
cc9e1a64 1377 else
660c48c4 1378 {
cc9e1a64 1379 tree d = DECL_EXPR_DECL (stmt);
1380 if (VAR_P (d))
1381 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
660c48c4 1382 }
cc9e1a64 1383 break;
9b222de3 1384
cc9e1a64 1385 case OMP_PARALLEL:
1386 case OMP_TASK:
1387 case OMP_TASKLOOP:
1388 {
1389 struct cp_genericize_omp_taskreg omp_ctx;
1390 tree c, decl;
1391 splay_tree_node n;
1392
1393 *walk_subtrees = 0;
1394 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1395 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1396 omp_ctx.default_shared = omp_ctx.is_parallel;
1397 omp_ctx.outer = wtd->omp_ctx;
1398 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1399 wtd->omp_ctx = &omp_ctx;
1400 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1401 switch (OMP_CLAUSE_CODE (c))
1402 {
1403 case OMP_CLAUSE_SHARED:
1404 case OMP_CLAUSE_PRIVATE:
1405 case OMP_CLAUSE_FIRSTPRIVATE:
1406 case OMP_CLAUSE_LASTPRIVATE:
1407 decl = OMP_CLAUSE_DECL (c);
1408 if (decl == error_mark_node || !omp_var_to_track (decl))
1409 break;
1410 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1411 if (n != NULL)
1412 break;
1413 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1414 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1415 ? OMP_CLAUSE_DEFAULT_SHARED
1416 : OMP_CLAUSE_DEFAULT_PRIVATE);
1417 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1418 omp_cxx_notice_variable (omp_ctx.outer, decl);
9b222de3 1419 break;
cc9e1a64 1420 case OMP_CLAUSE_DEFAULT:
1421 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1422 omp_ctx.default_shared = true;
1423 default:
9b222de3 1424 break;
cc9e1a64 1425 }
1426 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1427 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1428 else
1429 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1430 wtd->omp_ctx = omp_ctx.outer;
1431 splay_tree_delete (omp_ctx.variables);
1432 }
1433 break;
1434
1435 case TRY_BLOCK:
1436 {
1437 *walk_subtrees = 0;
1438 tree try_block = wtd->try_block;
1439 wtd->try_block = stmt;
1440 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1441 wtd->try_block = try_block;
1442 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1443 }
1444 break;
1445
1446 case MUST_NOT_THROW_EXPR:
cb40a6f7 1447 /* MUST_NOT_THROW_COND might be something else with TM. */
1448 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1449 {
1450 *walk_subtrees = 0;
1451 tree try_block = wtd->try_block;
1452 wtd->try_block = stmt;
1453 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1454 wtd->try_block = try_block;
1455 }
cc9e1a64 1456 break;
1457
1458 case THROW_EXPR:
1459 {
1460 location_t loc = location_of (stmt);
1461 if (TREE_NO_WARNING (stmt))
1462 /* Never mind. */;
1463 else if (wtd->try_block)
1464 {
bc35ef65 1465 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1466 {
1467 auto_diagnostic_group d;
1468 if (warning_at (loc, OPT_Wterminate,
1469 "throw will always call terminate()")
1470 && cxx_dialect >= cxx11
1471 && DECL_DESTRUCTOR_P (current_function_decl))
1472 inform (loc, "in C++11 destructors default to noexcept");
1473 }
cc9e1a64 1474 }
1475 else
1476 {
1477 if (warn_cxx11_compat && cxx_dialect < cxx11
1478 && DECL_DESTRUCTOR_P (current_function_decl)
1479 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1480 == NULL_TREE)
1481 && (get_defaulted_eh_spec (current_function_decl)
1482 == empty_except_spec))
1483 warning_at (loc, OPT_Wc__11_compat,
1484 "in C++11 this throw will terminate because "
1485 "destructors default to noexcept");
1486 }
1487 }
1488 break;
1489
1490 case CONVERT_EXPR:
1491 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1492 break;
1493
1494 case FOR_STMT:
1495 genericize_for_stmt (stmt_p, walk_subtrees, data);
1496 break;
1497
1498 case WHILE_STMT:
1499 genericize_while_stmt (stmt_p, walk_subtrees, data);
1500 break;
1501
1502 case DO_STMT:
1503 genericize_do_stmt (stmt_p, walk_subtrees, data);
1504 break;
1505
1506 case SWITCH_STMT:
1507 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1508 break;
1509
1510 case CONTINUE_STMT:
1511 genericize_continue_stmt (stmt_p);
1512 break;
1513
1514 case BREAK_STMT:
1515 genericize_break_stmt (stmt_p);
1516 break;
1517
1518 case OMP_FOR:
1519 case OMP_SIMD:
1520 case OMP_DISTRIBUTE:
68bf4712 1521 case OACC_LOOP:
cc9e1a64 1522 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1523 break;
1524
1525 case PTRMEM_CST:
518495b8 1526 /* By the time we get here we're handing off to the back end, so we don't
1527 need or want to preserve PTRMEM_CST anymore. */
1528 *stmt_p = cplus_expand_constant (stmt);
1529 *walk_subtrees = 0;
cc9e1a64 1530 break;
1531
1532 case MEM_REF:
9564446e 1533 /* For MEM_REF, make sure not to sanitize the second operand even
cc9e1a64 1534 if it has reference type. It is just an offset with a type
9564446e 1535 holding other information. There is no other processing we
1536 need to do for INTEGER_CSTs, so just ignore the second argument
1537 unconditionally. */
1538 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1539 *walk_subtrees = 0;
cc9e1a64 1540 break;
1541
1542 case NOP_EXPR:
1543 if (!wtd->no_sanitize_p
1544 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
90ad495b 1545 && TYPE_REF_P (TREE_TYPE (stmt)))
3ddb3278 1546 ubsan_maybe_instrument_reference (stmt_p);
cc9e1a64 1547 break;
1548
1549 case CALL_EXPR:
1550 if (!wtd->no_sanitize_p
1551 && sanitize_flags_p ((SANITIZE_NULL
1552 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
392dee1e 1553 {
1554 tree fn = CALL_EXPR_FN (stmt);
1555 if (fn != NULL_TREE
1556 && !error_operand_p (fn)
d03fa520 1557 && INDIRECT_TYPE_P (TREE_TYPE (fn))
392dee1e 1558 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1559 {
1560 bool is_ctor
1561 = TREE_CODE (fn) == ADDR_EXPR
1562 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1563 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
9917317a 1564 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
32cf7025 1565 ubsan_maybe_instrument_member_call (stmt, is_ctor);
9917317a 1566 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
32cf7025 1567 cp_ubsan_maybe_instrument_member_call (stmt);
392dee1e 1568 }
13da18cc 1569 else if (fn == NULL_TREE
1570 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1571 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
90ad495b 1572 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
13da18cc 1573 *walk_subtrees = 0;
392dee1e 1574 }
91735070 1575 /* Fall through. */
1576 case AGGR_INIT_EXPR:
1577 /* For calls to a multi-versioned function, overload resolution
1578 returns the function with the highest target priority, that is,
 1579 the version that will be checked for dispatching first. If this
 1580 version is inlinable, a direct call to this version can be made;
 1581 otherwise the call should go through the dispatcher. */
1582 {
ced7e116 1583 tree fn = cp_get_callee_fndecl_nofold (stmt);
91735070 1584 if (fn && DECL_FUNCTION_VERSIONED (fn)
1585 && (current_function_decl == NULL
1586 || !targetm.target_option.can_inline_p (current_function_decl,
1587 fn)))
1588 if (tree dis = get_function_version_dispatcher (fn))
1589 {
1590 mark_versions_used (dis);
1591 dis = build_address (dis);
1592 if (TREE_CODE (stmt) == CALL_EXPR)
1593 CALL_EXPR_FN (stmt) = dis;
1594 else
1595 AGGR_INIT_EXPR_FN (stmt) = dis;
1596 }
1597 }
cc9e1a64 1598 break;
1599
7604a798 1600 case TARGET_EXPR:
1601 if (TARGET_EXPR_INITIAL (stmt)
1602 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1603 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1604 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1605 break;
1606
cc9e1a64 1607 default:
1608 if (IS_TYPE_OR_DECL_P (stmt))
1609 *walk_subtrees = 0;
1610 break;
392dee1e 1611 }
660c48c4 1612
431205b7 1613 p_set->add (*stmt_p);
9031d10b 1614
dddab69e 1615 return NULL;
1616}
1617
57cf061a 1618/* Lower C++ front end trees to GENERIC in T_P. */
1619
1620static void
a0168bf5 1621cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
57cf061a 1622{
1623 struct cp_genericize_data wtd;
1624
431205b7 1625 wtd.p_set = new hash_set<tree>;
f1f41a6c 1626 wtd.bind_expr_stack.create (0);
57cf061a 1627 wtd.omp_ctx = NULL;
cb40a6f7 1628 wtd.try_block = NULL_TREE;
d120fa25 1629 wtd.no_sanitize_p = false;
a0168bf5 1630 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
57cf061a 1631 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
431205b7 1632 delete wtd.p_set;
f1f41a6c 1633 wtd.bind_expr_stack.release ();
9917317a 1634 if (sanitize_flags_p (SANITIZE_VPTR))
32cf7025 1635 cp_ubsan_instrument_member_accesses (t_p);
57cf061a 1636}
1637
020bc656 1638/* If a non-void function doesn't obviously end with a return,
 1639 add ubsan instrumentation code to verify it at runtime. If
2fb20ba2 1640 -fsanitize=return is not enabled, instrument
 1641 __builtin_unreachable instead. */
020bc656 1642
1643static void
2fb20ba2 1644cp_maybe_instrument_return (tree fndecl)
020bc656 1645{
1646 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1647 || DECL_CONSTRUCTOR_P (fndecl)
1648 || DECL_DESTRUCTOR_P (fndecl)
1649 || !targetm.warn_func_return (fndecl))
1650 return;
1651
9987c8d5 1652 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
 1653 /* Don't add __builtin_unreachable () if not optimizing; it will not
 1654 improve any optimizations in that case, just break code relying on UB.
 1655 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
 1656 UBSan covers this with ubsan_instrument_return above, where sufficient
 1657 location information is provided, while the __builtin_unreachable ()
 1658 below, used when return sanitization is disabled, would just produce
 1659 a hard-to-understand runtime error without a location. */
1660 && (!optimize
1661 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1662 return;
1663
020bc656 1664 tree t = DECL_SAVED_TREE (fndecl);
1665 while (t)
1666 {
1667 switch (TREE_CODE (t))
1668 {
1669 case BIND_EXPR:
1670 t = BIND_EXPR_BODY (t);
1671 continue;
1672 case TRY_FINALLY_EXPR:
f907d51b 1673 case CLEANUP_POINT_EXPR:
020bc656 1674 t = TREE_OPERAND (t, 0);
1675 continue;
1676 case STATEMENT_LIST:
1677 {
1678 tree_stmt_iterator i = tsi_last (t);
8a42a2fb 1679 while (!tsi_end_p (i))
1680 {
1681 tree p = tsi_stmt (i);
1682 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1683 break;
1684 tsi_prev (&i);
1685 }
020bc656 1686 if (!tsi_end_p (i))
1687 {
1688 t = tsi_stmt (i);
1689 continue;
1690 }
1691 }
1692 break;
1693 case RETURN_EXPR:
1694 return;
1695 default:
1696 break;
1697 }
1698 break;
1699 }
1700 if (t == NULL_TREE)
1701 return;
ce7e2f0e 1702 tree *p = &DECL_SAVED_TREE (fndecl);
1703 if (TREE_CODE (*p) == BIND_EXPR)
1704 p = &BIND_EXPR_BODY (*p);
2fb20ba2 1705
1706 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1707 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1708 t = ubsan_instrument_return (loc);
1709 else
1710 {
1711 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1712 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1713 }
1714
ce7e2f0e 1715 append_to_statement_list (t, p);
020bc656 1716}
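/* For example, in

     int f (bool b) { if (b) return 1; }

   the body does not obviously end in a RETURN_EXPR, so with
   -fsanitize=return a runtime check is appended here, and otherwise
   (when optimizing and not sanitizing unreachable) a
   __builtin_unreachable () call marks the fall-through path.  */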
1717
dddab69e 1718void
1719cp_genericize (tree fndecl)
1720{
dddcebdc 1721 tree t;
dddcebdc 1722
1723 /* Fix up the types of parms passed by invisible reference. */
1767a056 1724 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1fe46df1 1725 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1726 {
1727 /* If a function's arguments are copied to create a thunk,
1728 then DECL_BY_REFERENCE will be set -- but the type of the
1729 argument will be a pointer type, so we will never get
1730 here. */
1731 gcc_assert (!DECL_BY_REFERENCE (t));
1732 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1733 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1734 DECL_BY_REFERENCE (t) = 1;
1735 TREE_ADDRESSABLE (t) = 0;
1736 relayout_decl (t);
1737 }
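
  /* An illustrative sketch, not part of GCC: a parameter whose type is
     TREE_ADDRESSABLE, e.g.

       struct S { S (const S &); int i; };
       int get (S s) { return s.i; }

     is passed by invisible reference under the C++ ABI, so the loop
     above gives `s' reference type and sets DECL_BY_REFERENCE.  */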
dddcebdc 1738
806e4c12 1739 /* Do the same for the return value. */
1740 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1741 {
1742 t = DECL_RESULT (fndecl);
1743 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1744 DECL_BY_REFERENCE (t) = 1;
1745 TREE_ADDRESSABLE (t) = 0;
1746 relayout_decl (t);
ae294470 1747 if (DECL_NAME (t))
1748 {
1749 /* Adjust DECL_VALUE_EXPR of the original var. */
1750 tree outer = outer_curly_brace_block (current_function_decl);
1751 tree var;
1752
1753 if (outer)
1754 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1755 if (VAR_P (var)
1756 && DECL_NAME (t) == DECL_NAME (var)
ae294470 1757 && DECL_HAS_VALUE_EXPR_P (var)
1758 && DECL_VALUE_EXPR (var) == t)
1759 {
1760 tree val = convert_from_reference (t);
1761 SET_DECL_VALUE_EXPR (var, val);
1762 break;
1763 }
1764 }
806e4c12 1765 }
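
  /* An illustrative sketch, not part of GCC: with the named return
     value optimization, in

       S make () { S named; fill (&named); return named; }

     `named' has a DECL_VALUE_EXPR referring to the result; once the
     result is given reference type above, convert_from_reference
     rewrites that value expr to dereference it.  */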
1766
dddcebdc 1767 /* If we're a clone, the body is already GIMPLE. */
1768 if (DECL_CLONED_FUNCTION_P (fndecl))
1769 return;
1770
df0c563f 1771 /* Allow cp_genericize calls to be nested. */
1772 tree save_bc_label[2];
1773 save_bc_label[bc_break] = bc_label[bc_break];
1774 save_bc_label[bc_continue] = bc_label[bc_continue];
1775 bc_label[bc_break] = NULL_TREE;
1776 bc_label[bc_continue] = NULL_TREE;
1777
dddcebdc 1778 /* We do want to see every occurrence of the parms, so we can't just use
1779 walk_tree's hash functionality. */
a0168bf5 1780 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
dddab69e 1781
2fb20ba2 1782 cp_maybe_instrument_return (fndecl);
020bc656 1783
dddab69e 1784 /* Do everything else. */
1785 c_genericize (fndecl);
8487df40 1786
1787 gcc_assert (bc_label[bc_break] == NULL);
1788 gcc_assert (bc_label[bc_continue] == NULL);
df0c563f 1789 bc_label[bc_break] = save_bc_label[bc_break];
1790 bc_label[bc_continue] = save_bc_label[bc_continue];
8487df40 1791}
1792\f
1793/* Build code to apply FN to each member of ARG1 and ARG2. FN may be
 1794 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1795 actually only takes one argument. */
1796
1797static tree
1798cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1799{
c1be427d 1800 tree defparm, parm, t;
d01f58f9 1801 int i = 0;
1802 int nargs;
1803 tree *argarray;
2f2c591f 1804
8487df40 1805 if (fn == NULL)
1806 return NULL;
1807
d01f58f9 1808 nargs = list_length (DECL_ARGUMENTS (fn));
fd70b918 1809 argarray = XALLOCAVEC (tree, nargs);
d01f58f9 1810
2f2c591f 1811 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1812 if (arg2)
1813 defparm = TREE_CHAIN (defparm);
1814
c06d7bdd 1815 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
8487df40 1816 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1817 {
1818 tree inner_type = TREE_TYPE (arg1);
1819 tree start1, end1, p1;
1820 tree start2 = NULL, p2 = NULL;
c1be427d 1821 tree ret = NULL, lab;
8487df40 1822
1823 start1 = arg1;
1824 start2 = arg2;
1825 do
1826 {
1827 inner_type = TREE_TYPE (inner_type);
1828 start1 = build4 (ARRAY_REF, inner_type, start1,
1829 size_zero_node, NULL, NULL);
1830 if (arg2)
1831 start2 = build4 (ARRAY_REF, inner_type, start2,
1832 size_zero_node, NULL, NULL);
1833 }
1834 while (TREE_CODE (inner_type) == ARRAY_TYPE);
389dd41b 1835 start1 = build_fold_addr_expr_loc (input_location, start1);
8487df40 1836 if (arg2)
389dd41b 1837 start2 = build_fold_addr_expr_loc (input_location, start2);
8487df40 1838
1839 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2cc66f2a 1840 end1 = fold_build_pointer_plus (start1, end1);
8487df40 1841
f9e245b2 1842 p1 = create_tmp_var (TREE_TYPE (start1));
75a70cf9 1843 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
8487df40 1844 append_to_statement_list (t, &ret);
1845
1846 if (arg2)
1847 {
f9e245b2 1848 p2 = create_tmp_var (TREE_TYPE (start2));
75a70cf9 1849 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
8487df40 1850 append_to_statement_list (t, &ret);
1851 }
1852
e60a6f7b 1853 lab = create_artificial_label (input_location);
8487df40 1854 t = build1 (LABEL_EXPR, void_type_node, lab);
1855 append_to_statement_list (t, &ret);
1856
d01f58f9 1857 argarray[i++] = p1;
8487df40 1858 if (arg2)
d01f58f9 1859 argarray[i++] = p2;
2f2c591f 1860 /* Handle default arguments. */
93bb78b6 1861 for (parm = defparm; parm && parm != void_list_node;
1862 parm = TREE_CHAIN (parm), i++)
d01f58f9 1863 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1864 TREE_PURPOSE (parm), fn,
1865 i - is_method, tf_warning_or_error);
d01f58f9 1866 t = build_call_a (fn, i, argarray);
c1be427d 1867 t = fold_convert (void_type_node, t);
1868 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1869 append_to_statement_list (t, &ret);
1870
2cc66f2a 1871 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1872 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
8487df40 1873 append_to_statement_list (t, &ret);
1874
1875 if (arg2)
1876 {
2cc66f2a 1877 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1878 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
8487df40 1879 append_to_statement_list (t, &ret);
1880 }
1881
1882 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1883 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1884 append_to_statement_list (t, &ret);
1885
1886 return ret;
1887 }
1888 else
1889 {
389dd41b 1890 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
8487df40 1891 if (arg2)
389dd41b 1892 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2f2c591f 1893 /* Handle default arguments. */
93bb78b6 1894 for (parm = defparm; parm && parm != void_list_node;
d01f58f9 1895 parm = TREE_CHAIN (parm), i++)
1896 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1897 TREE_PURPOSE (parm), fn,
1898 i - is_method, tf_warning_or_error);
c1be427d 1899 t = build_call_a (fn, i, argarray);
1900 t = fold_convert (void_type_node, t);
1901 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1902 }
1903}
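
/* An illustrative sketch, not part of GCC: for a clause on an array of
   class type, e.g.

     struct T { T (); T (const T &); ~T (); };
     void f ()
     {
       T a[4];
     #pragma omp parallel firstprivate (a)
       ;
     }

   the copy constructor must run for each of the four elements, which is
   what the pointer walk with the loop-back label built above does.  */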
1904
1905/* Return code to initialize DECL with its default constructor, or
1906 NULL if there's nothing to do. */
1907
1908tree
a49c5913 1909cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
8487df40 1910{
1911 tree info = CP_OMP_CLAUSE_INFO (clause);
1912 tree ret = NULL;
1913
1914 if (info)
1915 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1916
1917 return ret;
1918}
1919
1920/* Return code to initialize DST with a copy constructor from SRC. */
1921
1922tree
1923cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1924{
1925 tree info = CP_OMP_CLAUSE_INFO (clause);
1926 tree ret = NULL;
1927
1928 if (info)
1929 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1930 if (ret == NULL)
75a70cf9 1931 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1932
1933 return ret;
1934}
1935
1936/* Similarly, except use an assignment operator instead. */
1937
1938tree
1939cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1940{
1941 tree info = CP_OMP_CLAUSE_INFO (clause);
1942 tree ret = NULL;
1943
1944 if (info)
1945 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1946 if (ret == NULL)
75a70cf9 1947 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1948
1949 return ret;
1950}
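
/* An illustrative sketch, not part of GCC, of how the clause kind picks
   a helper recorded in CP_OMP_CLAUSE_INFO for a class-type variable:

     #pragma omp parallel private (v)          // element 0: constructor
     #pragma omp parallel firstprivate (v)     // element 0, with a source
     #pragma omp for lastprivate (v)           // element 2: assignment

   element 1 holds the destructor used by cxx_omp_clause_dtor below.  */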
1951
1952/* Return code to destroy DECL. */
1953
1954tree
1955cxx_omp_clause_dtor (tree clause, tree decl)
1956{
1957 tree info = CP_OMP_CLAUSE_INFO (clause);
1958 tree ret = NULL;
1959
1960 if (info)
1961 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1962
1963 return ret;
dddab69e 1964}
df2c34fc 1965
1966/* True if OpenMP should privatize what this DECL points to rather
1967 than the DECL itself. */
1968
1969bool
9f627b1a 1970cxx_omp_privatize_by_reference (const_tree decl)
df2c34fc 1971{
90ad495b 1972 return (TYPE_REF_P (TREE_TYPE (decl))
bc7bff74 1973 || is_invisiref_parm (decl));
df2c34fc 1974}
fd6481cf 1975
2169f33b 1976/* Return true if DECL is a const-qualified variable with no mutable members. */
1977bool
1978cxx_omp_const_qual_no_mutable (tree decl)
fd6481cf 1979{
2169f33b 1980 tree type = TREE_TYPE (decl);
90ad495b 1981 if (TYPE_REF_P (type))
fd6481cf 1982 {
1983 if (!is_invisiref_parm (decl))
2169f33b 1984 return false;
fd6481cf 1985 type = TREE_TYPE (type);
1986
1987 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1988 {
1989 /* NVR doesn't preserve const qualification of the
1990 variable's type. */
1991 tree outer = outer_curly_brace_block (current_function_decl);
1992 tree var;
1993
1994 if (outer)
1767a056 1995 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1996 if (VAR_P (var)
1997 && DECL_NAME (decl) == DECL_NAME (var)
fd6481cf 1998 && (TYPE_MAIN_VARIANT (type)
1999 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2000 {
2001 if (TYPE_READONLY (TREE_TYPE (var)))
2002 type = TREE_TYPE (var);
2003 break;
2004 }
2005 }
2006 }
2007
2008 if (type == error_mark_node)
2169f33b 2009 return false;
fd6481cf 2010
2011 /* Variables with const-qualified type having no mutable member
2012 are predetermined shared. */
2013 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2169f33b 2014 return true;
2015
2016 return false;
2017}
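
/* An illustrative sketch, not part of GCC: given

     const int limit = 42;
     struct C { mutable int hits; };
     const C cache;

   the predicate above holds for `limit' but not for `cache', whose
   type has a mutable member, so only `limit' is predetermined
   shared.  */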
2018
 2019/* True if the OpenMP sharing attribute of DECL is predetermined. */
2020
2021enum omp_clause_default_kind
b16a5119 2022cxx_omp_predetermined_sharing_1 (tree decl)
2169f33b 2023{
2024 /* Static data members are predetermined shared. */
2025 if (TREE_STATIC (decl))
2026 {
2027 tree ctx = CP_DECL_CONTEXT (decl);
2028 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2029 return OMP_CLAUSE_DEFAULT_SHARED;
2030 }
2031
7e5a76c8 2032 /* `this' may not be specified in data-sharing clauses; still, we
 2033 need to predetermine it as firstprivate. */
2034 if (decl == current_class_ptr)
2035 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
fd6481cf 2036
2037 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2038}
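
/* An illustrative sketch, not part of GCC: in

     struct A { static int s; void f (); };
     void A::f ()
     {
     #pragma omp parallel
       s++;
     }

   the static data member `s' is predetermined shared and the implicit
   `this' pointer is predetermined firstprivate, per the checks above.  */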
2039
b16a5119 2040/* Likewise, but also include the artificial vars. We don't want to
 2041 disallow artificial vars from being mentioned in explicit clauses,
 2042 as we use them e.g. for loop constructs with random access
 2043 iterators other than pointers, but during gimplification we want
 2044 to treat them as predetermined. */
2045
2046enum omp_clause_default_kind
2047cxx_omp_predetermined_sharing (tree decl)
2048{
2049 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2050 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2051 return ret;
2052
 2053 /* Predetermine artificial variables holding integral values; those
 2054 are usually the result of gimplify_one_sizepos or SAVE_EXPR
 2055 gimplification. */
2056 if (VAR_P (decl)
2057 && DECL_ARTIFICIAL (decl)
2058 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2059 && !(DECL_LANG_SPECIFIC (decl)
2060 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2061 return OMP_CLAUSE_DEFAULT_SHARED;
2062
2063 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2064}
2065
fd6481cf 2066/* Finalize an implicitly determined clause. */
2067
2068void
691447ab 2069cxx_omp_finish_clause (tree c, gimple_seq *)
fd6481cf 2070{
2071 tree decl, inner_type;
2072 bool make_shared = false;
2073
2074 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2075 return;
2076
2077 decl = OMP_CLAUSE_DECL (c);
2078 decl = require_complete_type (decl);
2079 inner_type = TREE_TYPE (decl);
2080 if (decl == error_mark_node)
2081 make_shared = true;
90ad495b 2082 else if (TYPE_REF_P (TREE_TYPE (decl)))
43895be5 2083 inner_type = TREE_TYPE (inner_type);
fd6481cf 2084
2085 /* We're interested in the base element, not arrays. */
2086 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2087 inner_type = TREE_TYPE (inner_type);
2088
2089 /* Check for special function availability by building a call to one.
2090 Save the results, because later we won't be in the right context
2091 for making these queries. */
2092 if (!make_shared
2093 && CLASS_TYPE_P (inner_type)
bc7bff74 2094 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
fd6481cf 2095 make_shared = true;
2096
2097 if (make_shared)
1c3f8c56 2098 {
2099 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2100 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2101 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2102 }
fd6481cf 2103}
43895be5 2104
 2105/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 2106 disregarded in an OpenMP construct, because it is going to be
 2107 remapped during OpenMP lowering. SHARED is true if DECL
 2108 is going to be shared, false if it is going to be privatized. */
2109
2110bool
2111cxx_omp_disregard_value_expr (tree decl, bool shared)
2112{
2113 return !shared
2114 && VAR_P (decl)
2115 && DECL_HAS_VALUE_EXPR_P (decl)
2116 && DECL_ARTIFICIAL (decl)
2117 && DECL_LANG_SPECIFIC (decl)
2118 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2119}
d2c63826 2120
69f54cf5 2121/* Fold expression X which is used as an rvalue if RVAL is true. */
2122
4749c4ac 2123tree
69f54cf5 2124cp_fold_maybe_rvalue (tree x, bool rval)
2125{
e71bb662 2126 while (true)
69f54cf5 2127 {
e71bb662 2128 x = cp_fold (x);
0c2ebbc4 2129 if (rval)
2130 x = mark_rvalue_use (x);
ac6641ca 2131 if (rval && DECL_P (x)
90ad495b 2132 && !TYPE_REF_P (TREE_TYPE (x)))
e71bb662 2133 {
2134 tree v = decl_constant_value (x);
2135 if (v != x && v != error_mark_node)
2136 {
2137 x = v;
2138 continue;
2139 }
2140 }
2141 break;
69f54cf5 2142 }
e71bb662 2143 return x;
69f54cf5 2144}
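
/* An illustrative sketch, not part of GCC: in

     const int n = 4;
     int x = n + 1;

   folding `n' as an rvalue lets decl_constant_value substitute its
   constant initializer, so the initializer of `x' folds to 5.  */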
2145
2146/* Fold expression X which is used as an rvalue. */
2147
4749c4ac 2148tree
69f54cf5 2149cp_fold_rvalue (tree x)
2150{
2151 return cp_fold_maybe_rvalue (x, true);
2152}
2153
d93ee6f8 2154/* Perform folding on expression X. */
2155
2156tree
2157cp_fully_fold (tree x)
2158{
2159 if (processing_template_decl)
2160 return x;
2161 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2162 have to call both. */
2163 if (cxx_dialect >= cxx11)
5f9e77dd 2164 {
2165 x = maybe_constant_value (x);
2166 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2167 a TARGET_EXPR; undo that here. */
2168 if (TREE_CODE (x) == TARGET_EXPR)
2169 x = TARGET_EXPR_INITIAL (x);
2170 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2171 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2172 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2173 x = TREE_OPERAND (x, 0);
2174 }
d93ee6f8 2175 return cp_fold_rvalue (x);
2176}
2177
d560f985 2178/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2179 in some cases. */
2180
2181tree
2182cp_fully_fold_init (tree x)
2183{
2184 if (processing_template_decl)
2185 return x;
2186 x = cp_fully_fold (x);
2187 hash_set<tree> pset;
2188 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2189 return x;
2190}
2191
da562e32 2192/* c-common interface to cp_fold. If IN_INIT, this is in a static
 2193 initializer and certain changes are made to the folding done (or
 2194 should be -- FIXME). We never touch maybe_const, as it is only
 2195 used for the C front end's C_MAYBE_CONST_EXPR. */
2196
2197tree
69cd03b2 2198c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
da562e32 2199{
69cd03b2 2200 return cp_fold_maybe_rvalue (x, !lval);
da562e32 2201}
2202
2a655a4c 2203static GTY((deletable)) hash_map<tree, tree> *fold_cache;
d2c63826 2204
a0c919f7 2205/* Dispose of the whole FOLD_CACHE. */
2206
2207void
2208clear_fold_cache (void)
2209{
2a655a4c 2210 if (fold_cache != NULL)
2211 fold_cache->empty ();
a0c919f7 2212}
2213
d2c63826 2214/* This function tries to fold an expression X.
2215 To avoid combinatorial explosion, folding results are kept in fold_cache.
d76863c8 2216 If X is invalid, we don't fold at all.
d2c63826 2217 For performance reasons we don't cache expressions representing a
2218 declaration or constant.
 2219 Returns X or its folded variant. */
2220
2221static tree
2222cp_fold (tree x)
2223{
2224 tree op0, op1, op2, op3;
2225 tree org_x = x, r = NULL_TREE;
2226 enum tree_code code;
2227 location_t loc;
69f54cf5 2228 bool rval_ops = true;
d2c63826 2229
8f559c6e 2230 if (!x || x == error_mark_node)
d2c63826 2231 return x;
2232
d76863c8 2233 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
d2c63826 2234 return x;
2235
2236 /* Don't bother to cache DECLs or constants. */
2237 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2238 return x;
2239
2a655a4c 2240 if (fold_cache == NULL)
2241 fold_cache = hash_map<tree, tree>::create_ggc (101);
2242
2243 if (tree *cached = fold_cache->get (x))
2244 return *cached;
d2c63826 2245
2246 code = TREE_CODE (x);
2247 switch (code)
2248 {
d1cd4a64 2249 case CLEANUP_POINT_EXPR:
2250 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2251 effects. */
2252 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2253 if (!TREE_SIDE_EFFECTS (r))
2254 x = r;
2255 break;
2256
d2c63826 2257 case SIZEOF_EXPR:
2258 x = fold_sizeof_expr (x);
2259 break;
2260
2261 case VIEW_CONVERT_EXPR:
69f54cf5 2262 rval_ops = false;
e3533433 2263 /* FALLTHRU */
d2c63826 2264 case CONVERT_EXPR:
2265 case NOP_EXPR:
2266 case NON_LVALUE_EXPR:
2267
2268 if (VOID_TYPE_P (TREE_TYPE (x)))
ca29c574 2269 {
2270 /* This is just to make sure we don't end up with casts to
2271 void from error_mark_node. If we just return x, then
2272 cp_fold_r might fold the operand into error_mark_node and
2273 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2274 during gimplification doesn't like such casts.
 2275 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
 2276 folding of the operand should already be in the caches, and if
 2277 called from cp_fold_r it will modify it in place. */
2278 op0 = cp_fold (TREE_OPERAND (x, 0));
2279 if (op0 == error_mark_node)
2280 x = error_mark_node;
2281 break;
2282 }
d2c63826 2283
d2c63826 2284 loc = EXPR_LOCATION (x);
f7d61b1e 2285 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2286
b981525c 2287 if (code == CONVERT_EXPR
2288 && SCALAR_TYPE_P (TREE_TYPE (x))
2289 && op0 != void_node)
2290 /* During parsing we used convert_to_*_nofold; re-convert now using the
2291 folding variants, since fold() doesn't do those transformations. */
2292 x = fold (convert (TREE_TYPE (x), op0));
2293 else if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2294 {
2295 if (op0 == error_mark_node)
2296 x = error_mark_node;
2297 else
2298 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2299 }
111e415b 2300 else
2301 x = fold (x);
d2c63826 2302
2303 /* Conversion of an out-of-range value has implementation-defined
2304 behavior; the language considers it different from arithmetic
2305 overflow, which is undefined. */
2306 if (TREE_CODE (op0) == INTEGER_CST
2307 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2308 TREE_OVERFLOW (x) = false;
2309
2310 break;
2311
bcb45dab 2312 case INDIRECT_REF:
2313 /* We don't need the decltype(auto) obfuscation anymore. */
2314 if (REF_PARENTHESIZED_P (x))
2315 {
2316 tree p = maybe_undo_parenthesized_ref (x);
e71bb662 2317 return cp_fold (p);
bcb45dab 2318 }
2319 goto unary;
2320
d2c63826 2321 case ADDR_EXPR:
e885b147 2322 loc = EXPR_LOCATION (x);
2323 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2324
2325 /* Cope with user tricks that amount to offsetof. */
2326 if (op0 != error_mark_node
2327 && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2328 && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2329 {
2330 tree val = get_base_address (op0);
2331 if (val
2332 && INDIRECT_REF_P (val)
2333 && COMPLETE_TYPE_P (TREE_TYPE (val))
2334 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2335 {
2336 val = TREE_OPERAND (val, 0);
2337 STRIP_NOPS (val);
26364f3e 2338 val = maybe_constant_value (val);
e885b147 2339 if (TREE_CODE (val) == INTEGER_CST)
3c43ed34 2340 return fold_offsetof (op0, TREE_TYPE (x));
e885b147 2341 }
2342 }
2343 goto finish_unary;
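      /* An illustrative sketch, not part of GCC: the "user trick"
         meant above is the classic offsetof idiom, e.g.

           (size_t) &((struct S *) 0)->member

         whose ADDR_EXPR folds to a constant byte offset via
         fold_offsetof.  */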
2344
d2c63826 2345 case REALPART_EXPR:
2346 case IMAGPART_EXPR:
69f54cf5 2347 rval_ops = false;
e3533433 2348 /* FALLTHRU */
d2c63826 2349 case CONJ_EXPR:
2350 case FIX_TRUNC_EXPR:
2351 case FLOAT_EXPR:
2352 case NEGATE_EXPR:
2353 case ABS_EXPR:
1c67942e 2354 case ABSU_EXPR:
d2c63826 2355 case BIT_NOT_EXPR:
2356 case TRUTH_NOT_EXPR:
2357 case FIXED_CONVERT_EXPR:
bcb45dab 2358 unary:
d2c63826 2359
2360 loc = EXPR_LOCATION (x);
69f54cf5 2361 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2362
e885b147 2363 finish_unary:
d2c63826 2364 if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2365 {
2366 if (op0 == error_mark_node)
2367 x = error_mark_node;
2368 else
89f17a65 2369 {
2370 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2371 if (code == INDIRECT_REF
2372 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2373 {
2374 TREE_READONLY (x) = TREE_READONLY (org_x);
2375 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2376 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2377 }
2378 }
1b8c43ab 2379 }
111e415b 2380 else
2381 x = fold (x);
d2c63826 2382
2383 gcc_assert (TREE_CODE (x) != COND_EXPR
2384 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2385 break;
2386
a2eb1271 2387 case UNARY_PLUS_EXPR:
2388 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2389 if (op0 == error_mark_node)
2390 x = error_mark_node;
2391 else
2392 x = fold_convert (TREE_TYPE (x), op0);
2393 break;
2394
d2c63826 2395 case POSTDECREMENT_EXPR:
2396 case POSTINCREMENT_EXPR:
2397 case INIT_EXPR:
d2c63826 2398 case PREDECREMENT_EXPR:
2399 case PREINCREMENT_EXPR:
2400 case COMPOUND_EXPR:
69f54cf5 2401 case MODIFY_EXPR:
2402 rval_ops = false;
e3533433 2403 /* FALLTHRU */
d2c63826 2404 case POINTER_PLUS_EXPR:
2405 case PLUS_EXPR:
57e83b58 2406 case POINTER_DIFF_EXPR:
d2c63826 2407 case MINUS_EXPR:
2408 case MULT_EXPR:
2409 case TRUNC_DIV_EXPR:
2410 case CEIL_DIV_EXPR:
2411 case FLOOR_DIV_EXPR:
2412 case ROUND_DIV_EXPR:
2413 case TRUNC_MOD_EXPR:
2414 case CEIL_MOD_EXPR:
2415 case ROUND_MOD_EXPR:
2416 case RDIV_EXPR:
2417 case EXACT_DIV_EXPR:
2418 case MIN_EXPR:
2419 case MAX_EXPR:
2420 case LSHIFT_EXPR:
2421 case RSHIFT_EXPR:
2422 case LROTATE_EXPR:
2423 case RROTATE_EXPR:
2424 case BIT_AND_EXPR:
2425 case BIT_IOR_EXPR:
2426 case BIT_XOR_EXPR:
2427 case TRUTH_AND_EXPR:
2428 case TRUTH_ANDIF_EXPR:
2429 case TRUTH_OR_EXPR:
2430 case TRUTH_ORIF_EXPR:
2431 case TRUTH_XOR_EXPR:
2432 case LT_EXPR: case LE_EXPR:
2433 case GT_EXPR: case GE_EXPR:
2434 case EQ_EXPR: case NE_EXPR:
2435 case UNORDERED_EXPR: case ORDERED_EXPR:
2436 case UNLT_EXPR: case UNLE_EXPR:
2437 case UNGT_EXPR: case UNGE_EXPR:
2438 case UNEQ_EXPR: case LTGT_EXPR:
2439 case RANGE_EXPR: case COMPLEX_EXPR:
d2c63826 2440
2441 loc = EXPR_LOCATION (x);
69f54cf5 2442 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2443 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
d2c63826 2444
2445 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
1b8c43ab 2446 {
2447 if (op0 == error_mark_node || op1 == error_mark_node)
2448 x = error_mark_node;
2449 else
2450 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2451 }
111e415b 2452 else
2453 x = fold (x);
d2c63826 2454
4d984926 2455 /* This is only needed for -Wnonnull-compare and only if
2456 TREE_NO_WARNING (org_x), but to avoid that option affecting code
 2457 generation, we always do it. */
2458 if (COMPARISON_CLASS_P (org_x))
2cde02ad 2459 {
2460 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2461 ;
2462 else if (COMPARISON_CLASS_P (x))
4d984926 2463 {
2464 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2465 TREE_NO_WARNING (x) = 1;
2466 }
2cde02ad 2467 /* Otherwise give up on optimizing these; let the GIMPLE folders
 2468 optimize them later on. */
2469 else if (op0 != TREE_OPERAND (org_x, 0)
2470 || op1 != TREE_OPERAND (org_x, 1))
2471 {
2472 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
4d984926 2473 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2474 TREE_NO_WARNING (x) = 1;
2cde02ad 2475 }
2476 else
2477 x = org_x;
2478 }
d2c63826 2479 break;
2480
2481 case VEC_COND_EXPR:
2482 case COND_EXPR:
d2c63826 2483 loc = EXPR_LOCATION (x);
69f54cf5 2484 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
d2c63826 2485 op1 = cp_fold (TREE_OPERAND (x, 1));
2486 op2 = cp_fold (TREE_OPERAND (x, 2));
2487
7a7ca07c 2488 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2489 {
f9542e61 2490 warning_sentinel s (warn_int_in_bool_context);
7a7ca07c 2491 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2492 op1 = cp_truthvalue_conversion (op1);
2493 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2494 op2 = cp_truthvalue_conversion (op2);
2495 }
3c382e05 2496 else if (VOID_TYPE_P (TREE_TYPE (x)))
2497 {
2498 if (TREE_CODE (op0) == INTEGER_CST)
2499 {
 2500 /* If the condition is constant, fold can fold away
 2501 the COND_EXPR. Some statement-level uses of COND_EXPR have one
 2502 of the branches NULL; build empty statements to avoid a crash. */
2503 if (!op1)
2504 op1 = build_empty_stmt (loc);
2505 if (!op2)
2506 op2 = build_empty_stmt (loc);
2507 }
2508 else
2509 {
 2510 /* Otherwise, don't bother folding a void COND_EXPR, since
 2511 it can't produce a constant value. */
2512 if (op0 != TREE_OPERAND (x, 0)
2513 || op1 != TREE_OPERAND (x, 1)
2514 || op2 != TREE_OPERAND (x, 2))
2515 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2516 break;
2517 }
2518 }
7a7ca07c 2519
f6dfb86a 2520 if (op0 != TREE_OPERAND (x, 0)
2521 || op1 != TREE_OPERAND (x, 1)
2522 || op2 != TREE_OPERAND (x, 2))
1b8c43ab 2523 {
2524 if (op0 == error_mark_node
2525 || op1 == error_mark_node
2526 || op2 == error_mark_node)
2527 x = error_mark_node;
2528 else
2529 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2530 }
f6dfb86a 2531 else
d2c63826 2532 x = fold (x);
2533
bf64d98a 2534 /* A COND_EXPR might have incompatible types in branches if one or both
2535 arms are bitfields. If folding exposed such a branch, fix it up. */
ec72e2f7 2536 if (TREE_CODE (x) != code
6fa371d1 2537 && x != error_mark_node
ec72e2f7 2538 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2539 x = fold_convert (TREE_TYPE (org_x), x);
bf64d98a 2540
d2c63826 2541 break;
2542
2543 case CALL_EXPR:
2544 {
2545 int i, m, sv = optimize, nw = sv, changed = 0;
2546 tree callee = get_callee_fndecl (x);
2547
efe6a40a 2548 /* Some built-in function calls will be evaluated at compile-time in
2549 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2550 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
a0e9bfbb 2551 if (callee && fndecl_built_in_p (callee) && !optimize
d2c63826 2552 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2553 && current_function_decl
2554 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2555 nw = 1;
d2c63826 2556
18d371d3 2557 /* Defer folding __builtin_is_constant_evaluated. */
2558 if (callee
a0e9bfbb 2559 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2560 BUILT_IN_FRONTEND))
18d371d3 2561 break;
2562
d2c63826 2563 x = copy_node (x);
2564
2565 m = call_expr_nargs (x);
2566 for (i = 0; i < m; i++)
2567 {
2568 r = cp_fold (CALL_EXPR_ARG (x, i));
2569 if (r != CALL_EXPR_ARG (x, i))
1b8c43ab 2570 {
2571 if (r == error_mark_node)
2572 {
2573 x = error_mark_node;
2574 break;
2575 }
2576 changed = 1;
2577 }
d2c63826 2578 CALL_EXPR_ARG (x, i) = r;
2579 }
1b8c43ab 2580 if (x == error_mark_node)
2581 break;
d2c63826 2582
2583 optimize = nw;
2584 r = fold (x);
2585 optimize = sv;
2586
2587 if (TREE_CODE (r) != CALL_EXPR)
2588 {
2589 x = cp_fold (r);
2590 break;
2591 }
2592
2593 optimize = nw;
2594
efe6a40a 2595 /* Invoke maybe_constant_value for functions declared
2596 constexpr and not called with AGGR_INIT_EXPRs.
d2c63826 2597 TODO:
efe6a40a 2598 Do constexpr expansion of expressions where the call itself is not
2599 constant, but the call followed by an INDIRECT_REF is. */
29684344 2600 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2601 && !flag_no_inline)
d9cfff22 2602 r = maybe_constant_value (x);
d2c63826 2603 optimize = sv;
2604
2605 if (TREE_CODE (r) != CALL_EXPR)
2606 {
d9cfff22 2607 if (DECL_CONSTRUCTOR_P (callee))
2608 {
2609 loc = EXPR_LOCATION (x);
2610 tree s = build_fold_indirect_ref_loc (loc,
2611 CALL_EXPR_ARG (x, 0));
2612 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2613 }
d2c63826 2614 x = r;
2615 break;
2616 }
2617
2618 if (!changed)
2619 x = org_x;
2620 break;
2621 }
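
      /* An illustrative sketch, not part of GCC, of the constexpr call
         folding handled above:

           constexpr int sq (int i) { return i * i; }
           int n = sq (4);

         maybe_constant_value replaces the CALL_EXPR sq (4) with the
         INTEGER_CST 16.  */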
2622
2623 case CONSTRUCTOR:
2624 {
2625 unsigned i;
2626 constructor_elt *p;
2627 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
41a5cb89 2628 vec<constructor_elt, va_gc> *nelts = NULL;
d2c63826 2629 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
41a5cb89 2630 {
2631 tree op = cp_fold (p->value);
41a5cb89 2632 if (op != p->value)
1b8c43ab 2633 {
2634 if (op == error_mark_node)
2635 {
2636 x = error_mark_node;
4ecaaab2 2637 vec_free (nelts);
1b8c43ab 2638 break;
2639 }
4ecaaab2 2640 if (nelts == NULL)
2641 nelts = elts->copy ();
2642 (*nelts)[i].value = op;
1b8c43ab 2643 }
41a5cb89 2644 }
4ecaaab2 2645 if (nelts)
7604a798 2646 {
2647 x = build_constructor (TREE_TYPE (x), nelts);
2648 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2649 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2650 }
f82dc839 2651 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2652 x = fold (x);
d2c63826 2653 break;
2654 }
2655 case TREE_VEC:
2656 {
2657 bool changed = false;
2658 vec<tree, va_gc> *vec = make_tree_vector ();
2659 int i, n = TREE_VEC_LENGTH (x);
2660 vec_safe_reserve (vec, n);
2661
2662 for (i = 0; i < n; i++)
2663 {
2664 tree op = cp_fold (TREE_VEC_ELT (x, i));
2665 vec->quick_push (op);
2666 if (op != TREE_VEC_ELT (x, i))
2667 changed = true;
2668 }
2669
2670 if (changed)
2671 {
2672 r = copy_node (x);
2673 for (i = 0; i < n; i++)
2674 TREE_VEC_ELT (r, i) = (*vec)[i];
2675 x = r;
2676 }
2677
2678 release_tree_vector (vec);
2679 }
2680
2681 break;
2682
2683 case ARRAY_REF:
2684 case ARRAY_RANGE_REF:
2685
2686 loc = EXPR_LOCATION (x);
2687 op0 = cp_fold (TREE_OPERAND (x, 0));
2688 op1 = cp_fold (TREE_OPERAND (x, 1));
2689 op2 = cp_fold (TREE_OPERAND (x, 2));
2690 op3 = cp_fold (TREE_OPERAND (x, 3));
2691
1b8c43ab 2692 if (op0 != TREE_OPERAND (x, 0)
2693 || op1 != TREE_OPERAND (x, 1)
2694 || op2 != TREE_OPERAND (x, 2)
2695 || op3 != TREE_OPERAND (x, 3))
2696 {
2697 if (op0 == error_mark_node
2698 || op1 == error_mark_node
2699 || op2 == error_mark_node
2700 || op3 == error_mark_node)
2701 x = error_mark_node;
2702 else
89f17a65 2703 {
2704 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2705 TREE_READONLY (x) = TREE_READONLY (org_x);
2706 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2707 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2708 }
1b8c43ab 2709 }
d2c63826 2710
2711 x = fold (x);
2712 break;
2713
2af642bf 2714 case SAVE_EXPR:
2715 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 2716 folding, evaluates to an invariant. In that case there is no
 2717 need to wrap the folded tree in a SAVE_EXPR. */
2718 r = cp_fold (TREE_OPERAND (x, 0));
2719 if (tree_invariant_p (r))
2720 x = r;
2721 break;
2722
d2c63826 2723 default:
2724 return org_x;
2725 }
2726
2a655a4c 2727 fold_cache->put (org_x, x);
d2c63826 2728 /* Prevent that we try to fold an already folded result again. */
2729 if (x != org_x)
2a655a4c 2730 fold_cache->put (x, x);
d2c63826 2731
2732 return x;
2733}
2734
863c62e0 2735/* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
2736
2737tree
2738lookup_hotness_attribute (tree list)
2739{
2740 for (; list; list = TREE_CHAIN (list))
2741 {
2742 tree name = get_attribute_name (list);
2743 if (is_attribute_p ("hot", name)
2744 || is_attribute_p ("cold", name)
2745 || is_attribute_p ("likely", name)
2746 || is_attribute_p ("unlikely", name))
2747 break;
2748 }
2749 return list;
2750}
2751
 2752/* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST. */
2753
2754static tree
2755remove_hotness_attribute (tree list)
2756{
2757 list = remove_attribute ("hot", list);
2758 list = remove_attribute ("cold", list);
2759 list = remove_attribute ("likely", list);
2760 list = remove_attribute ("unlikely", list);
2761 return list;
2762}
2763
2764/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2765 PREDICT_EXPR. */
2766
2767tree
a1e1b603 2768process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
863c62e0 2769{
2770 if (std_attrs == error_mark_node)
2771 return std_attrs;
2772 if (tree attr = lookup_hotness_attribute (std_attrs))
2773 {
2774 tree name = get_attribute_name (attr);
2775 bool hot = (is_attribute_p ("hot", name)
2776 || is_attribute_p ("likely", name));
2777 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2778 hot ? TAKEN : NOT_TAKEN);
a1e1b603 2779 SET_EXPR_LOCATION (pred, attrs_loc);
863c62e0 2780 add_stmt (pred);
2781 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2782 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2783 get_attribute_name (other), name);
2784 std_attrs = remove_hotness_attribute (std_attrs);
2785 }
2786 return std_attrs;
2787}
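
/* An illustrative sketch, not part of GCC: the attributes handled above
   come from statements such as

     if (p == nullptr) [[unlikely]]
       return -1;

   for which a PREDICT_EXPR with NOT_TAKEN is added before the
   statement.  */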
2788
d2c63826 2789#include "gt-cp-cp-gimplify.h"