/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

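  /* If both branches begin with the same hot/cold label prediction, the
     annotation is useless: marking both arms likely (or both unlikely)
     is almost certainly a mistake, so warn about it.  */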
  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
    protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
        {
          tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
          SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
          append_to_statement_list (d, &stmt_list);
        }
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
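  /* The PREDICT_EXPR has no side effects, so plain append_to_statement_list
     would drop it; force it onto the list.  */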
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
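  /* For an OMP_TASKLOOP the clauses have already been walked by the
     OMP_TASKLOOP case in cp_genericize_r, so don't walk them again.  */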
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type, /*ignore_vptr*/true);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

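      /* A UNARY_PLUS_EXPR is a no-op beyond any promotion the front end
         already applied; lower it to a plain conversion of its operand
         to the result type.  */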
    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
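          /* Once we are gimplifying we are generating runtime code, so a
             call to the front-end builtin is_constant_evaluated can no
             longer be in a manifestly constant-evaluated context; fold it
             to false.  */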
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl
              && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
                                    BUILT_IN_FRONTEND))
            *expr_p = boolean_false_node;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

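/* Return true if T is a parameter or return value that is passed by
   invisible reference, i.e. as a hidden address.  */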
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: the folding of non-OMP cases is something to move into
   the middle end.  For now most foldings are available only on
   GENERIC in fold-const, so we need to perform this before the
   transformation to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were already walked the first
         time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
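      /* Fold only the operands of the OMP loop condition and increment
         expressions below: later OMP lowering depends on the overall
         shape of these trees, so the comparisons and increments
         themselves must not be folded into something else.  */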
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          TREE_USED (h->to) |= TREE_USED (stmt);
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "%<throw%> will always call %<terminate%>")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to %<noexcept%>");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this %<throw%> will call %<terminate%> "
                          "because destructors default to %<noexcept%>");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
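      /* A CONSTRUCTOR with CONSTRUCTOR_PLACEHOLDER_BOUNDARY set contains
         PLACEHOLDER_EXPRs referring to the temporary itself, so eliding
         the TARGET_EXPR would break them; mark it NO_ELIDE.  */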
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
         improve any optimizations in that case, just break UB code.
         Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
         UBSan covers this with ubsan_instrument_return above where sufficient
         information is provided, while the __builtin_unreachable () below
         if return sanitization is disabled will just result in hard to
         understand runtime error without location.  */
      && (!optimize
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
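            /* Skip over any trailing debug marker statements to find the
               last real statement in the list.  */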
8a42a2fb 1702 while (!tsi_end_p (i))
1703 {
1704 tree p = tsi_stmt (i);
1705 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1706 break;
1707 tsi_prev (&i);
1708 }
020bc656 1709 if (!tsi_end_p (i))
1710 {
1711 t = tsi_stmt (i);
1712 continue;
1713 }
1714 }
1715 break;
1716 case RETURN_EXPR:
1717 return;
1718 default:
1719 break;
1720 }
1721 break;
1722 }
1723 if (t == NULL_TREE)
1724 return;
ce7e2f0e 1725 tree *p = &DECL_SAVED_TREE (fndecl);
1726 if (TREE_CODE (*p) == BIND_EXPR)
1727 p = &BIND_EXPR_BODY (*p);
2fb20ba2 1728
1729 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1730 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1731 t = ubsan_instrument_return (loc);
1732 else
1733 {
1734 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1735 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1736 }
1737
ce7e2f0e 1738 append_to_statement_list (t, p);
020bc656 1739}
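
/* An illustrative sketch (added, not part of the original source): for

     int f (int x) { if (x) return 1; }

   the walk above finds that the body does not end in a RETURN_EXPR, so
   depending on flags either a UBSan missing-return check (built by
   ubsan_instrument_return) or a __builtin_unreachable () call is
   appended to the outermost BIND_EXPR_BODY, firing only if control
   actually falls off the end of the function.  */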
1740
dddab69e 1741void
1742cp_genericize (tree fndecl)
1743{
dddcebdc 1744 tree t;
dddcebdc 1745
1746 /* Fix up the types of parms passed by invisible reference. */
1767a056 1747 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1fe46df1 1748 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1749 {
1750 /* If a function's arguments are copied to create a thunk,
1751 then DECL_BY_REFERENCE will be set -- but the type of the
1752 argument will be a pointer type, so we will never get
1753 here. */
1754 gcc_assert (!DECL_BY_REFERENCE (t));
1755 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1756 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1757 DECL_BY_REFERENCE (t) = 1;
1758 TREE_ADDRESSABLE (t) = 0;
1759 relayout_decl (t);
1760 }
dddcebdc 1761
806e4c12 1762 /* Do the same for the return value. */
1763 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1764 {
1765 t = DECL_RESULT (fndecl);
1766 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1767 DECL_BY_REFERENCE (t) = 1;
1768 TREE_ADDRESSABLE (t) = 0;
1769 relayout_decl (t);
ae294470 1770 if (DECL_NAME (t))
1771 {
1772 /* Adjust DECL_VALUE_EXPR of the original var. */
1773 tree outer = outer_curly_brace_block (current_function_decl);
1774 tree var;
1775
1776 if (outer)
1777 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1778 if (VAR_P (var)
1779 && DECL_NAME (t) == DECL_NAME (var)
ae294470 1780 && DECL_HAS_VALUE_EXPR_P (var)
1781 && DECL_VALUE_EXPR (var) == t)
1782 {
1783 tree val = convert_from_reference (t);
1784 SET_DECL_VALUE_EXPR (var, val);
1785 break;
1786 }
1787 }
806e4c12 1788 }
1789
dddcebdc 1790 /* If we're a clone, the body is already GIMPLE. */
1791 if (DECL_CLONED_FUNCTION_P (fndecl))
1792 return;
1793
df0c563f 1794 /* Allow cp_genericize calls to be nested. */
1795 tree save_bc_label[2];
1796 save_bc_label[bc_break] = bc_label[bc_break];
1797 save_bc_label[bc_continue] = bc_label[bc_continue];
1798 bc_label[bc_break] = NULL_TREE;
1799 bc_label[bc_continue] = NULL_TREE;
1800
dddcebdc 1801 /* We do want to see every occurrence of the parms, so we can't just use
1802 walk_tree's hash functionality. */
a0168bf5 1803 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
dddab69e 1804
2fb20ba2 1805 cp_maybe_instrument_return (fndecl);
020bc656 1806
dddab69e 1807 /* Do everything else. */
1808 c_genericize (fndecl);
8487df40 1809
1810 gcc_assert (bc_label[bc_break] == NULL);
1811 gcc_assert (bc_label[bc_continue] == NULL);
df0c563f 1812 bc_label[bc_break] = save_bc_label[bc_break];
1813 bc_label[bc_continue] = save_bc_label[bc_continue];
8487df40 1814}
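
/* A hedged example (added for exposition) of the invisiref fixup above:

     struct S { S (const S &); int i; };
     int f (S s) { return s.i; }

   S is TREE_ADDRESSABLE (nontrivial copy constructor), so the PARM_DECL
   `s' is given its DECL_ARG_TYPE (a reference type), DECL_BY_REFERENCE
   is set, and uses of `s' in the body are dereferenced during the
   cp_genericize_r walk.  */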
1815\f
1816/* Build code to apply FN to each member of ARG1 and ARG2. FN may be
 1817 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1818 actually only takes one argument. */
1819
1820static tree
1821cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1822{
c1be427d 1823 tree defparm, parm, t;
d01f58f9 1824 int i = 0;
1825 int nargs;
1826 tree *argarray;
2f2c591f 1827
8487df40 1828 if (fn == NULL)
1829 return NULL;
1830
d01f58f9 1831 nargs = list_length (DECL_ARGUMENTS (fn));
fd70b918 1832 argarray = XALLOCAVEC (tree, nargs);
d01f58f9 1833
2f2c591f 1834 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1835 if (arg2)
1836 defparm = TREE_CHAIN (defparm);
1837
c06d7bdd 1838 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
8487df40 1839 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1840 {
1841 tree inner_type = TREE_TYPE (arg1);
1842 tree start1, end1, p1;
1843 tree start2 = NULL, p2 = NULL;
c1be427d 1844 tree ret = NULL, lab;
8487df40 1845
1846 start1 = arg1;
1847 start2 = arg2;
1848 do
1849 {
1850 inner_type = TREE_TYPE (inner_type);
1851 start1 = build4 (ARRAY_REF, inner_type, start1,
1852 size_zero_node, NULL, NULL);
1853 if (arg2)
1854 start2 = build4 (ARRAY_REF, inner_type, start2,
1855 size_zero_node, NULL, NULL);
1856 }
1857 while (TREE_CODE (inner_type) == ARRAY_TYPE);
389dd41b 1858 start1 = build_fold_addr_expr_loc (input_location, start1);
8487df40 1859 if (arg2)
389dd41b 1860 start2 = build_fold_addr_expr_loc (input_location, start2);
8487df40 1861
1862 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2cc66f2a 1863 end1 = fold_build_pointer_plus (start1, end1);
8487df40 1864
f9e245b2 1865 p1 = create_tmp_var (TREE_TYPE (start1));
75a70cf9 1866 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
8487df40 1867 append_to_statement_list (t, &ret);
1868
1869 if (arg2)
1870 {
f9e245b2 1871 p2 = create_tmp_var (TREE_TYPE (start2));
75a70cf9 1872 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
8487df40 1873 append_to_statement_list (t, &ret);
1874 }
1875
e60a6f7b 1876 lab = create_artificial_label (input_location);
8487df40 1877 t = build1 (LABEL_EXPR, void_type_node, lab);
1878 append_to_statement_list (t, &ret);
1879
d01f58f9 1880 argarray[i++] = p1;
8487df40 1881 if (arg2)
d01f58f9 1882 argarray[i++] = p2;
2f2c591f 1883 /* Handle default arguments. */
93bb78b6 1884 for (parm = defparm; parm && parm != void_list_node;
1885 parm = TREE_CHAIN (parm), i++)
d01f58f9 1886 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1887 TREE_PURPOSE (parm), fn,
1888 i - is_method, tf_warning_or_error);
d01f58f9 1889 t = build_call_a (fn, i, argarray);
c1be427d 1890 t = fold_convert (void_type_node, t);
1891 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1892 append_to_statement_list (t, &ret);
1893
2cc66f2a 1894 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1895 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
8487df40 1896 append_to_statement_list (t, &ret);
1897
1898 if (arg2)
1899 {
2cc66f2a 1900 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1901 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
8487df40 1902 append_to_statement_list (t, &ret);
1903 }
1904
1905 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1906 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1907 append_to_statement_list (t, &ret);
1908
1909 return ret;
1910 }
1911 else
1912 {
389dd41b 1913 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
8487df40 1914 if (arg2)
389dd41b 1915 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2f2c591f 1916 /* Handle default arguments. */
93bb78b6 1917 for (parm = defparm; parm && parm != void_list_node;
d01f58f9 1918 parm = TREE_CHAIN (parm), i++)
1919 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1920 TREE_PURPOSE (parm), fn,
1921 i - is_method, tf_warning_or_error);
c1be427d 1922 t = build_call_a (fn, i, argarray);
1923 t = fold_convert (void_type_node, t);
1924 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1925 }
1926}
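
/* Roughly (an illustrative sketch, not from the source), for arrays the
   function above emits

     p1 = &arg1[0]...[0];  p2 = &arg2[0]...[0];
     end1 = p1 + sizeof (arg1);
   lab:
     FN (p1, p2, <default args>);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;

   i.e. FN is applied element by element over the flattened array.  */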
1927
1928/* Return code to initialize DECL with its default constructor, or
1929 NULL if there's nothing to do. */
1930
1931tree
a49c5913 1932cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
8487df40 1933{
1934 tree info = CP_OMP_CLAUSE_INFO (clause);
1935 tree ret = NULL;
1936
1937 if (info)
1938 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1939
1940 return ret;
1941}
1942
1943/* Return code to initialize DST with a copy constructor from SRC. */
1944
1945tree
1946cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1947{
1948 tree info = CP_OMP_CLAUSE_INFO (clause);
1949 tree ret = NULL;
1950
1951 if (info)
1952 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1953 if (ret == NULL)
75a70cf9 1954 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1955
1956 return ret;
1957}
1958
1959/* Similarly, except use an assignment operator instead. */
1960
1961tree
1962cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1963{
1964 tree info = CP_OMP_CLAUSE_INFO (clause);
1965 tree ret = NULL;
1966
1967 if (info)
1968 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1969 if (ret == NULL)
75a70cf9 1970 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1971
1972 return ret;
1973}
1974
1975/* Return code to destroy DECL. */
1976
1977tree
1978cxx_omp_clause_dtor (tree clause, tree decl)
1979{
1980 tree info = CP_OMP_CLAUSE_INFO (clause);
1981 tree ret = NULL;
1982
1983 if (info)
1984 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1985
1986 return ret;
dddab69e 1987}
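
/* Note added for exposition: the CP_OMP_CLAUSE_INFO TREE_VEC consulted
   by the four functions above is filled in by cxx_omp_create_clause_info;
   as used here, element 0 holds the (default or copy) constructor,
   element 1 the destructor, and element 2 the assignment operator.  */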
df2c34fc 1988
1989/* True if OpenMP should privatize what this DECL points to rather
1990 than the DECL itself. */
1991
1992bool
9f627b1a 1993cxx_omp_privatize_by_reference (const_tree decl)
df2c34fc 1994{
90ad495b 1995 return (TYPE_REF_P (TREE_TYPE (decl))
bc7bff74 1996 || is_invisiref_parm (decl));
df2c34fc 1997}
fd6481cf 1998
2169f33b 1999/* Return true if DECL is a const-qualified variable whose type has no mutable members. */
2000bool
2001cxx_omp_const_qual_no_mutable (tree decl)
fd6481cf 2002{
2169f33b 2003 tree type = TREE_TYPE (decl);
90ad495b 2004 if (TYPE_REF_P (type))
fd6481cf 2005 {
2006 if (!is_invisiref_parm (decl))
2169f33b 2007 return false;
fd6481cf 2008 type = TREE_TYPE (type);
2009
2010 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2011 {
2012 /* NVR doesn't preserve const qualification of the
2013 variable's type. */
2014 tree outer = outer_curly_brace_block (current_function_decl);
2015 tree var;
2016
2017 if (outer)
1767a056 2018 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 2019 if (VAR_P (var)
2020 && DECL_NAME (decl) == DECL_NAME (var)
fd6481cf 2021 && (TYPE_MAIN_VARIANT (type)
2022 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2023 {
2024 if (TYPE_READONLY (TREE_TYPE (var)))
2025 type = TREE_TYPE (var);
2026 break;
2027 }
2028 }
2029 }
2030
2031 if (type == error_mark_node)
2169f33b 2032 return false;
fd6481cf 2033
2034 /* Variables with const-qualified type having no mutable member
2035 are predetermined shared. */
2036 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2169f33b 2037 return true;
2038
2039 return false;
2040}
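
/* For example (illustrative):

     struct A { int i; };             // const A: no mutable members
     struct B { mutable int i; };     // const B: has a mutable member

   a `const A' variable satisfies this predicate and is predetermined
   shared, while a `const B' variable does not, since its member can
   still be modified through the shared copy.  */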
2041
 2042/* True if the OpenMP sharing attribute of DECL is predetermined. */
2043
2044enum omp_clause_default_kind
b16a5119 2045cxx_omp_predetermined_sharing_1 (tree decl)
2169f33b 2046{
2047 /* Static data members are predetermined shared. */
2048 if (TREE_STATIC (decl))
2049 {
2050 tree ctx = CP_DECL_CONTEXT (decl);
2051 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2052 return OMP_CLAUSE_DEFAULT_SHARED;
2053 }
2054
7e5a76c8 2055 /* `this' may not be specified in data-sharing clauses, but we
 2056 still need to predetermine it as firstprivate. */
2057 if (decl == current_class_ptr)
2058 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
fd6481cf 2059
2060 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2061}
2062
b16a5119 2063/* Likewise, but also include the artificial vars. We don't want to
 2064 disallow the artificial vars from being mentioned in explicit
 2065 clauses, as we use them e.g. for loop constructs with random
 2066 access iterators other than pointers, but during gimplification
 2067 we want to treat them as predetermined. */
2068
2069enum omp_clause_default_kind
2070cxx_omp_predetermined_sharing (tree decl)
2071{
2072 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2073 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2074 return ret;
2075
 2076 /* Predetermine artificial variables holding integral values; those
 2077 are usually the result of gimplify_one_sizepos or SAVE_EXPR
 2078 gimplification. */
2079 if (VAR_P (decl)
2080 && DECL_ARTIFICIAL (decl)
2081 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2082 && !(DECL_LANG_SPECIFIC (decl)
2083 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2084 return OMP_CLAUSE_DEFAULT_SHARED;
2085
2086 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2087}
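
/* E.g. (a hedged illustration): an artificial integral temporary such
   as one created by gimplify_one_sizepos for a variable-length array
   bound is treated as shared here, even though no explicit data-sharing
   clause mentions it.  */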
2088
fd6481cf 2089/* Finalize an implicitly determined clause. */
2090
2091void
691447ab 2092cxx_omp_finish_clause (tree c, gimple_seq *)
fd6481cf 2093{
2094 tree decl, inner_type;
2095 bool make_shared = false;
2096
2097 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2098 return;
2099
2100 decl = OMP_CLAUSE_DECL (c);
2101 decl = require_complete_type (decl);
2102 inner_type = TREE_TYPE (decl);
2103 if (decl == error_mark_node)
2104 make_shared = true;
90ad495b 2105 else if (TYPE_REF_P (TREE_TYPE (decl)))
43895be5 2106 inner_type = TREE_TYPE (inner_type);
fd6481cf 2107
2108 /* We're interested in the base element, not arrays. */
2109 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2110 inner_type = TREE_TYPE (inner_type);
2111
2112 /* Check for special function availability by building a call to one.
2113 Save the results, because later we won't be in the right context
2114 for making these queries. */
2115 if (!make_shared
2116 && CLASS_TYPE_P (inner_type)
bc7bff74 2117 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
fd6481cf 2118 make_shared = true;
2119
2120 if (make_shared)
1c3f8c56 2121 {
2122 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2123 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2124 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2125 }
fd6481cf 2126}
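
/* Sketch of the intent above (added, not authoritative): for an
   implicitly firstprivate variable of class type, the copy constructor
   and destructor are looked up now, while we are still in the right
   context; if cxx_omp_create_clause_info reports a problem, the clause
   is downgraded to shared instead.  */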
43895be5 2127
2128/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 2129 disregarded in an OpenMP construct, because it is going to be
2130 remapped during OpenMP lowering. SHARED is true if DECL
2131 is going to be shared, false if it is going to be privatized. */
2132
2133bool
2134cxx_omp_disregard_value_expr (tree decl, bool shared)
2135{
2136 return !shared
2137 && VAR_P (decl)
2138 && DECL_HAS_VALUE_EXPR_P (decl)
2139 && DECL_ARTIFICIAL (decl)
2140 && DECL_LANG_SPECIFIC (decl)
2141 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2142}
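
/* For instance (illustrative): privatizing a non-static data member
   uses an artificial proxy VAR_DECL whose DECL_VALUE_EXPR is
   `this->member'; when such a proxy is privatized, the value expr must
   be ignored, since OpenMP lowering remaps the proxy itself.  */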
d2c63826 2143
69f54cf5 2144/* Fold expression X which is used as an rvalue if RVAL is true. */
2145
4749c4ac 2146tree
69f54cf5 2147cp_fold_maybe_rvalue (tree x, bool rval)
2148{
e71bb662 2149 while (true)
69f54cf5 2150 {
e71bb662 2151 x = cp_fold (x);
0c2ebbc4 2152 if (rval)
2153 x = mark_rvalue_use (x);
ac6641ca 2154 if (rval && DECL_P (x)
90ad495b 2155 && !TYPE_REF_P (TREE_TYPE (x)))
e71bb662 2156 {
2157 tree v = decl_constant_value (x);
2158 if (v != x && v != error_mark_node)
2159 {
2160 x = v;
2161 continue;
2162 }
2163 }
2164 break;
69f54cf5 2165 }
e71bb662 2166 return x;
69f54cf5 2167}
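
/* Example (added): given `const int n = 4;', cp_fold_maybe_rvalue on
   `n' with RVAL true folds, marks the rvalue use, and then
   decl_constant_value substitutes the INTEGER_CST 4; the loop retries
   in case the substituted value folds further.  */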
2168
2169/* Fold expression X which is used as an rvalue. */
2170
4749c4ac 2171tree
69f54cf5 2172cp_fold_rvalue (tree x)
2173{
2174 return cp_fold_maybe_rvalue (x, true);
2175}
2176
d93ee6f8 2177/* Perform folding on expression X. */
2178
2179tree
2180cp_fully_fold (tree x)
2181{
2182 if (processing_template_decl)
2183 return x;
2184 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2185 have to call both. */
2186 if (cxx_dialect >= cxx11)
5f9e77dd 2187 {
2188 x = maybe_constant_value (x);
2189 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2190 a TARGET_EXPR; undo that here. */
2191 if (TREE_CODE (x) == TARGET_EXPR)
2192 x = TARGET_EXPR_INITIAL (x);
2193 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2194 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2195 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2196 x = TREE_OPERAND (x, 0);
2197 }
d93ee6f8 2198 return cp_fold_rvalue (x);
2199}
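
/* A short illustration (an assumption, not from the source): for an
   initializer like `A{1, 2}' that maybe_constant_value reduces to a
   TARGET_EXPR wrapping a constant CONSTRUCTOR, the code above strips
   the TARGET_EXPR (or a same-type VIEW_CONVERT_EXPR) so callers see
   the bare CONSTRUCTOR.  */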
2200
d560f985 2201/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2202 in some cases. */
2203
2204tree
2205cp_fully_fold_init (tree x)
2206{
2207 if (processing_template_decl)
2208 return x;
2209 x = cp_fully_fold (x);
2210 hash_set<tree> pset;
2211 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2212 return x;
2213}
2214
da562e32 2215/* c-common interface to cp_fold. If IN_INIT, this is in a static
 2216 initializer and certain changes are (or rather should be -- FIXME)
 2217 made to the folding done. We never touch MAYBE_CONST, as it is
 2218 only used for the C front end's C_MAYBE_CONST_EXPR. */
2219
2220tree
69cd03b2 2221c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
da562e32 2222{
69cd03b2 2223 return cp_fold_maybe_rvalue (x, !lval);
da562e32 2224}
2225
2a655a4c 2226static GTY((deletable)) hash_map<tree, tree> *fold_cache;
d2c63826 2227
a0c919f7 2228/* Dispose of the whole FOLD_CACHE. */
2229
2230void
2231clear_fold_cache (void)
2232{
2a655a4c 2233 if (fold_cache != NULL)
2234 fold_cache->empty ();
a0c919f7 2235}
2236
d2c63826 2237/* This function tries to fold an expression X.
2238 To avoid combinatorial explosion, folding results are kept in fold_cache.
d76863c8 2239 If X is invalid, we don't fold at all.
d2c63826 2240 For performance reasons we don't cache expressions representing a
2241 declaration or constant.
 2242 Returns X or its folded variant. */
2243
2244static tree
2245cp_fold (tree x)
2246{
2247 tree op0, op1, op2, op3;
2248 tree org_x = x, r = NULL_TREE;
2249 enum tree_code code;
2250 location_t loc;
69f54cf5 2251 bool rval_ops = true;
d2c63826 2252
8f559c6e 2253 if (!x || x == error_mark_node)
d2c63826 2254 return x;
2255
d76863c8 2256 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
d2c63826 2257 return x;
2258
2259 /* Don't bother to cache DECLs or constants. */
2260 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2261 return x;
2262
2a655a4c 2263 if (fold_cache == NULL)
2264 fold_cache = hash_map<tree, tree>::create_ggc (101);
2265
2266 if (tree *cached = fold_cache->get (x))
2267 return *cached;
d2c63826 2268
2269 code = TREE_CODE (x);
2270 switch (code)
2271 {
d1cd4a64 2272 case CLEANUP_POINT_EXPR:
2273 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2274 effects. */
2275 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2276 if (!TREE_SIDE_EFFECTS (r))
2277 x = r;
2278 break;
2279
d2c63826 2280 case SIZEOF_EXPR:
2281 x = fold_sizeof_expr (x);
2282 break;
2283
2284 case VIEW_CONVERT_EXPR:
69f54cf5 2285 rval_ops = false;
e3533433 2286 /* FALLTHRU */
d2c63826 2287 case CONVERT_EXPR:
2288 case NOP_EXPR:
2289 case NON_LVALUE_EXPR:
2290
2291 if (VOID_TYPE_P (TREE_TYPE (x)))
ca29c574 2292 {
2293 /* This is just to make sure we don't end up with casts to
2294 void from error_mark_node. If we just return x, then
2295 cp_fold_r might fold the operand into error_mark_node and
2296 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2297 during gimplification doesn't like such casts.
 2298 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
 2299 folding of the operand should already be in the caches, and
 2300 cp_fold_r will modify the tree in place if need be. */
2301 op0 = cp_fold (TREE_OPERAND (x, 0));
2302 if (op0 == error_mark_node)
2303 x = error_mark_node;
2304 break;
2305 }
d2c63826 2306
d2c63826 2307 loc = EXPR_LOCATION (x);
f7d61b1e 2308 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2309
b981525c 2310 if (code == CONVERT_EXPR
2311 && SCALAR_TYPE_P (TREE_TYPE (x))
2312 && op0 != void_node)
2313 /* During parsing we used convert_to_*_nofold; re-convert now using the
2314 folding variants, since fold() doesn't do those transformations. */
2315 x = fold (convert (TREE_TYPE (x), op0));
2316 else if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2317 {
2318 if (op0 == error_mark_node)
2319 x = error_mark_node;
2320 else
2321 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2322 }
111e415b 2323 else
2324 x = fold (x);
d2c63826 2325
2326 /* Conversion of an out-of-range value has implementation-defined
2327 behavior; the language considers it different from arithmetic
2328 overflow, which is undefined. */
2329 if (TREE_CODE (op0) == INTEGER_CST
2330 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2331 TREE_OVERFLOW (x) = false;
2332
2333 break;
2334
bcb45dab 2335 case INDIRECT_REF:
2336 /* We don't need the decltype(auto) obfuscation anymore. */
2337 if (REF_PARENTHESIZED_P (x))
2338 {
2339 tree p = maybe_undo_parenthesized_ref (x);
e71bb662 2340 return cp_fold (p);
bcb45dab 2341 }
2342 goto unary;
2343
d2c63826 2344 case ADDR_EXPR:
e885b147 2345 loc = EXPR_LOCATION (x);
2346 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2347
2348 /* Cope with user tricks that amount to offsetof. */
2349 if (op0 != error_mark_node
412b8947 2350 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
e885b147 2351 {
2352 tree val = get_base_address (op0);
2353 if (val
2354 && INDIRECT_REF_P (val)
2355 && COMPLETE_TYPE_P (TREE_TYPE (val))
2356 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2357 {
2358 val = TREE_OPERAND (val, 0);
2359 STRIP_NOPS (val);
26364f3e 2360 val = maybe_constant_value (val);
e885b147 2361 if (TREE_CODE (val) == INTEGER_CST)
3c43ed34 2362 return fold_offsetof (op0, TREE_TYPE (x));
e885b147 2363 }
2364 }
2365 goto finish_unary;
2366
d2c63826 2367 case REALPART_EXPR:
2368 case IMAGPART_EXPR:
69f54cf5 2369 rval_ops = false;
e3533433 2370 /* FALLTHRU */
d2c63826 2371 case CONJ_EXPR:
2372 case FIX_TRUNC_EXPR:
2373 case FLOAT_EXPR:
2374 case NEGATE_EXPR:
2375 case ABS_EXPR:
1c67942e 2376 case ABSU_EXPR:
d2c63826 2377 case BIT_NOT_EXPR:
2378 case TRUTH_NOT_EXPR:
2379 case FIXED_CONVERT_EXPR:
bcb45dab 2380 unary:
d2c63826 2381
2382 loc = EXPR_LOCATION (x);
69f54cf5 2383 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2384
e885b147 2385 finish_unary:
d2c63826 2386 if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2387 {
2388 if (op0 == error_mark_node)
2389 x = error_mark_node;
2390 else
89f17a65 2391 {
2392 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2393 if (code == INDIRECT_REF
2394 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2395 {
2396 TREE_READONLY (x) = TREE_READONLY (org_x);
2397 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2398 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2399 }
2400 }
1b8c43ab 2401 }
111e415b 2402 else
2403 x = fold (x);
d2c63826 2404
2405 gcc_assert (TREE_CODE (x) != COND_EXPR
2406 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2407 break;
2408
a2eb1271 2409 case UNARY_PLUS_EXPR:
2410 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2411 if (op0 == error_mark_node)
2412 x = error_mark_node;
2413 else
2414 x = fold_convert (TREE_TYPE (x), op0);
2415 break;
2416
d2c63826 2417 case POSTDECREMENT_EXPR:
2418 case POSTINCREMENT_EXPR:
2419 case INIT_EXPR:
d2c63826 2420 case PREDECREMENT_EXPR:
2421 case PREINCREMENT_EXPR:
2422 case COMPOUND_EXPR:
69f54cf5 2423 case MODIFY_EXPR:
2424 rval_ops = false;
e3533433 2425 /* FALLTHRU */
d2c63826 2426 case POINTER_PLUS_EXPR:
2427 case PLUS_EXPR:
57e83b58 2428 case POINTER_DIFF_EXPR:
d2c63826 2429 case MINUS_EXPR:
2430 case MULT_EXPR:
2431 case TRUNC_DIV_EXPR:
2432 case CEIL_DIV_EXPR:
2433 case FLOOR_DIV_EXPR:
2434 case ROUND_DIV_EXPR:
2435 case TRUNC_MOD_EXPR:
2436 case CEIL_MOD_EXPR:
2437 case ROUND_MOD_EXPR:
2438 case RDIV_EXPR:
2439 case EXACT_DIV_EXPR:
2440 case MIN_EXPR:
2441 case MAX_EXPR:
2442 case LSHIFT_EXPR:
2443 case RSHIFT_EXPR:
2444 case LROTATE_EXPR:
2445 case RROTATE_EXPR:
2446 case BIT_AND_EXPR:
2447 case BIT_IOR_EXPR:
2448 case BIT_XOR_EXPR:
2449 case TRUTH_AND_EXPR:
2450 case TRUTH_ANDIF_EXPR:
2451 case TRUTH_OR_EXPR:
2452 case TRUTH_ORIF_EXPR:
2453 case TRUTH_XOR_EXPR:
2454 case LT_EXPR: case LE_EXPR:
2455 case GT_EXPR: case GE_EXPR:
2456 case EQ_EXPR: case NE_EXPR:
2457 case UNORDERED_EXPR: case ORDERED_EXPR:
2458 case UNLT_EXPR: case UNLE_EXPR:
2459 case UNGT_EXPR: case UNGE_EXPR:
2460 case UNEQ_EXPR: case LTGT_EXPR:
2461 case RANGE_EXPR: case COMPLEX_EXPR:
d2c63826 2462
2463 loc = EXPR_LOCATION (x);
69f54cf5 2464 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2465 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
d2c63826 2466
2467 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
1b8c43ab 2468 {
2469 if (op0 == error_mark_node || op1 == error_mark_node)
2470 x = error_mark_node;
2471 else
2472 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2473 }
111e415b 2474 else
2475 x = fold (x);
d2c63826 2476
4d984926 2477 /* This is only needed for -Wnonnull-compare and only if
2478 TREE_NO_WARNING (org_x), but to avoid that option affecting code
 2479 generation, we always do it. */
2480 if (COMPARISON_CLASS_P (org_x))
2cde02ad 2481 {
2482 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2483 ;
2484 else if (COMPARISON_CLASS_P (x))
4d984926 2485 {
2486 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2487 TREE_NO_WARNING (x) = 1;
2488 }
2cde02ad 2489 /* Otherwise give up on optimizing these, let GIMPLE folders
2490 optimize those later on. */
2491 else if (op0 != TREE_OPERAND (org_x, 0)
2492 || op1 != TREE_OPERAND (org_x, 1))
2493 {
2494 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
4d984926 2495 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2496 TREE_NO_WARNING (x) = 1;
2cde02ad 2497 }
2498 else
2499 x = org_x;
2500 }
d2c63826 2501 break;
2502
2503 case VEC_COND_EXPR:
2504 case COND_EXPR:
d2c63826 2505 loc = EXPR_LOCATION (x);
69f54cf5 2506 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
d2c63826 2507 op1 = cp_fold (TREE_OPERAND (x, 1));
2508 op2 = cp_fold (TREE_OPERAND (x, 2));
2509
7a7ca07c 2510 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2511 {
f9542e61 2512 warning_sentinel s (warn_int_in_bool_context);
7a7ca07c 2513 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2514 op1 = cp_truthvalue_conversion (op1);
2515 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2516 op2 = cp_truthvalue_conversion (op2);
2517 }
3c382e05 2518 else if (VOID_TYPE_P (TREE_TYPE (x)))
2519 {
2520 if (TREE_CODE (op0) == INTEGER_CST)
2521 {
 2522 /* If the condition is constant, fold can fold away the COND_EXPR.
 2523 Since some statement-level uses of COND_EXPR have one of the
 2524 branches NULL, supply empty statements to avoid a folding crash. */
2525 if (!op1)
2526 op1 = build_empty_stmt (loc);
2527 if (!op2)
2528 op2 = build_empty_stmt (loc);
2529 }
2530 else
2531 {
2532 /* Otherwise, don't bother folding a void condition, since
2533 it can't produce a constant value. */
2534 if (op0 != TREE_OPERAND (x, 0)
2535 || op1 != TREE_OPERAND (x, 1)
2536 || op2 != TREE_OPERAND (x, 2))
2537 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2538 break;
2539 }
2540 }
7a7ca07c 2541
f6dfb86a 2542 if (op0 != TREE_OPERAND (x, 0)
2543 || op1 != TREE_OPERAND (x, 1)
2544 || op2 != TREE_OPERAND (x, 2))
1b8c43ab 2545 {
2546 if (op0 == error_mark_node
2547 || op1 == error_mark_node
2548 || op2 == error_mark_node)
2549 x = error_mark_node;
2550 else
2551 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2552 }
f6dfb86a 2553 else
d2c63826 2554 x = fold (x);
2555
bf64d98a 2556 /* A COND_EXPR might have incompatible types in branches if one or both
2557 arms are bitfields. If folding exposed such a branch, fix it up. */
ec72e2f7 2558 if (TREE_CODE (x) != code
6fa371d1 2559 && x != error_mark_node
ec72e2f7 2560 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2561 x = fold_convert (TREE_TYPE (org_x), x);
bf64d98a 2562
d2c63826 2563 break;
2564
2565 case CALL_EXPR:
2566 {
2567 int i, m, sv = optimize, nw = sv, changed = 0;
2568 tree callee = get_callee_fndecl (x);
2569
efe6a40a 2570 /* Some built-in function calls will be evaluated at compile-time in
2571 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2572 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
a0e9bfbb 2573 if (callee && fndecl_built_in_p (callee) && !optimize
d2c63826 2574 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2575 && current_function_decl
2576 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2577 nw = 1;
d2c63826 2578
18d371d3 2579 /* Defer folding __builtin_is_constant_evaluated. */
2580 if (callee
a0e9bfbb 2581 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2582 BUILT_IN_FRONTEND))
18d371d3 2583 break;
2584
d2c63826 2585 x = copy_node (x);
2586
2587 m = call_expr_nargs (x);
2588 for (i = 0; i < m; i++)
2589 {
2590 r = cp_fold (CALL_EXPR_ARG (x, i));
2591 if (r != CALL_EXPR_ARG (x, i))
1b8c43ab 2592 {
2593 if (r == error_mark_node)
2594 {
2595 x = error_mark_node;
2596 break;
2597 }
2598 changed = 1;
2599 }
d2c63826 2600 CALL_EXPR_ARG (x, i) = r;
2601 }
1b8c43ab 2602 if (x == error_mark_node)
2603 break;
d2c63826 2604
2605 optimize = nw;
2606 r = fold (x);
2607 optimize = sv;
2608
2609 if (TREE_CODE (r) != CALL_EXPR)
2610 {
2611 x = cp_fold (r);
2612 break;
2613 }
2614
2615 optimize = nw;
2616
efe6a40a 2617 /* Invoke maybe_constant_value for functions declared
2618 constexpr and not called with AGGR_INIT_EXPRs.
d2c63826 2619 TODO:
efe6a40a 2620 Do constexpr expansion of expressions where the call itself is not
2621 constant, but the call followed by an INDIRECT_REF is. */
29684344 2622 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2623 && !flag_no_inline)
d9cfff22 2624 r = maybe_constant_value (x);
d2c63826 2625 optimize = sv;
2626
2627 if (TREE_CODE (r) != CALL_EXPR)
2628 {
d9cfff22 2629 if (DECL_CONSTRUCTOR_P (callee))
2630 {
2631 loc = EXPR_LOCATION (x);
2632 tree s = build_fold_indirect_ref_loc (loc,
2633 CALL_EXPR_ARG (x, 0));
2634 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2635 }
d2c63826 2636 x = r;
2637 break;
2638 }
2639
2640 if (!changed)
2641 x = org_x;
2642 break;
2643 }
2644
2645 case CONSTRUCTOR:
2646 {
2647 unsigned i;
2648 constructor_elt *p;
2649 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
41a5cb89 2650 vec<constructor_elt, va_gc> *nelts = NULL;
d2c63826 2651 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
41a5cb89 2652 {
2653 tree op = cp_fold (p->value);
41a5cb89 2654 if (op != p->value)
1b8c43ab 2655 {
2656 if (op == error_mark_node)
2657 {
2658 x = error_mark_node;
4ecaaab2 2659 vec_free (nelts);
1b8c43ab 2660 break;
2661 }
4ecaaab2 2662 if (nelts == NULL)
2663 nelts = elts->copy ();
2664 (*nelts)[i].value = op;
1b8c43ab 2665 }
41a5cb89 2666 }
4ecaaab2 2667 if (nelts)
7604a798 2668 {
2669 x = build_constructor (TREE_TYPE (x), nelts);
2670 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2671 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2672 }
f82dc839 2673 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2674 x = fold (x);
d2c63826 2675 break;
2676 }
2677 case TREE_VEC:
2678 {
2679 bool changed = false;
02fafda8 2680 releasing_vec vec;
d2c63826 2681 int i, n = TREE_VEC_LENGTH (x);
2682 vec_safe_reserve (vec, n);
2683
2684 for (i = 0; i < n; i++)
2685 {
2686 tree op = cp_fold (TREE_VEC_ELT (x, i));
2687 vec->quick_push (op);
2688 if (op != TREE_VEC_ELT (x, i))
2689 changed = true;
2690 }
2691
2692 if (changed)
2693 {
2694 r = copy_node (x);
2695 for (i = 0; i < n; i++)
2696 TREE_VEC_ELT (r, i) = (*vec)[i];
2697 x = r;
2698 }
d2c63826 2699 }
2700
2701 break;
2702
2703 case ARRAY_REF:
2704 case ARRAY_RANGE_REF:
2705
2706 loc = EXPR_LOCATION (x);
2707 op0 = cp_fold (TREE_OPERAND (x, 0));
2708 op1 = cp_fold (TREE_OPERAND (x, 1));
2709 op2 = cp_fold (TREE_OPERAND (x, 2));
2710 op3 = cp_fold (TREE_OPERAND (x, 3));
2711
1b8c43ab 2712 if (op0 != TREE_OPERAND (x, 0)
2713 || op1 != TREE_OPERAND (x, 1)
2714 || op2 != TREE_OPERAND (x, 2)
2715 || op3 != TREE_OPERAND (x, 3))
2716 {
2717 if (op0 == error_mark_node
2718 || op1 == error_mark_node
2719 || op2 == error_mark_node
2720 || op3 == error_mark_node)
2721 x = error_mark_node;
2722 else
89f17a65 2723 {
2724 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2725 TREE_READONLY (x) = TREE_READONLY (org_x);
2726 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2727 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2728 }
1b8c43ab 2729 }
d2c63826 2730
2731 x = fold (x);
2732 break;
2733
2af642bf 2734 case SAVE_EXPR:
2735 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 2736 folding, evaluates to an invariant. In that case there is no need
 2737 to wrap the folded tree in a SAVE_EXPR. */
2738 r = cp_fold (TREE_OPERAND (x, 0));
2739 if (tree_invariant_p (r))
2740 x = r;
2741 break;
2742
d2c63826 2743 default:
2744 return org_x;
2745 }
2746
2a655a4c 2747 fold_cache->put (org_x, x);
d2c63826 2748 /* Prevent that we try to fold an already folded result again. */
2749 if (x != org_x)
2a655a4c 2750 fold_cache->put (x, x);
d2c63826 2751
2752 return x;
2753}
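
/* Usage note (added): results are memoized in fold_cache keyed by the
   original tree, and a folded result is additionally mapped to itself,
   so re-folding an already-folded tree is a single hash lookup. As the
   cache is GTY((deletable)), it is simply dropped at garbage collection
   rather than walked.  */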
2754
863c62e0 2755/* Look up either "hot" or "cold" in attribute list LIST. */
2756
2757tree
2758lookup_hotness_attribute (tree list)
2759{
2760 for (; list; list = TREE_CHAIN (list))
2761 {
2762 tree name = get_attribute_name (list);
2763 if (is_attribute_p ("hot", name)
2764 || is_attribute_p ("cold", name)
2765 || is_attribute_p ("likely", name)
2766 || is_attribute_p ("unlikely", name))
2767 break;
2768 }
2769 return list;
2770}
2771
2772/* Remove both "hot" and "cold" attributes from LIST. */
2773
2774static tree
2775remove_hotness_attribute (tree list)
2776{
2777 list = remove_attribute ("hot", list);
2778 list = remove_attribute ("cold", list);
2779 list = remove_attribute ("likely", list);
2780 list = remove_attribute ("unlikely", list);
2781 return list;
2782}
2783
2784/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2785 PREDICT_EXPR. */
2786
2787tree
a1e1b603 2788process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
863c62e0 2789{
2790 if (std_attrs == error_mark_node)
2791 return std_attrs;
2792 if (tree attr = lookup_hotness_attribute (std_attrs))
2793 {
2794 tree name = get_attribute_name (attr);
2795 bool hot = (is_attribute_p ("hot", name)
2796 || is_attribute_p ("likely", name));
2797 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2798 hot ? TAKEN : NOT_TAKEN);
a1e1b603 2799 SET_EXPR_LOCATION (pred, attrs_loc);
863c62e0 2800 add_stmt (pred);
2801 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2802 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2803 get_attribute_name (other), name);
2804 std_attrs = remove_hotness_attribute (std_attrs);
2805 }
2806 return std_attrs;
2807}
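
/* For example (illustrative):

     if (p) [[likely]]
       f ();

   turns the attribute into PREDICT_EXPR <PRED_HOT_LABEL, TAKEN> added
   before the statement and strips it from the returned attribute list;
   a second hotness attribute on the same statement would be diagnosed
   with -Wattributes.  */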
2808
d2c63826 2809#include "gt-cp-cp-gimplify.h"