/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
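
/* For illustration (editorial sketch, not part of the upstream sources):
   with nested loops such as

     while (c1)       // begin_bc_block pushes break/continue labels B1/C1
       while (c2)     // pushes B2/C2
         break;       // get_bc_label (bc_break) yields B2, the innermost

   bc_label[bc] always names the innermost label of each kind; outer
   labels remain reachable through DECL_CHAIN and are re-exposed when
   finish_bc_block pops the inner scope.  */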

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
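
/* For example (a sketch of the resulting tree; the exact runtime entry
   point named by call_unexpected_fn is an assumption here):

     void f () throw (A) { body }

   is lowered to roughly

     TRY_CATCH_EXPR
       body
     EH_FILTER_EXPR (allowed = A)
       call_unexpected_fn (exception pointer)  */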

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
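
/* Sketch of the output (illustrative only, for a non-constant COND):
   a while loop

     while (cond) body;

   becomes roughly

     LOOP_EXPR
       {
         if (cond) ; else goto break_lab;   // the "exit", cond_is_first
         body;                              // breaks/continues already
         continue_lab:;                     // rewritten into gotos
       }
     break_lab:;

   while a do-while loop places the exit test after the body and the
   continue label.  Each label is emitted only if it was used.  */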

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}
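
/* Sketch (illustrative): a SWITCH_STMT such as

     switch (x) { case 0: f (); break; }

   becomes a SWITCH_EXPR whose body ends with the break label,

     SWITCH_EXPR <x>
       {
         case 0:;
         f ();
         goto break_lab;
         break_lab:;
       }

   with SWITCH_BREAK_LABEL_P set on break_lab so later passes can
   recognize it.  */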

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
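
/* For example, with -Wunused-value the statement "x + 1;" has no
   side-effects, is not empty, and does not have void type, so it is
   diagnosed here as a statement with no effect before gimplification
   discards it.  */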

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
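
/* Sketch of the slot replacement performed above (illustrative):

     to = TARGET_EXPR <tmp, AGGR_INIT_EXPR <ctor, slot: tmp>>

   loses its INIT_EXPR and TARGET_EXPR wrappers and becomes

     AGGR_INIT_EXPR <ctor, slot: to>

   so the constructor builds directly into TO and no temporary is
   needed.  */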

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
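
/* Roughly (a sketch), MUST_NOT_THROW_EXPR <body> is gimplified as the
   equivalent of

     try { body } catch (...) { terminate (); }

   except that the handler is the special GIMPLE_EH_MUST_NOT_THROW
   statement wrapping terminate_fn rather than a real catch clause.  */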

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
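
/* Examples (illustrative): for "volatile int v;" the expression "v"
   has TREE_SIDE_EFFECTS, but merely naming it does nothing, so the
   answer is false.  "a[i++]" really does have side-effects, because
   evaluating the lvalue evaluates the index; so does "*f ()", because
   computing the address calls f.  */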

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
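
        /* Illustrative example of case 2 (not from the original comment):
           in C++17,

             int g;
             int *f () { g = 1; return &g; }
             ...
             *f () = g;

           must store the value G had before the call to F, so the RHS
           "g" is preevaluated into a temporary below even though it has
           no side-effects of its own.  */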
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs of the two cxx_int_tree_map entries are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle end.  For now, since most folding is done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, which already had its subtrees walked the
         first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}
dddcebdc 1041/* Perform any pre-gimplification lowering of C++ front end trees to
1042 GENERIC. */
dddab69e 1043
1044static tree
dddcebdc 1045cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
dddab69e 1046{
1047 tree stmt = *stmt_p;
660c48c4 1048 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
431205b7 1049 hash_set<tree> *p_set = wtd->p_set;
dddab69e 1050
9b222de3 1051 /* If in an OpenMP context, note var uses. */
1052 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
80a58eb0 1053 && (VAR_P (stmt)
9b222de3 1054 || TREE_CODE (stmt) == PARM_DECL
1055 || TREE_CODE (stmt) == RESULT_DECL)
1056 && omp_var_to_track (stmt))
1057 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1058
cbc3b89f 1059 /* Don't dereference parms in a thunk, pass the references through. */
1060 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1061 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1062 {
1063 *walk_subtrees = 0;
1064 return NULL;
1065 }
1066
6f0a524c 1067 /* Dereference invisible reference parms. */
a0168bf5 1068 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
dddcebdc 1069 {
806e4c12 1070 *stmt_p = convert_from_reference (stmt);
0e899ffe 1071 p_set->add (*stmt_p);
dddcebdc 1072 *walk_subtrees = 0;
1073 return NULL;
1074 }
1075
7db5a284 1076 /* Map block scope extern declarations to visible declarations with the
1077 same name and type in outer scopes if any. */
1078 if (cp_function_chain->extern_decl_map
4cace8cb 1079 && VAR_OR_FUNCTION_DECL_P (stmt)
7db5a284 1080 && DECL_EXTERNAL (stmt))
1081 {
1082 struct cxx_int_tree_map *h, in;
1083 in.uid = DECL_UID (stmt);
2ef51f0e 1084 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
7db5a284 1085 if (h)
1086 {
1087 *stmt_p = h->to;
1088 *walk_subtrees = 0;
1089 return NULL;
1090 }
1091 }
1092
3ddb3278 1093 if (TREE_CODE (stmt) == INTEGER_CST
90ad495b 1094 && TYPE_REF_P (TREE_TYPE (stmt))
3ddb3278 1095 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1096 && !wtd->no_sanitize_p)
1097 {
1098 ubsan_maybe_instrument_reference (stmt_p);
1099 if (*stmt_p != stmt)
1100 {
1101 *walk_subtrees = 0;
1102 return NULL_TREE;
1103 }
1104 }
1105
dddcebdc 1106 /* Other than invisiref parms, don't walk the same tree twice. */
431205b7 1107 if (p_set->contains (stmt))
dddcebdc 1108 {
1109 *walk_subtrees = 0;
1110 return NULL_TREE;
1111 }
1112
cc9e1a64 1113 switch (TREE_CODE (stmt))
dddcebdc 1114 {
cc9e1a64 1115 case ADDR_EXPR:
1116 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1117 {
1118 /* If in an OpenMP context, note var uses. */
1119 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1120 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1121 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1122 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
9b222de3 1123 *walk_subtrees = 0;
cc9e1a64 1124 }
1125 break;
1126
1127 case RETURN_EXPR:
1128 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1129 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1130 *walk_subtrees = 0;
1131 break;
1132
1133 case OMP_CLAUSE:
1134 switch (OMP_CLAUSE_CODE (stmt))
1135 {
1136 case OMP_CLAUSE_LASTPRIVATE:
1137 /* Don't dereference an invisiref in OpenMP clauses. */
1138 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1139 {
1140 *walk_subtrees = 0;
1141 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1142 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1143 cp_genericize_r, data, NULL);
1144 }
1145 break;
1146 case OMP_CLAUSE_PRIVATE:
1147 /* Don't dereference an invisiref in OpenMP clauses. */
1148 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
9b222de3 1149 *walk_subtrees = 0;
cc9e1a64 1150 else if (wtd->omp_ctx != NULL)
1151 {
1152 /* Private clause doesn't cause any references to the
1153 var in outer contexts, avoid calling
1154 omp_cxx_notice_variable for it. */
1155 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1156 wtd->omp_ctx = NULL;
1157 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1158 data, NULL);
1159 wtd->omp_ctx = old;
1160 *walk_subtrees = 0;
1161 }
1162 break;
1163 case OMP_CLAUSE_SHARED:
1164 case OMP_CLAUSE_FIRSTPRIVATE:
1165 case OMP_CLAUSE_COPYIN:
1166 case OMP_CLAUSE_COPYPRIVATE:
1167 /* Don't dereference an invisiref in OpenMP clauses. */
1168 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
bc7bff74 1169 *walk_subtrees = 0;
cc9e1a64 1170 break;
1171 case OMP_CLAUSE_REDUCTION:
1172 /* Don't dereference an invisiref in reduction clause's
1173 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1174 still needs to be genericized. */
1175 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1176 {
1177 *walk_subtrees = 0;
1178 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1179 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1180 cp_genericize_r, data, NULL);
1181 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1182 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1183 cp_genericize_r, data, NULL);
1184 }
1185 break;
1186 default:
1187 break;
1188 }
1189 break;
1190
1191 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1192 to lower this construct before scanning it, so we need to lower these
1193 before doing anything else. */
1194 case CLEANUP_STMT:
1195 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1196 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1197 : TRY_FINALLY_EXPR,
1198 void_type_node,
1199 CLEANUP_BODY (stmt),
1200 CLEANUP_EXPR (stmt));
1201 break;
1202
1203 case IF_STMT:
97767aad 1204 genericize_if_stmt (stmt_p);
1205 /* *stmt_p has changed, tail recurse to handle it again. */
1206 return cp_genericize_r (stmt_p, walk_subtrees, data);
97767aad 1207
cc9e1a64 1208 /* COND_EXPR might have incompatible types in branches if one or both
1209 arms are bitfields. Fix it up now. */
1210 case COND_EXPR:
1211 {
1212 tree type_left
1213 = (TREE_OPERAND (stmt, 1)
1214 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1215 : NULL_TREE);
1216 tree type_right
1217 = (TREE_OPERAND (stmt, 2)
1218 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1219 : NULL_TREE);
1220 if (type_left
1221 && !useless_type_conversion_p (TREE_TYPE (stmt),
1222 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1223 {
1224 TREE_OPERAND (stmt, 1)
1225 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1226 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1227 type_left));
1228 }
1229 if (type_right
1230 && !useless_type_conversion_p (TREE_TYPE (stmt),
1231 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1232 {
1233 TREE_OPERAND (stmt, 2)
1234 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1235 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1236 type_right));
1237 }
1238 }
1239 break;
a0a1efe3 1240
cc9e1a64 1241 case BIND_EXPR:
9b222de3 1242 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1243 {
1244 tree decl;
1245 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
80a58eb0 1246 if (VAR_P (decl)
9b222de3 1247 && !DECL_EXTERNAL (decl)
1248 && omp_var_to_track (decl))
1249 {
1250 splay_tree_node n
1251 = splay_tree_lookup (wtd->omp_ctx->variables,
1252 (splay_tree_key) decl);
1253 if (n == NULL)
1254 splay_tree_insert (wtd->omp_ctx->variables,
1255 (splay_tree_key) decl,
1256 TREE_STATIC (decl)
1257 ? OMP_CLAUSE_DEFAULT_SHARED
1258 : OMP_CLAUSE_DEFAULT_PRIVATE);
1259 }
1260 }
9917317a 1261 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
d120fa25 1262 {
1263 /* The point here is to not sanitize static initializers. */
1264 bool no_sanitize_p = wtd->no_sanitize_p;
1265 wtd->no_sanitize_p = true;
1266 for (tree decl = BIND_EXPR_VARS (stmt);
1267 decl;
1268 decl = DECL_CHAIN (decl))
1269 if (VAR_P (decl)
1270 && TREE_STATIC (decl)
1271 && DECL_INITIAL (decl))
1272 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1273 wtd->no_sanitize_p = no_sanitize_p;
1274 }
f1f41a6c 1275 wtd->bind_expr_stack.safe_push (stmt);
660c48c4 1276 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1277 cp_genericize_r, data, NULL);
f1f41a6c 1278 wtd->bind_expr_stack.pop ();
cc9e1a64 1279 break;
660c48c4 1280
cc9e1a64 1281 case USING_STMT:
1282 {
1283 tree block = NULL_TREE;
1284
1285 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1286 BLOCK, and append an IMPORTED_DECL to its
1287 BLOCK_VARS chained list. */
1288 if (wtd->bind_expr_stack.exists ())
1289 {
1290 int i;
1291 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1292 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1293 break;
1294 }
1295 if (block)
1296 {
3dd770ef 1297 tree decl = TREE_OPERAND (stmt, 0);
1298 gcc_assert (decl);
cc9e1a64 1299
3dd770ef 1300 if (undeduced_auto_decl (decl))
1301 /* Omit from the GENERIC, the back-end can't handle it. */;
1302 else
1303 {
1304 tree using_directive = make_node (IMPORTED_DECL);
1305 TREE_TYPE (using_directive) = void_type_node;
660c48c4 1306
3dd770ef 1307 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1308 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1309 BLOCK_VARS (block) = using_directive;
1310 }
cc9e1a64 1311 }
1312 /* The USING_STMT won't appear in GENERIC. */
1313 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1314 *walk_subtrees = 0;
1315 }
1316 break;
1317
1318 case DECL_EXPR:
1319 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
660c48c4 1320 {
cc9e1a64 1321 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1322 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1323 *walk_subtrees = 0;
660c48c4 1324 }
cc9e1a64 1325 else
660c48c4 1326 {
cc9e1a64 1327 tree d = DECL_EXPR_DECL (stmt);
1328 if (VAR_P (d))
1329 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
660c48c4 1330 }
cc9e1a64 1331 break;
9b222de3 1332
cc9e1a64 1333 case OMP_PARALLEL:
1334 case OMP_TASK:
1335 case OMP_TASKLOOP:
1336 {
1337 struct cp_genericize_omp_taskreg omp_ctx;
1338 tree c, decl;
1339 splay_tree_node n;
1340
1341 *walk_subtrees = 0;
1342 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1343 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1344 omp_ctx.default_shared = omp_ctx.is_parallel;
1345 omp_ctx.outer = wtd->omp_ctx;
1346 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1347 wtd->omp_ctx = &omp_ctx;
1348 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1349 switch (OMP_CLAUSE_CODE (c))
1350 {
1351 case OMP_CLAUSE_SHARED:
1352 case OMP_CLAUSE_PRIVATE:
1353 case OMP_CLAUSE_FIRSTPRIVATE:
1354 case OMP_CLAUSE_LASTPRIVATE:
1355 decl = OMP_CLAUSE_DECL (c);
1356 if (decl == error_mark_node || !omp_var_to_track (decl))
1357 break;
1358 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1359 if (n != NULL)
1360 break;
1361 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1362 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1363 ? OMP_CLAUSE_DEFAULT_SHARED
1364 : OMP_CLAUSE_DEFAULT_PRIVATE);
1365 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1366 omp_cxx_notice_variable (omp_ctx.outer, decl);
9b222de3 1367 break;
cc9e1a64 1368 case OMP_CLAUSE_DEFAULT:
1369 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1370 omp_ctx.default_shared = true;
1371 default:
9b222de3 1372 break;
cc9e1a64 1373 }
1374 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1375 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1376 else
1377 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1378 wtd->omp_ctx = omp_ctx.outer;
1379 splay_tree_delete (omp_ctx.variables);
1380 }
1381 break;
1382
1383 case TRY_BLOCK:
1384 {
1385 *walk_subtrees = 0;
1386 tree try_block = wtd->try_block;
1387 wtd->try_block = stmt;
1388 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1389 wtd->try_block = try_block;
1390 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1391 }
1392 break;
1393
1394 case MUST_NOT_THROW_EXPR:
cb40a6f7 1395 /* MUST_NOT_THROW_COND might be something else with TM. */
1396 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1397 {
1398 *walk_subtrees = 0;
1399 tree try_block = wtd->try_block;
1400 wtd->try_block = stmt;
1401 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1402 wtd->try_block = try_block;
1403 }
cc9e1a64 1404 break;
1405
1406 case THROW_EXPR:
1407 {
1408 location_t loc = location_of (stmt);
1409 if (TREE_NO_WARNING (stmt))
1410 /* Never mind. */;
1411 else if (wtd->try_block)
1412 {
1413 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1414 && warning_at (loc, OPT_Wterminate,
1415 "throw will always call terminate()")
1416 && cxx_dialect >= cxx11
1417 && DECL_DESTRUCTOR_P (current_function_decl))
1418 inform (loc, "in C++11 destructors default to noexcept");
1419 }
1420 else
1421 {
1422 if (warn_cxx11_compat && cxx_dialect < cxx11
1423 && DECL_DESTRUCTOR_P (current_function_decl)
1424 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1425 == NULL_TREE)
1426 && (get_defaulted_eh_spec (current_function_decl)
1427 == empty_except_spec))
1428 warning_at (loc, OPT_Wc__11_compat,
1429 "in C++11 this throw will terminate because "
1430 "destructors default to noexcept");
1431 }
1432 }
1433 break;
1434
1435 case CONVERT_EXPR:
1436 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1437 break;
1438
1439 case FOR_STMT:
1440 genericize_for_stmt (stmt_p, walk_subtrees, data);
1441 break;
1442
1443 case WHILE_STMT:
1444 genericize_while_stmt (stmt_p, walk_subtrees, data);
1445 break;
1446
1447 case DO_STMT:
1448 genericize_do_stmt (stmt_p, walk_subtrees, data);
1449 break;
1450
1451 case SWITCH_STMT:
1452 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1453 break;
1454
1455 case CONTINUE_STMT:
1456 genericize_continue_stmt (stmt_p);
1457 break;
1458
1459 case BREAK_STMT:
1460 genericize_break_stmt (stmt_p);
1461 break;
1462
1463 case OMP_FOR:
1464 case OMP_SIMD:
1465 case OMP_DISTRIBUTE:
68bf4712 1466 case OACC_LOOP:
cc9e1a64 1467 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1468 break;
1469
1470 case PTRMEM_CST:
518495b8 1471 /* By the time we get here we're handing off to the back end, so we don't
1472 need or want to preserve PTRMEM_CST anymore. */
1473 *stmt_p = cplus_expand_constant (stmt);
1474 *walk_subtrees = 0;
cc9e1a64 1475 break;
1476
1477 case MEM_REF:
9564446e 1478 /* For MEM_REF, make sure not to sanitize the second operand even
cc9e1a64 1479 if it has reference type. It is just an offset with a type
9564446e 1480 holding other information. There is no other processing we
1481 need to do for INTEGER_CSTs, so just ignore the second argument
1482 unconditionally. */
1483 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1484 *walk_subtrees = 0;
cc9e1a64 1485 break;
1486
1487 case NOP_EXPR:
1488 if (!wtd->no_sanitize_p
1489 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
90ad495b 1490 && TYPE_REF_P (TREE_TYPE (stmt)))
3ddb3278 1491 ubsan_maybe_instrument_reference (stmt_p);
cc9e1a64 1492 break;
1493
1494 case CALL_EXPR:
1495 if (!wtd->no_sanitize_p
1496 && sanitize_flags_p ((SANITIZE_NULL
1497 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
392dee1e 1498 {
1499 tree fn = CALL_EXPR_FN (stmt);
1500 if (fn != NULL_TREE
1501 && !error_operand_p (fn)
d03fa520 1502 && INDIRECT_TYPE_P (TREE_TYPE (fn))
392dee1e 1503 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1504 {
1505 bool is_ctor
1506 = TREE_CODE (fn) == ADDR_EXPR
1507 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1508 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
9917317a 1509 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
32cf7025 1510 ubsan_maybe_instrument_member_call (stmt, is_ctor);
9917317a 1511 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
32cf7025 1512 cp_ubsan_maybe_instrument_member_call (stmt);
392dee1e 1513 }
13da18cc 1514 else if (fn == NULL_TREE
1515 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1516 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
90ad495b 1517 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
13da18cc 1518 *walk_subtrees = 0;
392dee1e 1519 }
91735070 1520 /* Fall through. */
1521 case AGGR_INIT_EXPR:
1522 /* For calls to a multi-versioned function, overload resolution
1523 returns the function with the highest target priority, that is,
1524 the version that will checked for dispatching first. If this
1525 version is inlinable, a direct call to this version can be made
1526 otherwise the call should go through the dispatcher. */
1527 {
ced7e116 1528 tree fn = cp_get_callee_fndecl_nofold (stmt);
91735070 1529 if (fn && DECL_FUNCTION_VERSIONED (fn)
1530 && (current_function_decl == NULL
1531 || !targetm.target_option.can_inline_p (current_function_decl,
1532 fn)))
1533 if (tree dis = get_function_version_dispatcher (fn))
1534 {
1535 mark_versions_used (dis);
1536 dis = build_address (dis);
1537 if (TREE_CODE (stmt) == CALL_EXPR)
1538 CALL_EXPR_FN (stmt) = dis;
1539 else
1540 AGGR_INIT_EXPR_FN (stmt) = dis;
1541 }
1542 }
cc9e1a64 1543 break;
1544
7604a798 1545 case TARGET_EXPR:
1546 if (TARGET_EXPR_INITIAL (stmt)
1547 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1548 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1549 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1550 break;
1551
cc9e1a64 1552 default:
1553 if (IS_TYPE_OR_DECL_P (stmt))
1554 *walk_subtrees = 0;
1555 break;
392dee1e 1556 }
660c48c4 1557
431205b7 1558 p_set->add (*stmt_p);
9031d10b 1559
dddab69e 1560 return NULL;
1561}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify it at runtime.  If
   -fsanitize=return is not enabled, instrument __builtin_unreachable
   instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
         improve any optimizations in that case, just break UB code.
         Don't add it for -fsanitize=unreachable -fno-sanitize=return either;
         UBSan covers this with ubsan_instrument_return above, where
         sufficient information is provided, while the
         __builtin_unreachable () below, if return sanitization is disabled,
         would just result in a hard-to-understand runtime error without a
         location.  */
      && (!optimize
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
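
/* For example (an editorial sketch): given

     int f (int x) { if (x) return 1; }

   control can fall off the end of F.  With -fsanitize=return a runtime
   diagnostic call is appended at the end of the function body;
   otherwise, when optimizing, a __builtin_unreachable () call is
   appended instead.  */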

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }
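
  /* Sketch (illustrative, using std::string as an arbitrary example of
     a class that must be passed by invisible reference): given

       void f (std::string s);

     DECL_ARG_TYPE (s) is already the reference type "std::string &";
     the loop above makes that the parameter's actual type and sets
     DECL_BY_REFERENCE, and cp_genericize_r later rewrites uses of S
     into dereferences via convert_from_reference.  */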
dddcebdc 1676
806e4c12 1677 /* Do the same for the return value. */
1678 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1679 {
1680 t = DECL_RESULT (fndecl);
1681 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1682 DECL_BY_REFERENCE (t) = 1;
1683 TREE_ADDRESSABLE (t) = 0;
1684 relayout_decl (t);
ae294470 1685 if (DECL_NAME (t))
1686 {
1687 /* Adjust DECL_VALUE_EXPR of the original var. */
1688 tree outer = outer_curly_brace_block (current_function_decl);
1689 tree var;
1690
1691 if (outer)
1692 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1693 if (VAR_P (var)
1694 && DECL_NAME (t) == DECL_NAME (var)
ae294470 1695 && DECL_HAS_VALUE_EXPR_P (var)
1696 && DECL_VALUE_EXPR (var) == t)
1697 {
1698 tree val = convert_from_reference (t);
1699 SET_DECL_VALUE_EXPR (var, val);
1700 break;
1701 }
1702 }
806e4c12 1703 }
1704
dddcebdc 1705 /* If we're a clone, the body is already GIMPLE. */
1706 if (DECL_CLONED_FUNCTION_P (fndecl))
1707 return;
1708
df0c563f 1709 /* Allow cp_genericize calls to be nested. */
1710 tree save_bc_label[2];
1711 save_bc_label[bc_break] = bc_label[bc_break];
1712 save_bc_label[bc_continue] = bc_label[bc_continue];
1713 bc_label[bc_break] = NULL_TREE;
1714 bc_label[bc_continue] = NULL_TREE;
1715
dddcebdc 1716 /* We do want to see every occurrence of the parms, so we can't just use
1717 walk_tree's hash functionality. */
a0168bf5 1718 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
dddab69e 1719
2fb20ba2 1720 cp_maybe_instrument_return (fndecl);
020bc656 1721
dddab69e 1722 /* Do everything else. */
1723 c_genericize (fndecl);
8487df40 1724
1725 gcc_assert (bc_label[bc_break] == NULL);
1726 gcc_assert (bc_label[bc_continue] == NULL);
df0c563f 1727 bc_label[bc_break] = save_bc_label[bc_break];
1728 bc_label[bc_continue] = save_bc_label[bc_continue];
8487df40 1729}
1730\f
 1731/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
 1732   NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
 1733   actually takes only one argument.  */
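/* For array operands the code built below amounts to the following
   rough sketch (not the literal GENERIC that is emitted):

     p1 = &arg1[0]...[0];  p2 = &arg2[0]...[0];
   lab:
     fn (p1, p2, <converted default args>);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;  */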
1734
1735static tree
1736cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1737{
c1be427d 1738 tree defparm, parm, t;
d01f58f9 1739 int i = 0;
1740 int nargs;
1741 tree *argarray;
2f2c591f 1742
8487df40 1743 if (fn == NULL)
1744 return NULL;
1745
d01f58f9 1746 nargs = list_length (DECL_ARGUMENTS (fn));
fd70b918 1747 argarray = XALLOCAVEC (tree, nargs);
d01f58f9 1748
2f2c591f 1749 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1750 if (arg2)
1751 defparm = TREE_CHAIN (defparm);
1752
c06d7bdd 1753 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
8487df40 1754 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1755 {
1756 tree inner_type = TREE_TYPE (arg1);
1757 tree start1, end1, p1;
1758 tree start2 = NULL, p2 = NULL;
c1be427d 1759 tree ret = NULL, lab;
8487df40 1760
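      /* Descend through all dimensions of the array type so that
	 START1 (and START2, if present) designate the first innermost
	 element.  */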
1761 start1 = arg1;
1762 start2 = arg2;
1763 do
1764 {
1765 inner_type = TREE_TYPE (inner_type);
1766 start1 = build4 (ARRAY_REF, inner_type, start1,
1767 size_zero_node, NULL, NULL);
1768 if (arg2)
1769 start2 = build4 (ARRAY_REF, inner_type, start2,
1770 size_zero_node, NULL, NULL);
1771 }
1772 while (TREE_CODE (inner_type) == ARRAY_TYPE);
389dd41b 1773 start1 = build_fold_addr_expr_loc (input_location, start1);
8487df40 1774 if (arg2)
389dd41b 1775 start2 = build_fold_addr_expr_loc (input_location, start2);
8487df40 1776
1777 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2cc66f2a 1778 end1 = fold_build_pointer_plus (start1, end1);
8487df40 1779
f9e245b2 1780 p1 = create_tmp_var (TREE_TYPE (start1));
75a70cf9 1781 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
8487df40 1782 append_to_statement_list (t, &ret);
1783
1784 if (arg2)
1785 {
f9e245b2 1786 p2 = create_tmp_var (TREE_TYPE (start2));
75a70cf9 1787 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
8487df40 1788 append_to_statement_list (t, &ret);
1789 }
1790
e60a6f7b 1791 lab = create_artificial_label (input_location);
8487df40 1792 t = build1 (LABEL_EXPR, void_type_node, lab);
1793 append_to_statement_list (t, &ret);
1794
d01f58f9 1795 argarray[i++] = p1;
8487df40 1796 if (arg2)
d01f58f9 1797 argarray[i++] = p2;
2f2c591f 1798 /* Handle default arguments. */
93bb78b6 1799 for (parm = defparm; parm && parm != void_list_node;
1800 parm = TREE_CHAIN (parm), i++)
d01f58f9 1801 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1802 TREE_PURPOSE (parm), fn,
1803 i - is_method, tf_warning_or_error);
d01f58f9 1804 t = build_call_a (fn, i, argarray);
c1be427d 1805 t = fold_convert (void_type_node, t);
1806 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1807 append_to_statement_list (t, &ret);
1808
2cc66f2a 1809 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1810 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
8487df40 1811 append_to_statement_list (t, &ret);
1812
1813 if (arg2)
1814 {
2cc66f2a 1815 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
75a70cf9 1816 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
8487df40 1817 append_to_statement_list (t, &ret);
1818 }
1819
1820 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1821 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1822 append_to_statement_list (t, &ret);
1823
1824 return ret;
1825 }
1826 else
1827 {
389dd41b 1828 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
8487df40 1829 if (arg2)
389dd41b 1830 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2f2c591f 1831 /* Handle default arguments. */
93bb78b6 1832 for (parm = defparm; parm && parm != void_list_node;
d01f58f9 1833 parm = TREE_CHAIN (parm), i++)
1834 argarray[i] = convert_default_arg (TREE_VALUE (parm),
c06d7bdd 1835 TREE_PURPOSE (parm), fn,
1836 i - is_method, tf_warning_or_error);
c1be427d 1837 t = build_call_a (fn, i, argarray);
1838 t = fold_convert (void_type_node, t);
1839 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8487df40 1840 }
1841}
1842
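/* The helpers below fetch the special member function they need from
   the clause's CP_OMP_CLAUSE_INFO, a TREE_VEC filled in by
   cxx_omp_create_clause_info: element 0 holds the (default or copy)
   constructor, element 1 the destructor and element 2 the assignment
   operator.  */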
1843/* Return code to initialize DECL with its default constructor, or
1844 NULL if there's nothing to do. */
1845
1846tree
a49c5913 1847cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
8487df40 1848{
1849 tree info = CP_OMP_CLAUSE_INFO (clause);
1850 tree ret = NULL;
1851
1852 if (info)
1853 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1854
1855 return ret;
1856}
1857
1858/* Return code to initialize DST with a copy constructor from SRC. */
1859
1860tree
1861cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1862{
1863 tree info = CP_OMP_CLAUSE_INFO (clause);
1864 tree ret = NULL;
1865
1866 if (info)
1867 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1868 if (ret == NULL)
75a70cf9 1869 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1870
1871 return ret;
1872}
1873
1874/* Similarly, except use an assignment operator instead. */
1875
1876tree
1877cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1878{
1879 tree info = CP_OMP_CLAUSE_INFO (clause);
1880 tree ret = NULL;
1881
1882 if (info)
1883 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1884 if (ret == NULL)
75a70cf9 1885 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8487df40 1886
1887 return ret;
1888}
1889
1890/* Return code to destroy DECL. */
1891
1892tree
1893cxx_omp_clause_dtor (tree clause, tree decl)
1894{
1895 tree info = CP_OMP_CLAUSE_INFO (clause);
1896 tree ret = NULL;
1897
1898 if (info)
1899 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1900
1901 return ret;
dddab69e 1902}
df2c34fc 1903
1904/* True if OpenMP should privatize what this DECL points to rather
1905 than the DECL itself. */
1906
1907bool
9f627b1a 1908cxx_omp_privatize_by_reference (const_tree decl)
df2c34fc 1909{
90ad495b 1910 return (TYPE_REF_P (TREE_TYPE (decl))
bc7bff74 1911 || is_invisiref_parm (decl));
df2c34fc 1912}
fd6481cf 1913
2169f33b 1914/* Return true if DECL is a const-qualified var having no mutable member.  */
1915bool
1916cxx_omp_const_qual_no_mutable (tree decl)
fd6481cf 1917{
2169f33b 1918 tree type = TREE_TYPE (decl);
90ad495b 1919 if (TYPE_REF_P (type))
fd6481cf 1920 {
1921 if (!is_invisiref_parm (decl))
2169f33b 1922 return false;
fd6481cf 1923 type = TREE_TYPE (type);
1924
1925 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1926 {
 1927	      /* NRV (the named return value optimization) doesn't preserve
 1928		 const qualification of the variable's type.  */
1929 tree outer = outer_curly_brace_block (current_function_decl);
1930 tree var;
1931
1932 if (outer)
1767a056 1933 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1f10c59c 1934 if (VAR_P (var)
1935 && DECL_NAME (decl) == DECL_NAME (var)
fd6481cf 1936 && (TYPE_MAIN_VARIANT (type)
1937 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1938 {
1939 if (TYPE_READONLY (TREE_TYPE (var)))
1940 type = TREE_TYPE (var);
1941 break;
1942 }
1943 }
1944 }
1945
1946 if (type == error_mark_node)
2169f33b 1947 return false;
fd6481cf 1948
1949 /* Variables with const-qualified type having no mutable member
1950 are predetermined shared. */
1951 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2169f33b 1952 return true;
1953
1954 return false;
1955}
1956
 1957/* True if the OpenMP sharing attribute of DECL is predetermined.  */
1958
1959enum omp_clause_default_kind
b16a5119 1960cxx_omp_predetermined_sharing_1 (tree decl)
2169f33b 1961{
1962 /* Static data members are predetermined shared. */
1963 if (TREE_STATIC (decl))
1964 {
1965 tree ctx = CP_DECL_CONTEXT (decl);
1966 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1967 return OMP_CLAUSE_DEFAULT_SHARED;
1968 }
1969
1970 /* Const qualified vars having no mutable member are predetermined
1971 shared. */
1972 if (cxx_omp_const_qual_no_mutable (decl))
fd6481cf 1973 return OMP_CLAUSE_DEFAULT_SHARED;
1974
1975 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1976}
1977
b16a5119 1978/* Likewise, but also include the artificial vars.  We don't want to
 1979   disallow artificial vars from being mentioned in explicit clauses,
 1980   since we use them e.g. for loop constructs with random access
 1981   iterators other than pointers, but during gimplification we do want
 1982   to treat them as predetermined.  */
1983
1984enum omp_clause_default_kind
1985cxx_omp_predetermined_sharing (tree decl)
1986{
1987 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
1988 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
1989 return ret;
1990
 1991  /* Predetermine artificial variables holding integral values; those
 1992     are usually the result of gimplify_one_sizepos or SAVE_EXPR
 1993     gimplification.  */
1994 if (VAR_P (decl)
1995 && DECL_ARTIFICIAL (decl)
1996 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
1997 && !(DECL_LANG_SPECIFIC (decl)
1998 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
1999 return OMP_CLAUSE_DEFAULT_SHARED;
2000
2001 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2002}
2003
fd6481cf 2004/* Finalize an implicitly determined clause. */
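/* For an implicit firstprivate clause this checks that the special
   member functions the privatization needs are available, caching the
   result in CP_OMP_CLAUSE_INFO, and downgrades the clause to shared
   when they are not or when the type is erroneous.  */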
2005
2006void
691447ab 2007cxx_omp_finish_clause (tree c, gimple_seq *)
fd6481cf 2008{
2009 tree decl, inner_type;
2010 bool make_shared = false;
2011
2012 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2013 return;
2014
2015 decl = OMP_CLAUSE_DECL (c);
2016 decl = require_complete_type (decl);
2017 inner_type = TREE_TYPE (decl);
2018 if (decl == error_mark_node)
2019 make_shared = true;
90ad495b 2020 else if (TYPE_REF_P (TREE_TYPE (decl)))
43895be5 2021 inner_type = TREE_TYPE (inner_type);
fd6481cf 2022
2023 /* We're interested in the base element, not arrays. */
2024 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2025 inner_type = TREE_TYPE (inner_type);
2026
2027 /* Check for special function availability by building a call to one.
2028 Save the results, because later we won't be in the right context
2029 for making these queries. */
2030 if (!make_shared
2031 && CLASS_TYPE_P (inner_type)
bc7bff74 2032 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
fd6481cf 2033 make_shared = true;
2034
2035 if (make_shared)
1c3f8c56 2036 {
2037 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2038 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2039 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2040 }
fd6481cf 2041}
43895be5 2042
2043/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 2044   disregarded in an OpenMP construct, because it is going to be
2045 remapped during OpenMP lowering. SHARED is true if DECL
2046 is going to be shared, false if it is going to be privatized. */
2047
2048bool
2049cxx_omp_disregard_value_expr (tree decl, bool shared)
2050{
2051 return !shared
2052 && VAR_P (decl)
2053 && DECL_HAS_VALUE_EXPR_P (decl)
2054 && DECL_ARTIFICIAL (decl)
2055 && DECL_LANG_SPECIFIC (decl)
2056 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2057}
d2c63826 2058
69f54cf5 2059/* Fold expression X which is used as an rvalue if RVAL is true. */
2060
2061static tree
2062cp_fold_maybe_rvalue (tree x, bool rval)
2063{
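  /* Iterate: cp_fold may expose a const DECL whose known constant
     value (e.g. the initializer of a const variable) is itself
     foldable again.  */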
e71bb662 2064 while (true)
69f54cf5 2065 {
e71bb662 2066 x = cp_fold (x);
ac6641ca 2067 if (rval && DECL_P (x)
90ad495b 2068 && !TYPE_REF_P (TREE_TYPE (x)))
e71bb662 2069 {
2070 tree v = decl_constant_value (x);
2071 if (v != x && v != error_mark_node)
2072 {
2073 x = v;
2074 continue;
2075 }
2076 }
2077 break;
69f54cf5 2078 }
e71bb662 2079 return x;
69f54cf5 2080}
2081
2082/* Fold expression X which is used as an rvalue. */
2083
2084static tree
2085cp_fold_rvalue (tree x)
2086{
2087 return cp_fold_maybe_rvalue (x, true);
2088}
2089
d93ee6f8 2090/* Perform folding on expression X. */
2091
2092tree
2093cp_fully_fold (tree x)
2094{
2095 if (processing_template_decl)
2096 return x;
2097 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2098 have to call both. */
2099 if (cxx_dialect >= cxx11)
5f9e77dd 2100 {
2101 x = maybe_constant_value (x);
2102 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2103 a TARGET_EXPR; undo that here. */
2104 if (TREE_CODE (x) == TARGET_EXPR)
2105 x = TARGET_EXPR_INITIAL (x);
2106 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2107 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2108 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2109 x = TREE_OPERAND (x, 0);
2110 }
d93ee6f8 2111 return cp_fold_rvalue (x);
2112}
2113
da562e32 2114/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
 2115   and certain changes are made to the folding done.  Or should be (FIXME).  We
 2116   never touch maybe_const, as it is used only for the C front end's
 2117   C_MAYBE_CONST_EXPR.  */
2118
2119tree
69cd03b2 2120c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
da562e32 2121{
69cd03b2 2122 return cp_fold_maybe_rvalue (x, !lval);
da562e32 2123}
2124
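/* Cache of previously folded expressions.  It is GTY((deletable)), so
   the collector may drop it at any time, and it is flushed explicitly
   by clear_fold_cache.  */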
2a655a4c 2125static GTY((deletable)) hash_map<tree, tree> *fold_cache;
d2c63826 2126
a0c919f7 2127/* Dispose of the whole FOLD_CACHE. */
2128
2129void
2130clear_fold_cache (void)
2131{
2a655a4c 2132 if (fold_cache != NULL)
2133 fold_cache->empty ();
a0c919f7 2134}
2135
d2c63826 2136/* This function tries to fold an expression X.
2137 To avoid combinatorial explosion, folding results are kept in fold_cache.
d76863c8 2138 If X is invalid, we don't fold at all.
d2c63826 2139 For performance reasons we don't cache expressions representing a
2140 declaration or constant.
 2141   Returns X or its folded variant.  */
2142
2143static tree
2144cp_fold (tree x)
2145{
2146 tree op0, op1, op2, op3;
2147 tree org_x = x, r = NULL_TREE;
2148 enum tree_code code;
2149 location_t loc;
69f54cf5 2150 bool rval_ops = true;
d2c63826 2151
8f559c6e 2152 if (!x || x == error_mark_node)
d2c63826 2153 return x;
2154
d76863c8 2155 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
d2c63826 2156 return x;
2157
2158 /* Don't bother to cache DECLs or constants. */
2159 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2160 return x;
2161
2a655a4c 2162 if (fold_cache == NULL)
2163 fold_cache = hash_map<tree, tree>::create_ggc (101);
2164
2165 if (tree *cached = fold_cache->get (x))
2166 return *cached;
d2c63826 2167
2168 code = TREE_CODE (x);
2169 switch (code)
2170 {
d1cd4a64 2171 case CLEANUP_POINT_EXPR:
2172 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2173 effects. */
2174 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2175 if (!TREE_SIDE_EFFECTS (r))
2176 x = r;
2177 break;
2178
d2c63826 2179 case SIZEOF_EXPR:
2180 x = fold_sizeof_expr (x);
2181 break;
2182
2183 case VIEW_CONVERT_EXPR:
69f54cf5 2184 rval_ops = false;
e3533433 2185 /* FALLTHRU */
d2c63826 2186 case CONVERT_EXPR:
2187 case NOP_EXPR:
2188 case NON_LVALUE_EXPR:
2189
2190 if (VOID_TYPE_P (TREE_TYPE (x)))
ca29c574 2191 {
2192 /* This is just to make sure we don't end up with casts to
2193 void from error_mark_node. If we just return x, then
2194 cp_fold_r might fold the operand into error_mark_node and
2195 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2196 during gimplification doesn't like such casts.
 2197	     Don't create a new tree if op0 != TREE_OPERAND (x, 0): the
 2198	     folded operand is already in the cache, and when called from
 2199	     cp_fold_r the operand will be modified in place.  */
2200 op0 = cp_fold (TREE_OPERAND (x, 0));
2201 if (op0 == error_mark_node)
2202 x = error_mark_node;
2203 break;
2204 }
d2c63826 2205
d2c63826 2206 loc = EXPR_LOCATION (x);
f7d61b1e 2207 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2208
b981525c 2209 if (code == CONVERT_EXPR
2210 && SCALAR_TYPE_P (TREE_TYPE (x))
2211 && op0 != void_node)
2212 /* During parsing we used convert_to_*_nofold; re-convert now using the
2213 folding variants, since fold() doesn't do those transformations. */
2214 x = fold (convert (TREE_TYPE (x), op0));
2215 else if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2216 {
2217 if (op0 == error_mark_node)
2218 x = error_mark_node;
2219 else
2220 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2221 }
111e415b 2222 else
2223 x = fold (x);
d2c63826 2224
2225 /* Conversion of an out-of-range value has implementation-defined
2226 behavior; the language considers it different from arithmetic
2227 overflow, which is undefined. */
2228 if (TREE_CODE (op0) == INTEGER_CST
2229 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2230 TREE_OVERFLOW (x) = false;
2231
2232 break;
2233
bcb45dab 2234 case INDIRECT_REF:
2235 /* We don't need the decltype(auto) obfuscation anymore. */
2236 if (REF_PARENTHESIZED_P (x))
2237 {
2238 tree p = maybe_undo_parenthesized_ref (x);
e71bb662 2239 return cp_fold (p);
bcb45dab 2240 }
2241 goto unary;
2242
d2c63826 2243 case ADDR_EXPR:
e885b147 2244 loc = EXPR_LOCATION (x);
2245 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2246
 2247      /* Cope with user tricks that amount to offsetof, e.g. &((S *) 0)->m.  */
2248 if (op0 != error_mark_node
2249 && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2250 && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2251 {
2252 tree val = get_base_address (op0);
2253 if (val
2254 && INDIRECT_REF_P (val)
2255 && COMPLETE_TYPE_P (TREE_TYPE (val))
2256 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2257 {
2258 val = TREE_OPERAND (val, 0);
2259 STRIP_NOPS (val);
2260 if (TREE_CODE (val) == INTEGER_CST)
3c43ed34 2261 return fold_offsetof (op0, TREE_TYPE (x));
e885b147 2262 }
2263 }
2264 goto finish_unary;
2265
d2c63826 2266 case REALPART_EXPR:
2267 case IMAGPART_EXPR:
69f54cf5 2268 rval_ops = false;
e3533433 2269 /* FALLTHRU */
d2c63826 2270 case CONJ_EXPR:
2271 case FIX_TRUNC_EXPR:
2272 case FLOAT_EXPR:
2273 case NEGATE_EXPR:
2274 case ABS_EXPR:
2275 case BIT_NOT_EXPR:
2276 case TRUTH_NOT_EXPR:
2277 case FIXED_CONVERT_EXPR:
bcb45dab 2278 unary:
d2c63826 2279
2280 loc = EXPR_LOCATION (x);
69f54cf5 2281 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
d2c63826 2282
e885b147 2283 finish_unary:
d2c63826 2284 if (op0 != TREE_OPERAND (x, 0))
1b8c43ab 2285 {
2286 if (op0 == error_mark_node)
2287 x = error_mark_node;
2288 else
89f17a65 2289 {
2290 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2291 if (code == INDIRECT_REF
2292 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2293 {
2294 TREE_READONLY (x) = TREE_READONLY (org_x);
2295 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2296 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2297 }
2298 }
1b8c43ab 2299 }
111e415b 2300 else
2301 x = fold (x);
d2c63826 2302
2303 gcc_assert (TREE_CODE (x) != COND_EXPR
2304 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2305 break;
2306
a2eb1271 2307 case UNARY_PLUS_EXPR:
2308 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2309 if (op0 == error_mark_node)
2310 x = error_mark_node;
2311 else
2312 x = fold_convert (TREE_TYPE (x), op0);
2313 break;
2314
d2c63826 2315 case POSTDECREMENT_EXPR:
2316 case POSTINCREMENT_EXPR:
2317 case INIT_EXPR:
d2c63826 2318 case PREDECREMENT_EXPR:
2319 case PREINCREMENT_EXPR:
2320 case COMPOUND_EXPR:
69f54cf5 2321 case MODIFY_EXPR:
2322 rval_ops = false;
e3533433 2323 /* FALLTHRU */
d2c63826 2324 case POINTER_PLUS_EXPR:
2325 case PLUS_EXPR:
57e83b58 2326 case POINTER_DIFF_EXPR:
d2c63826 2327 case MINUS_EXPR:
2328 case MULT_EXPR:
2329 case TRUNC_DIV_EXPR:
2330 case CEIL_DIV_EXPR:
2331 case FLOOR_DIV_EXPR:
2332 case ROUND_DIV_EXPR:
2333 case TRUNC_MOD_EXPR:
2334 case CEIL_MOD_EXPR:
2335 case ROUND_MOD_EXPR:
2336 case RDIV_EXPR:
2337 case EXACT_DIV_EXPR:
2338 case MIN_EXPR:
2339 case MAX_EXPR:
2340 case LSHIFT_EXPR:
2341 case RSHIFT_EXPR:
2342 case LROTATE_EXPR:
2343 case RROTATE_EXPR:
2344 case BIT_AND_EXPR:
2345 case BIT_IOR_EXPR:
2346 case BIT_XOR_EXPR:
2347 case TRUTH_AND_EXPR:
2348 case TRUTH_ANDIF_EXPR:
2349 case TRUTH_OR_EXPR:
2350 case TRUTH_ORIF_EXPR:
2351 case TRUTH_XOR_EXPR:
2352 case LT_EXPR: case LE_EXPR:
2353 case GT_EXPR: case GE_EXPR:
2354 case EQ_EXPR: case NE_EXPR:
2355 case UNORDERED_EXPR: case ORDERED_EXPR:
2356 case UNLT_EXPR: case UNLE_EXPR:
2357 case UNGT_EXPR: case UNGE_EXPR:
2358 case UNEQ_EXPR: case LTGT_EXPR:
2359 case RANGE_EXPR: case COMPLEX_EXPR:
d2c63826 2360
2361 loc = EXPR_LOCATION (x);
69f54cf5 2362 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2363 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
d2c63826 2364
2365 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
1b8c43ab 2366 {
2367 if (op0 == error_mark_node || op1 == error_mark_node)
2368 x = error_mark_node;
2369 else
2370 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2371 }
111e415b 2372 else
2373 x = fold (x);
d2c63826 2374
6263dde1 2375 if (TREE_NO_WARNING (org_x)
2cde02ad 2376 && warn_nonnull_compare
2377 && COMPARISON_CLASS_P (org_x))
2378 {
2379 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2380 ;
2381 else if (COMPARISON_CLASS_P (x))
2382 TREE_NO_WARNING (x) = 1;
 2383	  /* Otherwise give up on optimizing these; let GIMPLE folders
 2384	     optimize them later on.  */
2385 else if (op0 != TREE_OPERAND (org_x, 0)
2386 || op1 != TREE_OPERAND (org_x, 1))
2387 {
2388 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2389 TREE_NO_WARNING (x) = 1;
2390 }
2391 else
2392 x = org_x;
2393 }
d2c63826 2394 break;
2395
2396 case VEC_COND_EXPR:
2397 case COND_EXPR:
d2c63826 2398 loc = EXPR_LOCATION (x);
69f54cf5 2399 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
d2c63826 2400 op1 = cp_fold (TREE_OPERAND (x, 1));
2401 op2 = cp_fold (TREE_OPERAND (x, 2));
2402
7a7ca07c 2403 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2404 {
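	  /* The sentinel zeroes warn_int_in_bool_context for the rest of
	     this block (restoring it on scope exit), so converting the
	     arms to bool doesn't emit spurious warnings.  */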
f9542e61 2405 warning_sentinel s (warn_int_in_bool_context);
7a7ca07c 2406 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2407 op1 = cp_truthvalue_conversion (op1);
2408 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2409 op2 = cp_truthvalue_conversion (op2);
2410 }
3c382e05 2411 else if (VOID_TYPE_P (TREE_TYPE (x)))
2412 {
2413 if (TREE_CODE (op0) == INTEGER_CST)
2414 {
 2415	      /* If the condition is constant, fold can fold away
 2416		 the COND_EXPR.  Some statement-level uses of COND_EXPR
 2417		 have one of the branches NULL; avoid crashing in fold there.  */
2418 if (!op1)
2419 op1 = build_empty_stmt (loc);
2420 if (!op2)
2421 op2 = build_empty_stmt (loc);
2422 }
2423 else
2424 {
2425 /* Otherwise, don't bother folding a void condition, since
2426 it can't produce a constant value. */
2427 if (op0 != TREE_OPERAND (x, 0)
2428 || op1 != TREE_OPERAND (x, 1)
2429 || op2 != TREE_OPERAND (x, 2))
2430 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2431 break;
2432 }
2433 }
7a7ca07c 2434
f6dfb86a 2435 if (op0 != TREE_OPERAND (x, 0)
2436 || op1 != TREE_OPERAND (x, 1)
2437 || op2 != TREE_OPERAND (x, 2))
1b8c43ab 2438 {
2439 if (op0 == error_mark_node
2440 || op1 == error_mark_node
2441 || op2 == error_mark_node)
2442 x = error_mark_node;
2443 else
2444 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2445 }
f6dfb86a 2446 else
d2c63826 2447 x = fold (x);
2448
bf64d98a 2449 /* A COND_EXPR might have incompatible types in branches if one or both
2450 arms are bitfields. If folding exposed such a branch, fix it up. */
ec72e2f7 2451 if (TREE_CODE (x) != code
6fa371d1 2452 && x != error_mark_node
ec72e2f7 2453 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2454 x = fold_convert (TREE_TYPE (org_x), x);
bf64d98a 2455
d2c63826 2456 break;
2457
2458 case CALL_EXPR:
2459 {
2460 int i, m, sv = optimize, nw = sv, changed = 0;
2461 tree callee = get_callee_fndecl (x);
2462
efe6a40a 2463 /* Some built-in function calls will be evaluated at compile-time in
2464 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2465 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
d2c63826 2466 if (callee && DECL_BUILT_IN (callee) && !optimize
2467 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2468 && current_function_decl
2469 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2470 nw = 1;
d2c63826 2471
2472 x = copy_node (x);
2473
2474 m = call_expr_nargs (x);
2475 for (i = 0; i < m; i++)
2476 {
2477 r = cp_fold (CALL_EXPR_ARG (x, i));
2478 if (r != CALL_EXPR_ARG (x, i))
1b8c43ab 2479 {
2480 if (r == error_mark_node)
2481 {
2482 x = error_mark_node;
2483 break;
2484 }
2485 changed = 1;
2486 }
d2c63826 2487 CALL_EXPR_ARG (x, i) = r;
2488 }
1b8c43ab 2489 if (x == error_mark_node)
2490 break;
d2c63826 2491
2492 optimize = nw;
2493 r = fold (x);
2494 optimize = sv;
2495
2496 if (TREE_CODE (r) != CALL_EXPR)
2497 {
2498 x = cp_fold (r);
2499 break;
2500 }
2501
2502 optimize = nw;
2503
efe6a40a 2504 /* Invoke maybe_constant_value for functions declared
2505 constexpr and not called with AGGR_INIT_EXPRs.
d2c63826 2506 TODO:
efe6a40a 2507 Do constexpr expansion of expressions where the call itself is not
2508 constant, but the call followed by an INDIRECT_REF is. */
29684344 2509 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2510 && !flag_no_inline)
d9cfff22 2511 r = maybe_constant_value (x);
d2c63826 2512 optimize = sv;
2513
2514 if (TREE_CODE (r) != CALL_EXPR)
2515 {
d9cfff22 2516 if (DECL_CONSTRUCTOR_P (callee))
2517 {
2518 loc = EXPR_LOCATION (x);
2519 tree s = build_fold_indirect_ref_loc (loc,
2520 CALL_EXPR_ARG (x, 0));
2521 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2522 }
d2c63826 2523 x = r;
2524 break;
2525 }
2526
2527 if (!changed)
2528 x = org_x;
2529 break;
2530 }
2531
2532 case CONSTRUCTOR:
2533 {
2534 unsigned i;
2535 constructor_elt *p;
2536 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
41a5cb89 2537 vec<constructor_elt, va_gc> *nelts = NULL;
d2c63826 2538 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
41a5cb89 2539 {
2540 tree op = cp_fold (p->value);
41a5cb89 2541 if (op != p->value)
1b8c43ab 2542 {
2543 if (op == error_mark_node)
2544 {
2545 x = error_mark_node;
4ecaaab2 2546 vec_free (nelts);
1b8c43ab 2547 break;
2548 }
4ecaaab2 2549 if (nelts == NULL)
2550 nelts = elts->copy ();
2551 (*nelts)[i].value = op;
1b8c43ab 2552 }
41a5cb89 2553 }
4ecaaab2 2554 if (nelts)
7604a798 2555 {
2556 x = build_constructor (TREE_TYPE (x), nelts);
2557 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2558 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2559 }
f82dc839 2560 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2561 x = fold (x);
d2c63826 2562 break;
2563 }
2564 case TREE_VEC:
2565 {
2566 bool changed = false;
2567 vec<tree, va_gc> *vec = make_tree_vector ();
2568 int i, n = TREE_VEC_LENGTH (x);
2569 vec_safe_reserve (vec, n);
2570
2571 for (i = 0; i < n; i++)
2572 {
2573 tree op = cp_fold (TREE_VEC_ELT (x, i));
2574 vec->quick_push (op);
2575 if (op != TREE_VEC_ELT (x, i))
2576 changed = true;
2577 }
2578
2579 if (changed)
2580 {
2581 r = copy_node (x);
2582 for (i = 0; i < n; i++)
2583 TREE_VEC_ELT (r, i) = (*vec)[i];
2584 x = r;
2585 }
2586
2587 release_tree_vector (vec);
2588 }
2589
2590 break;
2591
2592 case ARRAY_REF:
2593 case ARRAY_RANGE_REF:
2594
2595 loc = EXPR_LOCATION (x);
2596 op0 = cp_fold (TREE_OPERAND (x, 0));
2597 op1 = cp_fold (TREE_OPERAND (x, 1));
2598 op2 = cp_fold (TREE_OPERAND (x, 2));
2599 op3 = cp_fold (TREE_OPERAND (x, 3));
2600
1b8c43ab 2601 if (op0 != TREE_OPERAND (x, 0)
2602 || op1 != TREE_OPERAND (x, 1)
2603 || op2 != TREE_OPERAND (x, 2)
2604 || op3 != TREE_OPERAND (x, 3))
2605 {
2606 if (op0 == error_mark_node
2607 || op1 == error_mark_node
2608 || op2 == error_mark_node
2609 || op3 == error_mark_node)
2610 x = error_mark_node;
2611 else
89f17a65 2612 {
2613 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2614 TREE_READONLY (x) = TREE_READONLY (org_x);
2615 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2616 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2617 }
1b8c43ab 2618 }
d2c63826 2619
2620 x = fold (x);
2621 break;
2622
2af642bf 2623 case SAVE_EXPR:
 2624      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 2625	  folding, evaluates to an invariant.  In that case there is no need
 2626	  to wrap the folded tree in a SAVE_EXPR.  */
2627 r = cp_fold (TREE_OPERAND (x, 0));
2628 if (tree_invariant_p (r))
2629 x = r;
2630 break;
2631
d2c63826 2632 default:
2633 return org_x;
2634 }
2635
2a655a4c 2636 fold_cache->put (org_x, x);
d2c63826 2637 /* Prevent that we try to fold an already folded result again. */
2638 if (x != org_x)
2a655a4c 2639 fold_cache->put (x, x);
d2c63826 2640
2641 return x;
2642}
2643
2644#include "gt-cp-cp-gimplify.h"