gcc/cp/cp-gimplify.c — reconstructed from a git-blame view (git.ipfire.org mirror of thirdparty/gcc.git); blame annotations removed.
/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.

   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
2adfab87 25#include "target.h"
c7131fb2 26#include "basic-block.h"
6de9cd9a 27#include "cp-tree.h"
c7131fb2 28#include "gimple.h"
2adfab87 29#include "predict.h"
c7131fb2 30#include "stor-layout.h"
726a989a 31#include "tree-iterator.h"
45b0be94 32#include "gimplify.h"
0a508bb6 33#include "c-family/c-ubsan.h"
314e6352
ML
34#include "stringpool.h"
35#include "attribs.h"
45b2222a 36#include "asan.h"
2674fa47 37#include "gcc-rich-location.h"
705f02b0 38#include "memmodel.h"
f30025bb 39#include "tm_p.h"
ff603745
JJ
40#include "output.h"
41#include "file-prefix-map.h"
42#include "cgraph.h"
6de9cd9a 43
b2cb7511
TV
44/* Forward declarations. */
45
46static tree cp_genericize_r (tree *, int *, void *);
cda0a029 47static tree cp_fold_r (tree *, int *, void *);
e2df2328 48static void cp_genericize_tree (tree*, bool);
cda0a029 49static tree cp_fold (tree);
b2cb7511 50
fbc315db
ILT
51/* Local declarations. */
52
53enum bc_t { bc_break = 0, bc_continue = 1 };
54
1799e5d5
RH
55/* Stack of labels which are targets for "break" or "continue",
56 linked through TREE_CHAIN. */
57static tree bc_label[2];
fbc315db
ILT
58
59/* Begin a scope which can be exited by a break or continue statement. BC
60 indicates which.
61
b2cb7511
TV
62 Just creates a label with location LOCATION and pushes it into the current
63 context. */
fbc315db
ILT
64
65static tree
b2cb7511 66begin_bc_block (enum bc_t bc, location_t location)
fbc315db 67{
b2cb7511 68 tree label = create_artificial_label (location);
910ad8de 69 DECL_CHAIN (label) = bc_label[bc];
1799e5d5 70 bc_label[bc] = label;
56632b27
JM
71 if (bc == bc_break)
72 LABEL_DECL_BREAK (label) = true;
73 else
74 LABEL_DECL_CONTINUE (label) = true;
fbc315db
ILT
75 return label;
76}
77
78/* Finish a scope which can be exited by a break or continue statement.
b2cb7511 79 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
fbc315db
ILT
80 an expression for the contents of the scope.
81
82 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
b2cb7511 83 BLOCK. Otherwise, just forget the label. */
fbc315db 84
b2cb7511
TV
85static void
86finish_bc_block (tree *block, enum bc_t bc, tree label)
fbc315db 87{
1799e5d5 88 gcc_assert (label == bc_label[bc]);
fbc315db
ILT
89
90 if (TREE_USED (label))
b2cb7511
TV
91 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
92 block);
fbc315db 93
910ad8de
NF
94 bc_label[bc] = DECL_CHAIN (label);
95 DECL_CHAIN (label) = NULL_TREE;
fbc315db
ILT
96}
97
726a989a
RB
98/* Get the LABEL_EXPR to represent a break or continue statement
99 in the current block scope. BC indicates which. */
fbc315db
ILT
100
101static tree
726a989a 102get_bc_label (enum bc_t bc)
fbc315db 103{
1799e5d5 104 tree label = bc_label[bc];
fbc315db 105
fbc315db
ILT
106 /* Mark the label used for finish_bc_block. */
107 TREE_USED (label) = 1;
726a989a 108 return label;
fbc315db
ILT
109}
110
6de9cd9a
DN
111/* Genericize a TRY_BLOCK. */
112
113static void
114genericize_try_block (tree *stmt_p)
115{
116 tree body = TRY_STMTS (*stmt_p);
117 tree cleanup = TRY_HANDLERS (*stmt_p);
118
f293ce4b 119 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
6de9cd9a
DN
120}
121
122/* Genericize a HANDLER by converting to a CATCH_EXPR. */
123
124static void
125genericize_catch_block (tree *stmt_p)
126{
127 tree type = HANDLER_TYPE (*stmt_p);
128 tree body = HANDLER_BODY (*stmt_p);
129
6de9cd9a 130 /* FIXME should the caught type go in TREE_TYPE? */
f293ce4b 131 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
6de9cd9a
DN
132}
133
726a989a
RB
134/* A terser interface for building a representation of an exception
135 specification. */
136
137static tree
138build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
139{
140 tree t;
141
142 /* FIXME should the allowed types go in TREE_TYPE? */
143 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
144 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
145
146 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
147 append_to_statement_list (body, &TREE_OPERAND (t, 0));
148
149 return t;
150}
151
6de9cd9a
DN
152/* Genericize an EH_SPEC_BLOCK by converting it to a
153 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
154
155static void
156genericize_eh_spec_block (tree *stmt_p)
157{
158 tree body = EH_SPEC_STMTS (*stmt_p);
159 tree allowed = EH_SPEC_RAISES (*stmt_p);
1a66d857 160 tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
6de9cd9a 161
726a989a 162 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
d665b6e5
MLI
163 TREE_NO_WARNING (*stmt_p) = true;
164 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
6de9cd9a
DN
165}
166
2674fa47
JM
167/* Return the first non-compound statement in STMT. */
168
169tree
170first_stmt (tree stmt)
171{
172 switch (TREE_CODE (stmt))
173 {
174 case STATEMENT_LIST:
175 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
176 return first_stmt (p->stmt);
177 return void_node;
178
179 case BIND_EXPR:
180 return first_stmt (BIND_EXPR_BODY (stmt));
181
182 default:
183 return stmt;
184 }
185}
186
5a508662
RH
187/* Genericize an IF_STMT by turning it into a COND_EXPR. */
188
189static void
f74d9c8f 190genericize_if_stmt (tree *stmt_p)
5a508662 191{
eeae0768 192 tree stmt, cond, then_, else_;
726a989a 193 location_t locus = EXPR_LOCATION (*stmt_p);
5a508662
RH
194
195 stmt = *stmt_p;
eeae0768 196 cond = IF_COND (stmt);
5a508662
RH
197 then_ = THEN_CLAUSE (stmt);
198 else_ = ELSE_CLAUSE (stmt);
199
2674fa47
JM
200 if (then_ && else_)
201 {
202 tree ft = first_stmt (then_);
203 tree fe = first_stmt (else_);
204 br_predictor pr;
205 if (TREE_CODE (ft) == PREDICT_EXPR
206 && TREE_CODE (fe) == PREDICT_EXPR
207 && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
208 && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
209 {
210 gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
211 richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
212 warning_at (&richloc, OPT_Wattributes,
213 "both branches of %<if%> statement marked as %qs",
2d9273ca 214 pr == PRED_HOT_LABEL ? "likely" : "unlikely");
2674fa47
JM
215 }
216 }
217
5a508662 218 if (!then_)
c2255bc4 219 then_ = build_empty_stmt (locus);
5a508662 220 if (!else_)
c2255bc4 221 else_ = build_empty_stmt (locus);
5a508662 222
eeae0768
RS
223 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
224 stmt = then_;
225 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
226 stmt = else_;
227 else
228 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
5b5dce39
MP
229 if (!EXPR_HAS_LOCATION (stmt))
230 protected_set_expr_location (stmt, locus);
5a508662
RH
231 *stmt_p = stmt;
232}
233
fbc315db
ILT
234/* Build a generic representation of one of the C loop forms. COND is the
235 loop condition or NULL_TREE. BODY is the (possibly compound) statement
236 controlled by the loop. INCR is the increment expression of a for-loop,
237 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
238 evaluated before the loop body as in while and for loops, or after the
239 loop body as in do-while loops. */
240
b2cb7511
TV
241static void
242genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
243 tree incr, bool cond_is_first, int *walk_subtrees,
244 void *data)
fbc315db 245{
b2cb7511 246 tree blab, clab;
27d93d2c 247 tree exit = NULL;
b2cb7511 248 tree stmt_list = NULL;
c42b72a7 249 tree debug_begin = NULL;
b2cb7511 250
c42b72a7
JJ
251 if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
252 protected_set_expr_location (incr, start_locus);
fbc315db 253
b2cb7511 254 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
b2cb7511 255 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
372e6e6b
JJ
256
257 blab = begin_bc_block (bc_break, start_locus);
258 clab = begin_bc_block (bc_continue, start_locus);
259
260 cp_walk_tree (&body, cp_genericize_r, data, NULL);
b2cb7511 261 *walk_subtrees = 0;
fbc315db 262
c42b72a7
JJ
263 if (MAY_HAVE_DEBUG_MARKER_STMTS
264 && (!cond || !integer_zerop (cond)))
265 {
266 debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
267 SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
268 }
269
27d93d2c 270 if (cond && TREE_CODE (cond) != INTEGER_CST)
fbc315db 271 {
27d93d2c
JM
272 /* If COND is constant, don't bother building an exit. If it's false,
273 we won't build a loop. If it's true, any exits are in the body. */
6bdfada4 274 location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
27d93d2c
JM
275 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
276 get_bc_label (bc_break));
277 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
278 build_empty_stmt (cloc), exit);
b2cb7511 279 }
fbc315db 280
27d93d2c 281 if (exit && cond_is_first)
c42b72a7
JJ
282 {
283 append_to_statement_list (debug_begin, &stmt_list);
284 debug_begin = NULL_TREE;
285 append_to_statement_list (exit, &stmt_list);
286 }
b2cb7511
TV
287 append_to_statement_list (body, &stmt_list);
288 finish_bc_block (&stmt_list, bc_continue, clab);
c42b72a7
JJ
289 if (incr)
290 {
291 if (MAY_HAVE_DEBUG_MARKER_STMTS)
292 {
293 tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
294 SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
295 append_to_statement_list (d, &stmt_list);
296 }
297 append_to_statement_list (incr, &stmt_list);
298 }
299 append_to_statement_list (debug_begin, &stmt_list);
27d93d2c
JM
300 if (exit && !cond_is_first)
301 append_to_statement_list (exit, &stmt_list);
fbc315db 302
27d93d2c
JM
303 if (!stmt_list)
304 stmt_list = build_empty_stmt (start_locus);
305
306 tree loop;
307 if (cond && integer_zerop (cond))
308 {
309 if (cond_is_first)
310 loop = fold_build3_loc (start_locus, COND_EXPR,
311 void_type_node, cond, stmt_list,
312 build_empty_stmt (start_locus));
313 else
314 loop = stmt_list;
315 }
316 else
1509db23
JM
317 {
318 location_t loc = start_locus;
319 if (!cond || integer_nonzerop (cond))
320 loc = EXPR_LOCATION (expr_first (body));
321 if (loc == UNKNOWN_LOCATION)
322 loc = start_locus;
323 loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
324 }
27d93d2c
JM
325
326 stmt_list = NULL;
327 append_to_statement_list (loop, &stmt_list);
328 finish_bc_block (&stmt_list, bc_break, blab);
329 if (!stmt_list)
330 stmt_list = build_empty_stmt (start_locus);
fbc315db 331
b2cb7511 332 *stmt_p = stmt_list;
fbc315db
ILT
333}
334
b2cb7511 335/* Genericize a FOR_STMT node *STMT_P. */
fbc315db
ILT
336
337static void
b2cb7511 338genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
339{
340 tree stmt = *stmt_p;
b2cb7511
TV
341 tree expr = NULL;
342 tree loop;
343 tree init = FOR_INIT_STMT (stmt);
fbc315db 344
b2cb7511
TV
345 if (init)
346 {
347 cp_walk_tree (&init, cp_genericize_r, data, NULL);
348 append_to_statement_list (init, &expr);
349 }
fbc315db 350
b2cb7511
TV
351 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
352 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
353 append_to_statement_list (loop, &expr);
27d93d2c
JM
354 if (expr == NULL_TREE)
355 expr = loop;
b2cb7511 356 *stmt_p = expr;
fbc315db
ILT
357}
358
b2cb7511 359/* Genericize a WHILE_STMT node *STMT_P. */
fbc315db
ILT
360
361static void
b2cb7511 362genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
363{
364 tree stmt = *stmt_p;
b2cb7511
TV
365 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
366 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
fbc315db
ILT
367}
368
b2cb7511 369/* Genericize a DO_STMT node *STMT_P. */
fbc315db
ILT
370
371static void
b2cb7511 372genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
373{
374 tree stmt = *stmt_p;
b2cb7511
TV
375 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
376 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
fbc315db
ILT
377}
378
b2cb7511 379/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
fbc315db
ILT
380
381static void
b2cb7511 382genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
383{
384 tree stmt = *stmt_p;
b2cb7511
TV
385 tree break_block, body, cond, type;
386 location_t stmt_locus = EXPR_LOCATION (stmt);
fbc315db 387
fbc315db
ILT
388 body = SWITCH_STMT_BODY (stmt);
389 if (!body)
c2255bc4 390 body = build_empty_stmt (stmt_locus);
b2cb7511
TV
391 cond = SWITCH_STMT_COND (stmt);
392 type = SWITCH_STMT_TYPE (stmt);
fbc315db 393
b2cb7511 394 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
d0f2db23
JJ
395
396 break_block = begin_bc_block (bc_break, stmt_locus);
397
398 cp_walk_tree (&body, cp_genericize_r, data, NULL);
b2cb7511
TV
399 cp_walk_tree (&type, cp_genericize_r, data, NULL);
400 *walk_subtrees = 0;
fbc315db 401
65791f42
JJ
402 if (TREE_USED (break_block))
403 SWITCH_BREAK_LABEL_P (break_block) = 1;
404 finish_bc_block (&body, bc_break, break_block);
9e851845 405 *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
1a2e9708
JJ
406 SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
407 gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
408 || !TREE_USED (break_block));
b2cb7511
TV
409}
410
411/* Genericize a CONTINUE_STMT node *STMT_P. */
412
413static void
414genericize_continue_stmt (tree *stmt_p)
415{
416 tree stmt_list = NULL;
417 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
418 tree label = get_bc_label (bc_continue);
419 location_t location = EXPR_LOCATION (*stmt_p);
420 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
bfeee8ac 421 append_to_statement_list_force (pred, &stmt_list);
b2cb7511
TV
422 append_to_statement_list (jump, &stmt_list);
423 *stmt_p = stmt_list;
fbc315db
ILT
424}
425
b2cb7511
TV
426/* Genericize a BREAK_STMT node *STMT_P. */
427
428static void
429genericize_break_stmt (tree *stmt_p)
430{
431 tree label = get_bc_label (bc_break);
432 location_t location = EXPR_LOCATION (*stmt_p);
433 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
434}
435
436/* Genericize a OMP_FOR node *STMT_P. */
437
438static void
439genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
440{
441 tree stmt = *stmt_p;
442 location_t locus = EXPR_LOCATION (stmt);
443 tree clab = begin_bc_block (bc_continue, locus);
444
445 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
04833609
JJ
446 if (TREE_CODE (stmt) != OMP_TASKLOOP)
447 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
b2cb7511
TV
448 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
449 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
450 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
451 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
452 *walk_subtrees = 0;
453
454 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
455}
456
457/* Hook into the middle of gimplifying an OMP_FOR node. */
1799e5d5
RH
458
459static enum gimplify_status
726a989a 460cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
1799e5d5
RH
461{
462 tree for_stmt = *expr_p;
726a989a 463 gimple_seq seq = NULL;
1799e5d5
RH
464
465 /* Protect ourselves from recursion. */
466 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
467 return GS_UNHANDLED;
468 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
469
726a989a 470 gimplify_and_add (for_stmt, &seq);
726a989a 471 gimple_seq_add_seq (pre_p, seq);
1799e5d5 472
1799e5d5
RH
473 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
474
475 return GS_ALL_DONE;
476}
477
934790cc
ILT
478/* Gimplify an EXPR_STMT node. */
479
480static void
481gimplify_expr_stmt (tree *stmt_p)
482{
483 tree stmt = EXPR_STMT_EXPR (*stmt_p);
484
485 if (stmt == error_mark_node)
486 stmt = NULL;
487
488 /* Gimplification of a statement expression will nullify the
489 statement if all its side effects are moved to *PRE_P and *POST_P.
490
491 In this case we will not want to emit the gimplified statement.
492 However, we may still want to emit a warning, so we do that before
493 gimplification. */
27f33b15 494 if (stmt && warn_unused_value)
934790cc
ILT
495 {
496 if (!TREE_SIDE_EFFECTS (stmt))
497 {
498 if (!IS_EMPTY_STMT (stmt)
499 && !VOID_TYPE_P (TREE_TYPE (stmt))
500 && !TREE_NO_WARNING (stmt))
27f33b15 501 warning (OPT_Wunused_value, "statement with no effect");
934790cc 502 }
27f33b15 503 else
934790cc
ILT
504 warn_if_unused_value (stmt, input_location);
505 }
506
507 if (stmt == NULL_TREE)
508 stmt = alloc_stmt_list ();
509
510 *stmt_p = stmt;
511}
512
6de9cd9a
DN
513/* Gimplify initialization from an AGGR_INIT_EXPR. */
514
515static void
942d334e 516cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
517{
518 tree from = TREE_OPERAND (*expr_p, 1);
519 tree to = TREE_OPERAND (*expr_p, 0);
0fcedd9c 520 tree t;
6de9cd9a 521
6de9cd9a
DN
522 /* What about code that pulls out the temp and uses it elsewhere? I
523 think that such code never uses the TARGET_EXPR as an initializer. If
524 I'm wrong, we'll abort because the temp won't have any RTL. In that
525 case, I guess we'll need to replace references somehow. */
72036b59 526 if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
db80e34e 527 from = TARGET_EXPR_INITIAL (from);
6de9cd9a 528
942d334e
JM
529 /* If we might need to clean up a partially constructed object, break down
530 the CONSTRUCTOR with split_nonconstant_init. */
531 if (TREE_CODE (from) == CONSTRUCTOR
8b5d34fc 532 && flag_exceptions
942d334e
JM
533 && TREE_SIDE_EFFECTS (from)
534 && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (to)))
535 {
536 gimplify_expr (&to, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
537 replace_placeholders (from, to);
538 from = split_nonconstant_init (to, from);
539 cp_genericize_tree (&from, false);
640b23d7 540 copy_if_shared (&from);
942d334e
JM
541 *expr_p = from;
542 return;
543 }
544
c6c7698d
JM
545 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
546 inside the TARGET_EXPR. */
0fcedd9c
JM
547 for (t = from; t; )
548 {
549 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
6de9cd9a 550
0fcedd9c
JM
551 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
552 replace the slot operand with our target.
6de9cd9a 553
0fcedd9c
JM
554 Should we add a target parm to gimplify_expr instead? No, as in this
555 case we want to replace the INIT_EXPR. */
d5f4eddd
JM
556 if (TREE_CODE (sub) == AGGR_INIT_EXPR
557 || TREE_CODE (sub) == VEC_INIT_EXPR)
0fcedd9c 558 {
d5f4eddd
JM
559 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
560 AGGR_INIT_EXPR_SLOT (sub) = to;
561 else
562 VEC_INIT_EXPR_SLOT (sub) = to;
0fcedd9c
JM
563 *expr_p = from;
564
565 /* The initialization is now a side-effect, so the container can
566 become void. */
567 if (from != sub)
568 TREE_TYPE (from) = void_type_node;
569 }
0fcedd9c 570
2166aeb3
MP
571 /* Handle aggregate NSDMI. */
572 replace_placeholders (sub, to);
3e605b20 573
0fcedd9c
JM
574 if (t == sub)
575 break;
576 else
577 t = TREE_OPERAND (t, 1);
6de9cd9a 578 }
0fcedd9c 579
6de9cd9a
DN
580}
581
582/* Gimplify a MUST_NOT_THROW_EXPR. */
583
726a989a
RB
584static enum gimplify_status
585gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
6de9cd9a
DN
586{
587 tree stmt = *expr_p;
325c3691 588 tree temp = voidify_wrapper_expr (stmt, NULL);
6de9cd9a 589 tree body = TREE_OPERAND (stmt, 0);
786f715d
JM
590 gimple_seq try_ = NULL;
591 gimple_seq catch_ = NULL;
355fe088 592 gimple *mnt;
6de9cd9a 593
786f715d 594 gimplify_and_add (body, &try_);
1a66d857 595 mnt = gimple_build_eh_must_not_throw (terminate_fn);
a1a6c5b2 596 gimple_seq_add_stmt_without_update (&catch_, mnt);
786f715d 597 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
6de9cd9a 598
a1a6c5b2 599 gimple_seq_add_stmt_without_update (pre_p, mnt);
6de9cd9a
DN
600 if (temp)
601 {
6de9cd9a 602 *expr_p = temp;
726a989a 603 return GS_OK;
6de9cd9a 604 }
726a989a
RB
605
606 *expr_p = NULL;
607 return GS_ALL_DONE;
6de9cd9a 608}
7c34ced1 609
25de0a29
AH
610/* Return TRUE if an operand (OP) of a given TYPE being copied is
611 really just an empty class copy.
612
613 Check that the operand has a simple form so that TARGET_EXPRs and
614 non-empty CONSTRUCTORs get reduced properly, and we leave the
615 return slot optimization alone because it isn't a copy. */
616
c652ff83 617bool
7d277e17 618simple_empty_class_p (tree type, tree op, tree_code code)
25de0a29 619{
7d277e17
JM
620 if (TREE_CODE (op) == COMPOUND_EXPR)
621 return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
25de0a29 622 return
7d277e17
JM
623 (TREE_CODE (op) == EMPTY_CLASS_EXPR
624 || code == MODIFY_EXPR
25de0a29
AH
625 || is_gimple_lvalue (op)
626 || INDIRECT_REF_P (op)
627 || (TREE_CODE (op) == CONSTRUCTOR
7d277e17 628 && CONSTRUCTOR_NELTS (op) == 0)
25de0a29
AH
629 || (TREE_CODE (op) == CALL_EXPR
630 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
7d277e17 631 && !TREE_CLOBBER_P (op)
dbcd32f8 632 && is_really_empty_class (type, /*ignore_vptr*/true);
25de0a29
AH
633}
634
65a550b4
JM
635/* Returns true if evaluating E as an lvalue has side-effects;
636 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
637 have side-effects until there is a read or write through it. */
638
639static bool
640lvalue_has_side_effects (tree e)
641{
642 if (!TREE_SIDE_EFFECTS (e))
643 return false;
644 while (handled_component_p (e))
645 {
646 if (TREE_CODE (e) == ARRAY_REF
647 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
648 return true;
649 e = TREE_OPERAND (e, 0);
650 }
651 if (DECL_P (e))
652 /* Just naming a variable has no side-effects. */
653 return false;
654 else if (INDIRECT_REF_P (e))
655 /* Similarly, indirection has no side-effects. */
656 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
657 else
658 /* For anything else, trust TREE_SIDE_EFFECTS. */
659 return TREE_SIDE_EFFECTS (e);
660}
661
1a37b6d9
JJ
662/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
663 by expressions with side-effects in other operands. */
664
665static enum gimplify_status
666gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
667 bool (*gimple_test_f) (tree))
668{
669 enum gimplify_status t
670 = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
671 if (t == GS_ERROR)
672 return GS_ERROR;
673 else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
8e5993e2 674 *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
1a37b6d9
JJ
675 return t;
676}
677
7c34ced1
RH
678/* Do C++-specific gimplification. Args are as for gimplify_expr. */
679
680int
726a989a 681cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7c34ced1
RH
682{
683 int saved_stmts_are_full_exprs_p = 0;
f9d0ca40 684 location_t loc = cp_expr_loc_or_input_loc (*expr_p);
7c34ced1
RH
685 enum tree_code code = TREE_CODE (*expr_p);
686 enum gimplify_status ret;
687
688 if (STATEMENT_CODE_P (code))
689 {
690 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
691 current_stmt_tree ()->stmts_are_full_exprs_p
692 = STMT_IS_FULL_EXPR_P (*expr_p);
693 }
694
695 switch (code)
696 {
7c34ced1
RH
697 case AGGR_INIT_EXPR:
698 simplify_aggr_init_expr (expr_p);
699 ret = GS_OK;
700 break;
701
d5f4eddd
JM
702 case VEC_INIT_EXPR:
703 {
704 location_t loc = input_location;
534ecb17 705 tree init = VEC_INIT_EXPR_INIT (*expr_p);
b73a4704 706 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
d5f4eddd
JM
707 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
708 input_location = EXPR_LOCATION (*expr_p);
b73a4704 709 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
4de2f020 710 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
534ecb17 711 from_array,
d5f4eddd 712 tf_warning_or_error);
f4d90295
JJ
713 hash_set<tree> pset;
714 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
e2df2328 715 cp_genericize_tree (expr_p, false);
640b23d7 716 copy_if_shared (expr_p);
d5f4eddd
JM
717 ret = GS_OK;
718 input_location = loc;
719 }
720 break;
721
7c34ced1 722 case THROW_EXPR:
3b426391 723 /* FIXME communicate throw type to back end, probably by moving
7c34ced1
RH
724 THROW_EXPR into ../tree.def. */
725 *expr_p = TREE_OPERAND (*expr_p, 0);
726 ret = GS_OK;
727 break;
728
729 case MUST_NOT_THROW_EXPR:
726a989a 730 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
7c34ced1
RH
731 break;
732
726a989a 733 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
dae7ec87
JM
734 LHS of an assignment might also be involved in the RHS, as in bug
735 25979. */
7c34ced1 736 case INIT_EXPR:
942d334e 737 cp_gimplify_init_expr (expr_p, pre_p);
85a52ea5
JM
738 if (TREE_CODE (*expr_p) != INIT_EXPR)
739 return GS_OK;
191816a3 740 /* Fall through. */
1e2ddf80 741 case MODIFY_EXPR:
25de0a29 742 modify_expr_case:
1e2ddf80
JM
743 {
744 /* If the back end isn't clever enough to know that the lhs and rhs
745 types are the same, add an explicit conversion. */
746 tree op0 = TREE_OPERAND (*expr_p, 0);
747 tree op1 = TREE_OPERAND (*expr_p, 1);
748
0d08582e
JM
749 if (!error_operand_p (op0)
750 && !error_operand_p (op1)
751 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
752 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
1e2ddf80
JM
753 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
754 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
755 TREE_TYPE (op0), op1);
6d729f28 756
7d277e17 757 else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
6d729f28 758 {
25de0a29
AH
759 /* Remove any copies of empty classes. Also drop volatile
760 variables on the RHS to avoid infinite recursion from
761 gimplify_expr trying to load the value. */
25de0a29
AH
762 if (TREE_SIDE_EFFECTS (op1))
763 {
764 if (TREE_THIS_VOLATILE (op1)
765 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
766 op1 = build_fold_addr_expr (op1);
767
768 gimplify_and_add (op1, pre_p);
769 }
65a550b4
JM
770 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
771 is_gimple_lvalue, fb_lvalue);
25de0a29 772 *expr_p = TREE_OPERAND (*expr_p, 0);
6d729f28 773 }
65a550b4
JM
774 /* P0145 says that the RHS is sequenced before the LHS.
775 gimplify_modify_expr gimplifies the RHS before the LHS, but that
776 isn't quite strong enough in two cases:
777
778 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
779 mean it's evaluated after the LHS.
780
781 2) the value calculation of the RHS is also sequenced before the
782 LHS, so for scalar assignment we need to preevaluate if the
783 RHS could be affected by LHS side-effects even if it has no
784 side-effects of its own. We don't need this for classes because
785 class assignment takes its RHS by reference. */
786 else if (flag_strong_eval_order > 1
787 && TREE_CODE (*expr_p) == MODIFY_EXPR
788 && lvalue_has_side_effects (op0)
789 && (TREE_CODE (op1) == CALL_EXPR
790 || (SCALAR_TYPE_P (TREE_TYPE (op1))
791 && !TREE_CONSTANT (op1))))
8e5993e2 792 TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
1e2ddf80 793 }
7c34ced1
RH
794 ret = GS_OK;
795 break;
796
797 case EMPTY_CLASS_EXPR:
f7683d37
RG
798 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
799 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
7c34ced1
RH
800 ret = GS_OK;
801 break;
802
803 case BASELINK:
804 *expr_p = BASELINK_FUNCTIONS (*expr_p);
805 ret = GS_OK;
806 break;
807
808 case TRY_BLOCK:
809 genericize_try_block (expr_p);
810 ret = GS_OK;
811 break;
812
813 case HANDLER:
814 genericize_catch_block (expr_p);
815 ret = GS_OK;
816 break;
817
818 case EH_SPEC_BLOCK:
819 genericize_eh_spec_block (expr_p);
820 ret = GS_OK;
821 break;
822
823 case USING_STMT:
ac3cbee5 824 gcc_unreachable ();
7c34ced1 825
fbc315db 826 case FOR_STMT:
fbc315db 827 case WHILE_STMT:
fbc315db 828 case DO_STMT:
fbc315db 829 case SWITCH_STMT:
b2cb7511
TV
830 case CONTINUE_STMT:
831 case BREAK_STMT:
832 gcc_unreachable ();
fbc315db 833
1799e5d5 834 case OMP_FOR:
acf0174b
JJ
835 case OMP_SIMD:
836 case OMP_DISTRIBUTE:
d81ab49d 837 case OMP_LOOP:
d9a6bd32 838 case OMP_TASKLOOP:
726a989a 839 ret = cp_gimplify_omp_for (expr_p, pre_p);
1799e5d5
RH
840 break;
841
934790cc
ILT
842 case EXPR_STMT:
843 gimplify_expr_stmt (expr_p);
844 ret = GS_OK;
845 break;
846
392e3d51
RS
847 case UNARY_PLUS_EXPR:
848 {
849 tree arg = TREE_OPERAND (*expr_p, 0);
850 tree type = TREE_TYPE (*expr_p);
851 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
852 : arg;
853 ret = GS_OK;
854 }
855 break;
856
12893402 857 case CALL_EXPR:
fe6ebcf1 858 ret = GS_OK;
708935b2
JJ
859 if (flag_strong_eval_order == 2
860 && CALL_EXPR_FN (*expr_p)
861 && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
862 {
6835f8a0 863 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
708935b2 864 enum gimplify_status t
1a37b6d9
JJ
865 = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
866 is_gimple_call_addr);
708935b2
JJ
867 if (t == GS_ERROR)
868 ret = GS_ERROR;
6835f8a0
JJ
869 /* GIMPLE considers most pointer conversion useless, but for
870 calls we actually care about the exact function pointer type. */
871 else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
872 CALL_EXPR_FN (*expr_p)
873 = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
708935b2 874 }
4eb24e01
JM
875 if (!CALL_EXPR_FN (*expr_p))
876 /* Internal function call. */;
877 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
fe6ebcf1 878 {
4eb24e01
JM
879 /* This is a call to a (compound) assignment operator that used
880 the operator syntax; gimplify the RHS first. */
881 gcc_assert (call_expr_nargs (*expr_p) == 2);
882 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
883 enum gimplify_status t
884 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
885 if (t == GS_ERROR)
886 ret = GS_ERROR;
887 }
888 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
889 {
890 /* Leave the last argument for gimplify_call_expr, to avoid problems
891 with __builtin_va_arg_pack(). */
892 int nargs = call_expr_nargs (*expr_p) - 1;
893 for (int i = 0; i < nargs; ++i)
fe6ebcf1
JM
894 {
895 enum gimplify_status t
896 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
897 if (t == GS_ERROR)
898 ret = GS_ERROR;
899 }
900 }
d0cf395a 901 else if (flag_strong_eval_order
4eb24e01
JM
902 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
903 {
d0cf395a 904 /* If flag_strong_eval_order, evaluate the object argument first. */
4eb24e01 905 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
71a93b08 906 if (INDIRECT_TYPE_P (fntype))
4eb24e01
JM
907 fntype = TREE_TYPE (fntype);
908 if (TREE_CODE (fntype) == METHOD_TYPE)
909 {
910 enum gimplify_status t
911 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
912 if (t == GS_ERROR)
913 ret = GS_ERROR;
914 }
915 }
e4082611
JJ
916 if (ret != GS_ERROR)
917 {
918 tree decl = cp_get_callee_fndecl_nofold (*expr_p);
919 if (decl
3d78e008 920 && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
ff603745 921 BUILT_IN_FRONTEND))
e4082611 922 *expr_p = boolean_false_node;
ff603745
JJ
923 else if (decl
924 && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
925 BUILT_IN_FRONTEND))
926 *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
e4082611 927 }
fe6ebcf1
JM
928 break;
929
08f594eb
JM
930 case TARGET_EXPR:
931 /* A TARGET_EXPR that expresses direct-initialization should have been
932 elided by cp_gimplify_init_expr. */
933 gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
934 ret = GS_UNHANDLED;
935 break;
936
25de0a29
AH
937 case RETURN_EXPR:
938 if (TREE_OPERAND (*expr_p, 0)
939 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
940 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
941 {
942 expr_p = &TREE_OPERAND (*expr_p, 0);
943 code = TREE_CODE (*expr_p);
944 /* Avoid going through the INIT_EXPR case, which can
945 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
946 goto modify_expr_case;
947 }
948 /* Fall through. */
949
7c34ced1 950 default:
32e8bb8e 951 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
7c34ced1
RH
952 break;
953 }
954
955 /* Restore saved state. */
956 if (STATEMENT_CODE_P (code))
957 current_stmt_tree ()->stmts_are_full_exprs_p
958 = saved_stmts_are_full_exprs_p;
959
960 return ret;
961}
5a508662 962
d8472c75 963static inline bool
58f9752a 964is_invisiref_parm (const_tree t)
d8472c75 965{
cc77ae10 966 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
d8472c75
JM
967 && DECL_BY_REFERENCE (t));
968}
969
10827cd8
JJ
970/* Return true if the uid in both int tree maps are equal. */
971
2a22f99c
TS
972bool
973cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
10827cd8 974{
10827cd8
JJ
975 return (a->uid == b->uid);
976}
977
978/* Hash a UID in a cxx_int_tree_map. */
979
980unsigned int
2a22f99c 981cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
10827cd8 982{
2a22f99c 983 return item->uid;
10827cd8
JJ
984}
985
4577f730
JJ
986/* A stable comparison routine for use with splay trees and DECLs. */
987
988static int
989splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
990{
991 tree a = (tree) xa;
992 tree b = (tree) xb;
993
994 return DECL_UID (a) - DECL_UID (b);
995}
996
997/* OpenMP context during genericization. */
998
999struct cp_genericize_omp_taskreg
1000{
1001 bool is_parallel;
1002 bool default_shared;
1003 struct cp_genericize_omp_taskreg *outer;
1004 splay_tree variables;
1005};
1006
1007/* Return true if genericization should try to determine if
1008 DECL is firstprivate or shared within task regions. */
1009
1010static bool
1011omp_var_to_track (tree decl)
1012{
1013 tree type = TREE_TYPE (decl);
1014 if (is_invisiref_parm (decl))
1015 type = TREE_TYPE (type);
9f613f06 1016 else if (TYPE_REF_P (type))
8b586510 1017 type = TREE_TYPE (type);
4577f730
JJ
1018 while (TREE_CODE (type) == ARRAY_TYPE)
1019 type = TREE_TYPE (type);
1020 if (type == error_mark_node || !CLASS_TYPE_P (type))
1021 return false;
3048c0c7 1022 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
4577f730
JJ
1023 return false;
1024 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
1025 return false;
1026 return true;
1027}
1028
1029/* Note DECL use in OpenMP region OMP_CTX during genericization. */
1030
1031static void
1032omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
1033{
1034 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
1035 (splay_tree_key) decl);
1036 if (n == NULL)
1037 {
1038 int flags = OMP_CLAUSE_DEFAULT_SHARED;
1039 if (omp_ctx->outer)
1040 omp_cxx_notice_variable (omp_ctx->outer, decl);
1041 if (!omp_ctx->default_shared)
1042 {
1043 struct cp_genericize_omp_taskreg *octx;
1044
1045 for (octx = omp_ctx->outer; octx; octx = octx->outer)
1046 {
1047 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
1048 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
1049 {
1050 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
1051 break;
1052 }
1053 if (octx->is_parallel)
1054 break;
1055 }
1056 if (octx == NULL
1057 && (TREE_CODE (decl) == PARM_DECL
1058 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1059 && DECL_CONTEXT (decl) == current_function_decl)))
1060 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
1061 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
1062 {
1063 /* DECL is implicitly determined firstprivate in
1064 the current task construct. Ensure copy ctor and
1065 dtor are instantiated, because during gimplification
1066 it will be already too late. */
1067 tree type = TREE_TYPE (decl);
1068 if (is_invisiref_parm (decl))
1069 type = TREE_TYPE (type);
9f613f06 1070 else if (TYPE_REF_P (type))
8b586510 1071 type = TREE_TYPE (type);
4577f730
JJ
1072 while (TREE_CODE (type) == ARRAY_TYPE)
1073 type = TREE_TYPE (type);
1074 get_copy_ctor (type, tf_none);
1075 get_dtor (type, tf_none);
1076 }
1077 }
1078 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
1079 }
1080}
1081
1082/* Genericization context. */
1083
ac3cbee5
RG
1084struct cp_genericize_data
1085{
6e2830c3 1086 hash_set<tree> *p_set;
9771b263 1087 vec<tree> bind_expr_stack;
4577f730 1088 struct cp_genericize_omp_taskreg *omp_ctx;
8243e2a9 1089 tree try_block;
7b3a9795 1090 bool no_sanitize_p;
e2df2328 1091 bool handle_invisiref_parm_p;
ac3cbee5
RG
1092};
1093
cda0a029
JM
1094/* Perform any pre-gimplification folding of C++ front end trees to
1095 GENERIC.
1096 Note: The folding of none-omp cases is something to move into
1097 the middle-end. As for now we have most foldings only on GENERIC
1098 in fold-const, we need to perform this before transformation to
1099 GIMPLE-form. */
1100
1101static tree
1102cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
1103{
1104 tree stmt;
1105 enum tree_code code;
1106
1107 *stmt_p = stmt = cp_fold (*stmt_p);
1108
6f5bcd24
JJ
1109 if (((hash_set<tree> *) data)->add (stmt))
1110 {
1111 /* Don't walk subtrees of stmts we've already walked once, otherwise
1112 we can have exponential complexity with e.g. lots of nested
1113 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will return
1114 always the same tree, which the first time cp_fold_r has been
1115 called on it had the subtrees walked. */
1116 *walk_subtrees = 0;
1117 return NULL;
1118 }
1119
cda0a029
JM
1120 code = TREE_CODE (stmt);
1121 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
d81ab49d 1122 || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
cda0a029
JM
1123 {
1124 tree x;
1125 int i, n;
1126
1127 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1128 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1129 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1130 x = OMP_FOR_COND (stmt);
1131 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1132 {
1133 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1134 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1135 }
1136 else if (x && TREE_CODE (x) == TREE_VEC)
1137 {
1138 n = TREE_VEC_LENGTH (x);
1139 for (i = 0; i < n; i++)
1140 {
1141 tree o = TREE_VEC_ELT (x, i);
1142 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1143 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1144 }
1145 }
1146 x = OMP_FOR_INCR (stmt);
1147 if (x && TREE_CODE (x) == TREE_VEC)
1148 {
1149 n = TREE_VEC_LENGTH (x);
1150 for (i = 0; i < n; i++)
1151 {
1152 tree o = TREE_VEC_ELT (x, i);
1153 if (o && TREE_CODE (o) == MODIFY_EXPR)
1154 o = TREE_OPERAND (o, 1);
1155 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1156 || TREE_CODE (o) == POINTER_PLUS_EXPR))
1157 {
1158 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1159 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1160 }
1161 }
1162 }
1163 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1164 *walk_subtrees = 0;
1165 }
1166
1167 return NULL;
1168}
1169
2fa586ad
JM
1170/* Fold ALL the trees! FIXME we should be able to remove this, but
1171 apparently that still causes optimization regressions. */
1172
1173void
1174cp_fold_function (tree fndecl)
1175{
6f5bcd24
JJ
1176 hash_set<tree> pset;
1177 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
2fa586ad
JM
1178}
1179
b7689b96
JM
1180/* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1181
1182static tree genericize_spaceship (tree expr)
1183{
1184 iloc_sentinel s (cp_expr_location (expr));
1185 tree type = TREE_TYPE (expr);
1186 tree op0 = TREE_OPERAND (expr, 0);
1187 tree op1 = TREE_OPERAND (expr, 1);
1188 return genericize_spaceship (type, op0, op1);
1189}
1190
d8472c75
JM
1191/* Perform any pre-gimplification lowering of C++ front end trees to
1192 GENERIC. */
5a508662
RH
1193
1194static tree
d8472c75 1195cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
5a508662
RH
1196{
1197 tree stmt = *stmt_p;
ac3cbee5 1198 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
6e2830c3 1199 hash_set<tree> *p_set = wtd->p_set;
5a508662 1200
4577f730
JJ
1201 /* If in an OpenMP context, note var uses. */
1202 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
5a6ccc94 1203 && (VAR_P (stmt)
4577f730
JJ
1204 || TREE_CODE (stmt) == PARM_DECL
1205 || TREE_CODE (stmt) == RESULT_DECL)
1206 && omp_var_to_track (stmt))
1207 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1208
6aa80414
NS
1209 /* Don't dereference parms in a thunk, pass the references through. */
1210 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1211 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1212 {
1213 *walk_subtrees = 0;
1214 return NULL;
1215 }
1216
4b9f2115 1217 /* Dereference invisible reference parms. */
e2df2328 1218 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
d8472c75 1219 {
cc77ae10 1220 *stmt_p = convert_from_reference (stmt);
7cfd79d6 1221 p_set->add (*stmt_p);
d8472c75
JM
1222 *walk_subtrees = 0;
1223 return NULL;
1224 }
1225
10827cd8
JJ
1226 /* Map block scope extern declarations to visible declarations with the
1227 same name and type in outer scopes if any. */
1228 if (cp_function_chain->extern_decl_map
cb6da767 1229 && VAR_OR_FUNCTION_DECL_P (stmt)
10827cd8
JJ
1230 && DECL_EXTERNAL (stmt))
1231 {
1232 struct cxx_int_tree_map *h, in;
1233 in.uid = DECL_UID (stmt);
2a22f99c 1234 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
10827cd8
JJ
1235 if (h)
1236 {
1237 *stmt_p = h->to;
9e51f66f 1238 TREE_USED (h->to) |= TREE_USED (stmt);
10827cd8
JJ
1239 *walk_subtrees = 0;
1240 return NULL;
1241 }
1242 }
1243
6f3af356 1244 if (TREE_CODE (stmt) == INTEGER_CST
9f613f06 1245 && TYPE_REF_P (TREE_TYPE (stmt))
6f3af356
JJ
1246 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1247 && !wtd->no_sanitize_p)
1248 {
1249 ubsan_maybe_instrument_reference (stmt_p);
1250 if (*stmt_p != stmt)
1251 {
1252 *walk_subtrees = 0;
1253 return NULL_TREE;
1254 }
1255 }
1256
d8472c75 1257 /* Other than invisiref parms, don't walk the same tree twice. */
6e2830c3 1258 if (p_set->contains (stmt))
d8472c75
JM
1259 {
1260 *walk_subtrees = 0;
1261 return NULL_TREE;
1262 }
1263
c74985e3 1264 switch (TREE_CODE (stmt))
d8472c75 1265 {
c74985e3
JJ
1266 case ADDR_EXPR:
1267 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1268 {
1269 /* If in an OpenMP context, note var uses. */
1270 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1271 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1272 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1273 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
4577f730 1274 *walk_subtrees = 0;
c74985e3
JJ
1275 }
1276 break;
1277
1278 case RETURN_EXPR:
1279 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1280 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1281 *walk_subtrees = 0;
1282 break;
1283
1284 case OMP_CLAUSE:
1285 switch (OMP_CLAUSE_CODE (stmt))
1286 {
1287 case OMP_CLAUSE_LASTPRIVATE:
1288 /* Don't dereference an invisiref in OpenMP clauses. */
1289 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1290 {
1291 *walk_subtrees = 0;
1292 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1293 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1294 cp_genericize_r, data, NULL);
1295 }
1296 break;
1297 case OMP_CLAUSE_PRIVATE:
1298 /* Don't dereference an invisiref in OpenMP clauses. */
1299 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
4577f730 1300 *walk_subtrees = 0;
c74985e3
JJ
1301 else if (wtd->omp_ctx != NULL)
1302 {
1303 /* Private clause doesn't cause any references to the
1304 var in outer contexts, avoid calling
1305 omp_cxx_notice_variable for it. */
1306 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1307 wtd->omp_ctx = NULL;
1308 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1309 data, NULL);
1310 wtd->omp_ctx = old;
1311 *walk_subtrees = 0;
1312 }
1313 break;
1314 case OMP_CLAUSE_SHARED:
1315 case OMP_CLAUSE_FIRSTPRIVATE:
1316 case OMP_CLAUSE_COPYIN:
1317 case OMP_CLAUSE_COPYPRIVATE:
6a2892a6
JJ
1318 case OMP_CLAUSE_INCLUSIVE:
1319 case OMP_CLAUSE_EXCLUSIVE:
c74985e3
JJ
1320 /* Don't dereference an invisiref in OpenMP clauses. */
1321 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
acf0174b 1322 *walk_subtrees = 0;
c74985e3
JJ
1323 break;
1324 case OMP_CLAUSE_REDUCTION:
28567c40
JJ
1325 case OMP_CLAUSE_IN_REDUCTION:
1326 case OMP_CLAUSE_TASK_REDUCTION:
c74985e3
JJ
1327 /* Don't dereference an invisiref in reduction clause's
1328 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1329 still needs to be genericized. */
1330 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1331 {
1332 *walk_subtrees = 0;
1333 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1334 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1335 cp_genericize_r, data, NULL);
1336 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1337 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1338 cp_genericize_r, data, NULL);
1339 }
1340 break;
1341 default:
1342 break;
1343 }
1344 break;
1345
1346 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1347 to lower this construct before scanning it, so we need to lower these
1348 before doing anything else. */
1349 case CLEANUP_STMT:
1350 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1351 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1352 : TRY_FINALLY_EXPR,
1353 void_type_node,
1354 CLEANUP_BODY (stmt),
1355 CLEANUP_EXPR (stmt));
1356 break;
1357
1358 case IF_STMT:
f74d9c8f
JJ
1359 genericize_if_stmt (stmt_p);
1360 /* *stmt_p has changed, tail recurse to handle it again. */
1361 return cp_genericize_r (stmt_p, walk_subtrees, data);
f74d9c8f 1362
c74985e3
JJ
1363 /* COND_EXPR might have incompatible types in branches if one or both
1364 arms are bitfields. Fix it up now. */
1365 case COND_EXPR:
1366 {
1367 tree type_left
1368 = (TREE_OPERAND (stmt, 1)
1369 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1370 : NULL_TREE);
1371 tree type_right
1372 = (TREE_OPERAND (stmt, 2)
1373 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1374 : NULL_TREE);
1375 if (type_left
1376 && !useless_type_conversion_p (TREE_TYPE (stmt),
1377 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1378 {
1379 TREE_OPERAND (stmt, 1)
1380 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1381 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1382 type_left));
1383 }
1384 if (type_right
1385 && !useless_type_conversion_p (TREE_TYPE (stmt),
1386 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1387 {
1388 TREE_OPERAND (stmt, 2)
1389 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1390 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1391 type_right));
1392 }
1393 }
1394 break;
bbdf5682 1395
c74985e3 1396 case BIND_EXPR:
4577f730
JJ
1397 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1398 {
1399 tree decl;
1400 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
5a6ccc94 1401 if (VAR_P (decl)
4577f730
JJ
1402 && !DECL_EXTERNAL (decl)
1403 && omp_var_to_track (decl))
1404 {
1405 splay_tree_node n
1406 = splay_tree_lookup (wtd->omp_ctx->variables,
1407 (splay_tree_key) decl);
1408 if (n == NULL)
1409 splay_tree_insert (wtd->omp_ctx->variables,
1410 (splay_tree_key) decl,
1411 TREE_STATIC (decl)
1412 ? OMP_CLAUSE_DEFAULT_SHARED
1413 : OMP_CLAUSE_DEFAULT_PRIVATE);
1414 }
1415 }
45b2222a 1416 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
7b3a9795
MP
1417 {
1418 /* The point here is to not sanitize static initializers. */
1419 bool no_sanitize_p = wtd->no_sanitize_p;
1420 wtd->no_sanitize_p = true;
1421 for (tree decl = BIND_EXPR_VARS (stmt);
1422 decl;
1423 decl = DECL_CHAIN (decl))
1424 if (VAR_P (decl)
1425 && TREE_STATIC (decl)
1426 && DECL_INITIAL (decl))
1427 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1428 wtd->no_sanitize_p = no_sanitize_p;
1429 }
9771b263 1430 wtd->bind_expr_stack.safe_push (stmt);
ac3cbee5
RG
1431 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1432 cp_genericize_r, data, NULL);
9771b263 1433 wtd->bind_expr_stack.pop ();
c74985e3 1434 break;
ac3cbee5 1435
c74985e3
JJ
1436 case USING_STMT:
1437 {
1438 tree block = NULL_TREE;
1439
1440 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1441 BLOCK, and append an IMPORTED_DECL to its
1442 BLOCK_VARS chained list. */
1443 if (wtd->bind_expr_stack.exists ())
1444 {
1445 int i;
1446 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1447 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1448 break;
1449 }
1450 if (block)
1451 {
0c923157
JM
1452 tree decl = TREE_OPERAND (stmt, 0);
1453 gcc_assert (decl);
c74985e3 1454
0c923157
JM
1455 if (undeduced_auto_decl (decl))
1456 /* Omit from the GENERIC, the back-end can't handle it. */;
1457 else
1458 {
1459 tree using_directive = make_node (IMPORTED_DECL);
1460 TREE_TYPE (using_directive) = void_type_node;
ac3cbee5 1461
0c923157
JM
1462 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1463 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1464 BLOCK_VARS (block) = using_directive;
1465 }
c74985e3
JJ
1466 }
1467 /* The USING_STMT won't appear in GENERIC. */
1468 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1469 *walk_subtrees = 0;
1470 }
1471 break;
1472
1473 case DECL_EXPR:
1474 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
ac3cbee5 1475 {
c74985e3
JJ
1476 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1477 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1478 *walk_subtrees = 0;
ac3cbee5 1479 }
c74985e3 1480 else
ac3cbee5 1481 {
c74985e3
JJ
1482 tree d = DECL_EXPR_DECL (stmt);
1483 if (VAR_P (d))
1484 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
ac3cbee5 1485 }
c74985e3 1486 break;
4577f730 1487
c74985e3
JJ
1488 case OMP_PARALLEL:
1489 case OMP_TASK:
1490 case OMP_TASKLOOP:
1491 {
1492 struct cp_genericize_omp_taskreg omp_ctx;
1493 tree c, decl;
1494 splay_tree_node n;
1495
1496 *walk_subtrees = 0;
1497 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1498 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1499 omp_ctx.default_shared = omp_ctx.is_parallel;
1500 omp_ctx.outer = wtd->omp_ctx;
1501 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1502 wtd->omp_ctx = &omp_ctx;
1503 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1504 switch (OMP_CLAUSE_CODE (c))
1505 {
1506 case OMP_CLAUSE_SHARED:
1507 case OMP_CLAUSE_PRIVATE:
1508 case OMP_CLAUSE_FIRSTPRIVATE:
1509 case OMP_CLAUSE_LASTPRIVATE:
1510 decl = OMP_CLAUSE_DECL (c);
1511 if (decl == error_mark_node || !omp_var_to_track (decl))
1512 break;
1513 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1514 if (n != NULL)
1515 break;
1516 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1517 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1518 ? OMP_CLAUSE_DEFAULT_SHARED
1519 : OMP_CLAUSE_DEFAULT_PRIVATE);
1520 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1521 omp_cxx_notice_variable (omp_ctx.outer, decl);
4577f730 1522 break;
c74985e3
JJ
1523 case OMP_CLAUSE_DEFAULT:
1524 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1525 omp_ctx.default_shared = true;
1526 default:
4577f730 1527 break;
c74985e3
JJ
1528 }
1529 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1530 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1531 else
1532 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1533 wtd->omp_ctx = omp_ctx.outer;
1534 splay_tree_delete (omp_ctx.variables);
1535 }
1536 break;
1537
1538 case TRY_BLOCK:
1539 {
1540 *walk_subtrees = 0;
1541 tree try_block = wtd->try_block;
1542 wtd->try_block = stmt;
1543 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1544 wtd->try_block = try_block;
1545 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1546 }
1547 break;
1548
1549 case MUST_NOT_THROW_EXPR:
8243e2a9
JM
1550 /* MUST_NOT_THROW_COND might be something else with TM. */
1551 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1552 {
1553 *walk_subtrees = 0;
1554 tree try_block = wtd->try_block;
1555 wtd->try_block = stmt;
1556 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1557 wtd->try_block = try_block;
1558 }
c74985e3
JJ
1559 break;
1560
1561 case THROW_EXPR:
1562 {
1563 location_t loc = location_of (stmt);
1564 if (TREE_NO_WARNING (stmt))
1565 /* Never mind. */;
1566 else if (wtd->try_block)
1567 {
097f82ec
DM
1568 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1569 {
1570 auto_diagnostic_group d;
1571 if (warning_at (loc, OPT_Wterminate,
a9c697b8 1572 "%<throw%> will always call %<terminate%>")
097f82ec
DM
1573 && cxx_dialect >= cxx11
1574 && DECL_DESTRUCTOR_P (current_function_decl))
a9c697b8 1575 inform (loc, "in C++11 destructors default to %<noexcept%>");
097f82ec 1576 }
c74985e3
JJ
1577 }
1578 else
1579 {
1580 if (warn_cxx11_compat && cxx_dialect < cxx11
1581 && DECL_DESTRUCTOR_P (current_function_decl)
1582 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1583 == NULL_TREE)
1584 && (get_defaulted_eh_spec (current_function_decl)
1585 == empty_except_spec))
1586 warning_at (loc, OPT_Wc__11_compat,
a9c697b8
MS
1587 "in C++11 this %<throw%> will call %<terminate%> "
1588 "because destructors default to %<noexcept%>");
c74985e3
JJ
1589 }
1590 }
1591 break;
1592
1593 case CONVERT_EXPR:
1594 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1595 break;
1596
1597 case FOR_STMT:
1598 genericize_for_stmt (stmt_p, walk_subtrees, data);
1599 break;
1600
1601 case WHILE_STMT:
1602 genericize_while_stmt (stmt_p, walk_subtrees, data);
1603 break;
1604
1605 case DO_STMT:
1606 genericize_do_stmt (stmt_p, walk_subtrees, data);
1607 break;
1608
1609 case SWITCH_STMT:
1610 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1611 break;
1612
1613 case CONTINUE_STMT:
1614 genericize_continue_stmt (stmt_p);
1615 break;
1616
1617 case BREAK_STMT:
1618 genericize_break_stmt (stmt_p);
1619 break;
1620
b7689b96
JM
1621 case SPACESHIP_EXPR:
1622 *stmt_p = genericize_spaceship (*stmt_p);
1623 break;
1624
c74985e3
JJ
1625 case OMP_FOR:
1626 case OMP_SIMD:
1627 case OMP_DISTRIBUTE:
d81ab49d 1628 case OMP_LOOP:
950ad0ba 1629 case OACC_LOOP:
c74985e3
JJ
1630 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1631 break;
1632
1633 case PTRMEM_CST:
9d409934
JM
1634 /* By the time we get here we're handing off to the back end, so we don't
1635 need or want to preserve PTRMEM_CST anymore. */
1636 *stmt_p = cplus_expand_constant (stmt);
1637 *walk_subtrees = 0;
c74985e3
JJ
1638 break;
1639
1640 case MEM_REF:
f31a8339 1641 /* For MEM_REF, make sure not to sanitize the second operand even
c74985e3 1642 if it has reference type. It is just an offset with a type
f31a8339
JJ
1643 holding other information. There is no other processing we
1644 need to do for INTEGER_CSTs, so just ignore the second argument
1645 unconditionally. */
1646 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1647 *walk_subtrees = 0;
c74985e3
JJ
1648 break;
1649
1650 case NOP_EXPR:
1651 if (!wtd->no_sanitize_p
1652 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
9f613f06 1653 && TYPE_REF_P (TREE_TYPE (stmt)))
6f3af356 1654 ubsan_maybe_instrument_reference (stmt_p);
c74985e3
JJ
1655 break;
1656
1657 case CALL_EXPR:
861d4af8
AS
1658 /* Evaluate function concept checks instead of treating them as
1659 normal functions. */
1660 if (concept_check_p (stmt))
1661 {
1662 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1663 * walk_subtrees = 0;
1664 break;
1665 }
1666
c74985e3
JJ
1667 if (!wtd->no_sanitize_p
1668 && sanitize_flags_p ((SANITIZE_NULL
1669 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
944fa280
JJ
1670 {
1671 tree fn = CALL_EXPR_FN (stmt);
1672 if (fn != NULL_TREE
1673 && !error_operand_p (fn)
71a93b08 1674 && INDIRECT_TYPE_P (TREE_TYPE (fn))
944fa280
JJ
1675 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1676 {
1677 bool is_ctor
1678 = TREE_CODE (fn) == ADDR_EXPR
1679 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1680 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
45b2222a 1681 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
35228ac7 1682 ubsan_maybe_instrument_member_call (stmt, is_ctor);
45b2222a 1683 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
35228ac7 1684 cp_ubsan_maybe_instrument_member_call (stmt);
944fa280 1685 }
830421fc
JJ
1686 else if (fn == NULL_TREE
1687 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1688 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
9f613f06 1689 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
830421fc 1690 *walk_subtrees = 0;
944fa280 1691 }
aafdbe06
JM
1692 /* Fall through. */
1693 case AGGR_INIT_EXPR:
1694 /* For calls to a multi-versioned function, overload resolution
1695 returns the function with the highest target priority, that is,
1696 the version that will checked for dispatching first. If this
1697 version is inlinable, a direct call to this version can be made
1698 otherwise the call should go through the dispatcher. */
1699 {
f5f035a3 1700 tree fn = cp_get_callee_fndecl_nofold (stmt);
aafdbe06
JM
1701 if (fn && DECL_FUNCTION_VERSIONED (fn)
1702 && (current_function_decl == NULL
1703 || !targetm.target_option.can_inline_p (current_function_decl,
1704 fn)))
1705 if (tree dis = get_function_version_dispatcher (fn))
1706 {
1707 mark_versions_used (dis);
1708 dis = build_address (dis);
1709 if (TREE_CODE (stmt) == CALL_EXPR)
1710 CALL_EXPR_FN (stmt) = dis;
1711 else
1712 AGGR_INIT_EXPR_FN (stmt) = dis;
1713 }
1714 }
c74985e3
JJ
1715 break;
1716
570f86f9
JJ
1717 case TARGET_EXPR:
1718 if (TARGET_EXPR_INITIAL (stmt)
1719 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1720 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1721 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1722 break;
1723
861d4af8
AS
1724 case TEMPLATE_ID_EXPR:
1725 gcc_assert (concept_check_p (stmt));
1726 /* Emit the value of the concept check. */
1727 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1728 walk_subtrees = 0;
1729 break;
1730
c74985e3
JJ
1731 default:
1732 if (IS_TYPE_OR_DECL_P (stmt))
1733 *walk_subtrees = 0;
1734 break;
944fa280 1735 }
ac3cbee5 1736
6e2830c3 1737 p_set->add (*stmt_p);
c8094d83 1738
5a508662
RH
1739 return NULL;
1740}
1741
b2cb7511
TV
1742/* Lower C++ front end trees to GENERIC in T_P. */
1743
1744static void
e2df2328 1745cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
b2cb7511
TV
1746{
1747 struct cp_genericize_data wtd;
1748
6e2830c3 1749 wtd.p_set = new hash_set<tree>;
9771b263 1750 wtd.bind_expr_stack.create (0);
b2cb7511 1751 wtd.omp_ctx = NULL;
8243e2a9 1752 wtd.try_block = NULL_TREE;
7b3a9795 1753 wtd.no_sanitize_p = false;
e2df2328 1754 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
b2cb7511 1755 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
6e2830c3 1756 delete wtd.p_set;
9771b263 1757 wtd.bind_expr_stack.release ();
45b2222a 1758 if (sanitize_flags_p (SANITIZE_VPTR))
35228ac7 1759 cp_ubsan_instrument_member_accesses (t_p);
b2cb7511
TV
1760}
1761
0a508bb6
JJ
1762/* If a function that should end with a return in non-void
1763 function doesn't obviously end with return, add ubsan
1b6fa695
ML
1764 instrumentation code to verify it at runtime. If -fsanitize=return
1765 is not enabled, instrument __builtin_unreachable. */
0a508bb6
JJ
1766
1767static void
1b6fa695 1768cp_maybe_instrument_return (tree fndecl)
0a508bb6
JJ
1769{
1770 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1771 || DECL_CONSTRUCTOR_P (fndecl)
1772 || DECL_DESTRUCTOR_P (fndecl)
1773 || !targetm.warn_func_return (fndecl))
1774 return;
1775
81e4859a
JJ
1776 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1777 /* Don't add __builtin_unreachable () if not optimizing, it will not
1778 improve any optimizations in that case, just break UB code.
1779 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1780 UBSan covers this with ubsan_instrument_return above where sufficient
1781 information is provided, while the __builtin_unreachable () below
1782 if return sanitization is disabled will just result in hard to
1783 understand runtime error without location. */
1784 && (!optimize
1785 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1786 return;
1787
0a508bb6
JJ
1788 tree t = DECL_SAVED_TREE (fndecl);
1789 while (t)
1790 {
1791 switch (TREE_CODE (t))
1792 {
1793 case BIND_EXPR:
1794 t = BIND_EXPR_BODY (t);
1795 continue;
1796 case TRY_FINALLY_EXPR:
78a5fce0 1797 case CLEANUP_POINT_EXPR:
0a508bb6
JJ
1798 t = TREE_OPERAND (t, 0);
1799 continue;
1800 case STATEMENT_LIST:
1801 {
1802 tree_stmt_iterator i = tsi_last (t);
f1bc6cae
JJ
1803 while (!tsi_end_p (i))
1804 {
1805 tree p = tsi_stmt (i);
1806 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1807 break;
1808 tsi_prev (&i);
1809 }
0a508bb6
JJ
1810 if (!tsi_end_p (i))
1811 {
1812 t = tsi_stmt (i);
1813 continue;
1814 }
1815 }
1816 break;
1817 case RETURN_EXPR:
1818 return;
1819 default:
1820 break;
1821 }
1822 break;
1823 }
1824 if (t == NULL_TREE)
1825 return;
459bcfb0
JJ
1826 tree *p = &DECL_SAVED_TREE (fndecl);
1827 if (TREE_CODE (*p) == BIND_EXPR)
1828 p = &BIND_EXPR_BODY (*p);
1b6fa695
ML
1829
1830 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1831 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1832 t = ubsan_instrument_return (loc);
1833 else
1834 {
1835 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1836 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1837 }
1838
459bcfb0 1839 append_to_statement_list (t, p);
0a508bb6
JJ
1840}
1841
5a508662
RH
1842void
1843cp_genericize (tree fndecl)
1844{
d8472c75 1845 tree t;
d8472c75
JM
1846
1847 /* Fix up the types of parms passed by invisible reference. */
910ad8de 1848 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
dfb5c523
MM
1849 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1850 {
1851 /* If a function's arguments are copied to create a thunk,
1852 then DECL_BY_REFERENCE will be set -- but the type of the
1853 argument will be a pointer type, so we will never get
1854 here. */
1855 gcc_assert (!DECL_BY_REFERENCE (t));
1856 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1857 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1858 DECL_BY_REFERENCE (t) = 1;
1859 TREE_ADDRESSABLE (t) = 0;
1860 relayout_decl (t);
1861 }
d8472c75 1862
cc77ae10
JM
1863 /* Do the same for the return value. */
1864 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1865 {
1866 t = DECL_RESULT (fndecl);
1867 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1868 DECL_BY_REFERENCE (t) = 1;
1869 TREE_ADDRESSABLE (t) = 0;
1870 relayout_decl (t);
140806fa
JJ
1871 if (DECL_NAME (t))
1872 {
1873 /* Adjust DECL_VALUE_EXPR of the original var. */
1874 tree outer = outer_curly_brace_block (current_function_decl);
1875 tree var;
1876
1877 if (outer)
1878 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
c60dc053
JJ
1879 if (VAR_P (var)
1880 && DECL_NAME (t) == DECL_NAME (var)
140806fa
JJ
1881 && DECL_HAS_VALUE_EXPR_P (var)
1882 && DECL_VALUE_EXPR (var) == t)
1883 {
1884 tree val = convert_from_reference (t);
1885 SET_DECL_VALUE_EXPR (var, val);
1886 break;
1887 }
1888 }
cc77ae10
JM
1889 }
1890
d8472c75
JM
1891 /* If we're a clone, the body is already GIMPLE. */
1892 if (DECL_CLONED_FUNCTION_P (fndecl))
1893 return;
1894
ee78cbaa
JJ
1895 /* Allow cp_genericize calls to be nested. */
1896 tree save_bc_label[2];
1897 save_bc_label[bc_break] = bc_label[bc_break];
1898 save_bc_label[bc_continue] = bc_label[bc_continue];
1899 bc_label[bc_break] = NULL_TREE;
1900 bc_label[bc_continue] = NULL_TREE;
1901
d8472c75
JM
1902 /* We do want to see every occurrence of the parms, so we can't just use
1903 walk_tree's hash functionality. */
e2df2328 1904 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
5a508662 1905
1b6fa695 1906 cp_maybe_instrument_return (fndecl);
0a508bb6 1907
5a508662
RH
1908 /* Do everything else. */
1909 c_genericize (fndecl);
1799e5d5
RH
1910
1911 gcc_assert (bc_label[bc_break] == NULL);
1912 gcc_assert (bc_label[bc_continue] == NULL);
ee78cbaa
JJ
1913 bc_label[bc_break] = save_bc_label[bc_break];
1914 bc_label[bc_continue] = save_bc_label[bc_continue];
1799e5d5
RH
1915}
1916\f
1917/* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1918 NULL if there is in fact nothing to do. ARG2 may be null if FN
1919 actually only takes one argument. */
1920
1921static tree
1922cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1923{
c2898ec9 1924 tree defparm, parm, t;
94a0dd7b
SL
1925 int i = 0;
1926 int nargs;
1927 tree *argarray;
fae2b46b 1928
1799e5d5
RH
1929 if (fn == NULL)
1930 return NULL;
1931
94a0dd7b 1932 nargs = list_length (DECL_ARGUMENTS (fn));
86b8fed1 1933 argarray = XALLOCAVEC (tree, nargs);
94a0dd7b 1934
fae2b46b
JJ
1935 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1936 if (arg2)
1937 defparm = TREE_CHAIN (defparm);
1938
4dbeb716 1939 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1799e5d5
RH
1940 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1941 {
1942 tree inner_type = TREE_TYPE (arg1);
1943 tree start1, end1, p1;
1944 tree start2 = NULL, p2 = NULL;
c2898ec9 1945 tree ret = NULL, lab;
1799e5d5
RH
1946
1947 start1 = arg1;
1948 start2 = arg2;
1949 do
1950 {
1951 inner_type = TREE_TYPE (inner_type);
1952 start1 = build4 (ARRAY_REF, inner_type, start1,
1953 size_zero_node, NULL, NULL);
1954 if (arg2)
1955 start2 = build4 (ARRAY_REF, inner_type, start2,
1956 size_zero_node, NULL, NULL);
1957 }
1958 while (TREE_CODE (inner_type) == ARRAY_TYPE);
db3927fb 1959 start1 = build_fold_addr_expr_loc (input_location, start1);
1799e5d5 1960 if (arg2)
db3927fb 1961 start2 = build_fold_addr_expr_loc (input_location, start2);
1799e5d5
RH
1962
1963 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
5d49b6a7 1964 end1 = fold_build_pointer_plus (start1, end1);
1799e5d5 1965
b731b390 1966 p1 = create_tmp_var (TREE_TYPE (start1));
726a989a 1967 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1799e5d5
RH
1968 append_to_statement_list (t, &ret);
1969
1970 if (arg2)
1971 {
b731b390 1972 p2 = create_tmp_var (TREE_TYPE (start2));
726a989a 1973 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1799e5d5
RH
1974 append_to_statement_list (t, &ret);
1975 }
1976
c2255bc4 1977 lab = create_artificial_label (input_location);
1799e5d5
RH
1978 t = build1 (LABEL_EXPR, void_type_node, lab);
1979 append_to_statement_list (t, &ret);
1980
94a0dd7b 1981 argarray[i++] = p1;
1799e5d5 1982 if (arg2)
94a0dd7b 1983 argarray[i++] = p2;
fae2b46b 1984 /* Handle default arguments. */
d2ee546f
JJ
1985 for (parm = defparm; parm && parm != void_list_node;
1986 parm = TREE_CHAIN (parm), i++)
94a0dd7b 1987 argarray[i] = convert_default_arg (TREE_VALUE (parm),
4dbeb716
JJ
1988 TREE_PURPOSE (parm), fn,
1989 i - is_method, tf_warning_or_error);
94a0dd7b 1990 t = build_call_a (fn, i, argarray);
c2898ec9
JJ
1991 t = fold_convert (void_type_node, t);
1992 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1799e5d5
RH
1993 append_to_statement_list (t, &ret);
1994
5d49b6a7 1995 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
726a989a 1996 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1799e5d5
RH
1997 append_to_statement_list (t, &ret);
1998
1999 if (arg2)
2000 {
5d49b6a7 2001 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
726a989a 2002 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1799e5d5
RH
2003 append_to_statement_list (t, &ret);
2004 }
2005
2006 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2007 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2008 append_to_statement_list (t, &ret);
2009
2010 return ret;
2011 }
2012 else
2013 {
db3927fb 2014 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1799e5d5 2015 if (arg2)
db3927fb 2016 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
fae2b46b 2017 /* Handle default arguments. */
d2ee546f 2018 for (parm = defparm; parm && parm != void_list_node;
94a0dd7b
SL
2019 parm = TREE_CHAIN (parm), i++)
2020 argarray[i] = convert_default_arg (TREE_VALUE (parm),
4dbeb716
JJ
2021 TREE_PURPOSE (parm), fn,
2022 i - is_method, tf_warning_or_error);
c2898ec9
JJ
2023 t = build_call_a (fn, i, argarray);
2024 t = fold_convert (void_type_node, t);
2025 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1799e5d5
RH
2026 }
2027}
2028
2029/* Return code to initialize DECL with its default constructor, or
2030 NULL if there's nothing to do. */
2031
2032tree
12308bc6 2033cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1799e5d5
RH
2034{
2035 tree info = CP_OMP_CLAUSE_INFO (clause);
2036 tree ret = NULL;
2037
2038 if (info)
2039 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2040
2041 return ret;
2042}
2043
2044/* Return code to initialize DST with a copy constructor from SRC. */
2045
2046tree
2047cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2048{
2049 tree info = CP_OMP_CLAUSE_INFO (clause);
2050 tree ret = NULL;
2051
2052 if (info)
2053 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2054 if (ret == NULL)
726a989a 2055 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1799e5d5
RH
2056
2057 return ret;
2058}
2059
2060/* Similarly, except use an assignment operator instead. */
2061
2062tree
2063cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2064{
2065 tree info = CP_OMP_CLAUSE_INFO (clause);
2066 tree ret = NULL;
2067
2068 if (info)
2069 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2070 if (ret == NULL)
726a989a 2071 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1799e5d5
RH
2072
2073 return ret;
2074}
2075
2076/* Return code to destroy DECL. */
2077
2078tree
2079cxx_omp_clause_dtor (tree clause, tree decl)
2080{
2081 tree info = CP_OMP_CLAUSE_INFO (clause);
2082 tree ret = NULL;
2083
2084 if (info)
2085 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2086
2087 return ret;
5a508662 2088}
077b0dfb
JJ
2089
2090/* True if OpenMP should privatize what this DECL points to rather
2091 than the DECL itself. */
2092
2093bool
58f9752a 2094cxx_omp_privatize_by_reference (const_tree decl)
077b0dfb 2095{
9f613f06 2096 return (TYPE_REF_P (TREE_TYPE (decl))
acf0174b 2097 || is_invisiref_parm (decl));
077b0dfb 2098}
a68ab351 2099
20906c66
JJ
2100/* Return true if DECL is const qualified var having no mutable member. */
2101bool
2102cxx_omp_const_qual_no_mutable (tree decl)
a68ab351 2103{
20906c66 2104 tree type = TREE_TYPE (decl);
9f613f06 2105 if (TYPE_REF_P (type))
a68ab351
JJ
2106 {
2107 if (!is_invisiref_parm (decl))
20906c66 2108 return false;
a68ab351
JJ
2109 type = TREE_TYPE (type);
2110
2111 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2112 {
2113 /* NVR doesn't preserve const qualification of the
2114 variable's type. */
2115 tree outer = outer_curly_brace_block (current_function_decl);
2116 tree var;
2117
2118 if (outer)
910ad8de 2119 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
c60dc053
JJ
2120 if (VAR_P (var)
2121 && DECL_NAME (decl) == DECL_NAME (var)
a68ab351
JJ
2122 && (TYPE_MAIN_VARIANT (type)
2123 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2124 {
2125 if (TYPE_READONLY (TREE_TYPE (var)))
2126 type = TREE_TYPE (var);
2127 break;
2128 }
2129 }
2130 }
2131
2132 if (type == error_mark_node)
20906c66 2133 return false;
a68ab351
JJ
2134
2135 /* Variables with const-qualified type having no mutable member
2136 are predetermined shared. */
2137 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
20906c66
JJ
2138 return true;
2139
2140 return false;
2141}
2142
2143/* True if OpenMP sharing attribute of DECL is predetermined. */
2144
2145enum omp_clause_default_kind
1c9ee609 2146cxx_omp_predetermined_sharing_1 (tree decl)
20906c66
JJ
2147{
2148 /* Static data members are predetermined shared. */
2149 if (TREE_STATIC (decl))
2150 {
2151 tree ctx = CP_DECL_CONTEXT (decl);
2152 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2153 return OMP_CLAUSE_DEFAULT_SHARED;
59bc434a
JJ
2154
2155 if (c_omp_predefined_variable (decl))
2156 return OMP_CLAUSE_DEFAULT_SHARED;
20906c66
JJ
2157 }
2158
28567c40
JJ
2159 /* this may not be specified in data-sharing clauses, still we need
2160 to predetermined it firstprivate. */
2161 if (decl == current_class_ptr)
2162 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
a68ab351
JJ
2163
2164 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2165}
2166
1c9ee609
JJ
2167/* Likewise, but also include the artificial vars. We don't want to
2168 disallow the artificial vars being mentioned in explicit clauses,
2169 as we use artificial vars e.g. for loop constructs with random
2170 access iterators other than pointers, but during gimplification
2171 we want to treat them as predetermined. */
2172
2173enum omp_clause_default_kind
2174cxx_omp_predetermined_sharing (tree decl)
2175{
2176 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2177 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2178 return ret;
2179
2180 /* Predetermine artificial variables holding integral values, those
2181 are usually result of gimplify_one_sizepos or SAVE_EXPR
2182 gimplification. */
2183 if (VAR_P (decl)
2184 && DECL_ARTIFICIAL (decl)
2185 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2186 && !(DECL_LANG_SPECIFIC (decl)
2187 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2188 return OMP_CLAUSE_DEFAULT_SHARED;
2189
2190 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2191}
2192
a68ab351
JJ
2193/* Finalize an implicitly determined clause. */
2194
2195void
f014c653 2196cxx_omp_finish_clause (tree c, gimple_seq *)
a68ab351
JJ
2197{
2198 tree decl, inner_type;
2199 bool make_shared = false;
2200
d81ab49d
JJ
2201 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2202 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2203 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
a68ab351
JJ
2204 return;
2205
2206 decl = OMP_CLAUSE_DECL (c);
2207 decl = require_complete_type (decl);
2208 inner_type = TREE_TYPE (decl);
2209 if (decl == error_mark_node)
2210 make_shared = true;
9f613f06 2211 else if (TYPE_REF_P (TREE_TYPE (decl)))
d9a6bd32 2212 inner_type = TREE_TYPE (inner_type);
a68ab351
JJ
2213
2214 /* We're interested in the base element, not arrays. */
2215 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2216 inner_type = TREE_TYPE (inner_type);
2217
2218 /* Check for special function availability by building a call to one.
2219 Save the results, because later we won't be in the right context
2220 for making these queries. */
d81ab49d 2221 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
a68ab351
JJ
2222 if (!make_shared
2223 && CLASS_TYPE_P (inner_type)
d81ab49d
JJ
2224 && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
2225 true))
a68ab351
JJ
2226 make_shared = true;
2227
2228 if (make_shared)
34361776
JJ
2229 {
2230 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2231 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2232 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2233 }
a68ab351 2234}
d9a6bd32
JJ
2235
2236/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2237 disregarded in OpenMP construct, because it is going to be
2238 remapped during OpenMP lowering. SHARED is true if DECL
2239 is going to be shared, false if it is going to be privatized. */
2240
2241bool
2242cxx_omp_disregard_value_expr (tree decl, bool shared)
2243{
2244 return !shared
2245 && VAR_P (decl)
2246 && DECL_HAS_VALUE_EXPR_P (decl)
2247 && DECL_ARTIFICIAL (decl)
2248 && DECL_LANG_SPECIFIC (decl)
2249 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2250}
cda0a029 2251
7426fcc8
JM
2252/* Fold expression X which is used as an rvalue if RVAL is true. */
2253
4cd3e7df 2254tree
7426fcc8
JM
2255cp_fold_maybe_rvalue (tree x, bool rval)
2256{
66f90a17 2257 while (true)
7426fcc8 2258 {
66f90a17 2259 x = cp_fold (x);
f43e0585
JM
2260 if (rval)
2261 x = mark_rvalue_use (x);
fd338b13 2262 if (rval && DECL_P (x)
9f613f06 2263 && !TYPE_REF_P (TREE_TYPE (x)))
66f90a17
JM
2264 {
2265 tree v = decl_constant_value (x);
2266 if (v != x && v != error_mark_node)
2267 {
2268 x = v;
2269 continue;
2270 }
2271 }
2272 break;
7426fcc8 2273 }
66f90a17 2274 return x;
7426fcc8
JM
2275}
2276
2277/* Fold expression X which is used as an rvalue. */
2278
4cd3e7df 2279tree
7426fcc8
JM
2280cp_fold_rvalue (tree x)
2281{
2282 return cp_fold_maybe_rvalue (x, true);
2283}
2284
bf31620c
JM
2285/* Perform folding on expression X. */
2286
2287tree
2288cp_fully_fold (tree x)
2289{
2290 if (processing_template_decl)
2291 return x;
2292 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2293 have to call both. */
2294 if (cxx_dialect >= cxx11)
587b2f67
MP
2295 {
2296 x = maybe_constant_value (x);
2297 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2298 a TARGET_EXPR; undo that here. */
2299 if (TREE_CODE (x) == TARGET_EXPR)
2300 x = TARGET_EXPR_INITIAL (x);
2301 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2302 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2303 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2304 x = TREE_OPERAND (x, 0);
2305 }
bf31620c
JM
2306 return cp_fold_rvalue (x);
2307}
2308
50867d20
JJ
2309/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2310 in some cases. */
2311
2312tree
2313cp_fully_fold_init (tree x)
2314{
2315 if (processing_template_decl)
2316 return x;
2317 x = cp_fully_fold (x);
2318 hash_set<tree> pset;
2319 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2320 return x;
2321}
2322
4250754e
JM
2323/* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2324 and certain changes are made to the folding done. Or should be (FIXME). We
2325 never touch maybe_const, as it is only used for the C front-end
2326 C_MAYBE_CONST_EXPR. */
2327
2328tree
f9c59f7e 2329c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
4250754e 2330{
f9c59f7e 2331 return cp_fold_maybe_rvalue (x, !lval);
4250754e
JM
2332}
2333
7a7ac32a 2334static GTY((deletable)) hash_map<tree, tree> *fold_cache;
cda0a029 2335
1e297006
MP
2336/* Dispose of the whole FOLD_CACHE. */
2337
2338void
2339clear_fold_cache (void)
2340{
7a7ac32a
PP
2341 if (fold_cache != NULL)
2342 fold_cache->empty ();
1e297006
MP
2343}
2344
cda0a029
JM
2345/* This function tries to fold an expression X.
2346 To avoid combinatorial explosion, folding results are kept in fold_cache.
9a004410 2347 If X is invalid, we don't fold at all.
cda0a029
JM
2348 For performance reasons we don't cache expressions representing a
2349 declaration or constant.
2350 Function returns X or its folded variant. */
2351
2352static tree
2353cp_fold (tree x)
2354{
2355 tree op0, op1, op2, op3;
2356 tree org_x = x, r = NULL_TREE;
2357 enum tree_code code;
2358 location_t loc;
7426fcc8 2359 bool rval_ops = true;
cda0a029 2360
2fa586ad 2361 if (!x || x == error_mark_node)
cda0a029
JM
2362 return x;
2363
9a004410 2364 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
cda0a029
JM
2365 return x;
2366
2367 /* Don't bother to cache DECLs or constants. */
2368 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2369 return x;
2370
7a7ac32a
PP
2371 if (fold_cache == NULL)
2372 fold_cache = hash_map<tree, tree>::create_ggc (101);
2373
2374 if (tree *cached = fold_cache->get (x))
2375 return *cached;
cda0a029
JM
2376
2377 code = TREE_CODE (x);
2378 switch (code)
2379 {
c8b1fbc1
MP
2380 case CLEANUP_POINT_EXPR:
2381 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2382 effects. */
2383 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2384 if (!TREE_SIDE_EFFECTS (r))
2385 x = r;
2386 break;
2387
cda0a029
JM
2388 case SIZEOF_EXPR:
2389 x = fold_sizeof_expr (x);
2390 break;
2391
2392 case VIEW_CONVERT_EXPR:
7426fcc8 2393 rval_ops = false;
191816a3 2394 /* FALLTHRU */
cda0a029
JM
2395 case CONVERT_EXPR:
2396 case NOP_EXPR:
2397 case NON_LVALUE_EXPR:
2398
2399 if (VOID_TYPE_P (TREE_TYPE (x)))
19c37faa
JJ
2400 {
2401 /* This is just to make sure we don't end up with casts to
2402 void from error_mark_node. If we just return x, then
2403 cp_fold_r might fold the operand into error_mark_node and
2404 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2405 during gimplification doesn't like such casts.
2406 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2407 folding of the operand should be in the caches and if in cp_fold_r
2408 it will modify it in place. */
2409 op0 = cp_fold (TREE_OPERAND (x, 0));
2410 if (op0 == error_mark_node)
2411 x = error_mark_node;
2412 break;
2413 }
cda0a029 2414
cda0a029 2415 loc = EXPR_LOCATION (x);
8d8f3235 2416 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
cda0a029 2417
415594bb
JM
2418 if (code == CONVERT_EXPR
2419 && SCALAR_TYPE_P (TREE_TYPE (x))
2420 && op0 != void_node)
2421 /* During parsing we used convert_to_*_nofold; re-convert now using the
2422 folding variants, since fold() doesn't do those transformations. */
2423 x = fold (convert (TREE_TYPE (x), op0));
2424 else if (op0 != TREE_OPERAND (x, 0))
476805ae
JJ
2425 {
2426 if (op0 == error_mark_node)
2427 x = error_mark_node;
2428 else
2429 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2430 }
e9ea372d
JM
2431 else
2432 x = fold (x);
cda0a029
JM
2433
2434 /* Conversion of an out-of-range value has implementation-defined
2435 behavior; the language considers it different from arithmetic
2436 overflow, which is undefined. */
2437 if (TREE_CODE (op0) == INTEGER_CST
2438 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2439 TREE_OVERFLOW (x) = false;
2440
2441 break;
2442
290279c4
JM
2443 case INDIRECT_REF:
2444 /* We don't need the decltype(auto) obfuscation anymore. */
2445 if (REF_PARENTHESIZED_P (x))
2446 {
2447 tree p = maybe_undo_parenthesized_ref (x);
66f90a17 2448 return cp_fold (p);
290279c4
JM
2449 }
2450 goto unary;
2451
cda0a029 2452 case ADDR_EXPR:
715dd933
JJ
2453 loc = EXPR_LOCATION (x);
2454 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2455
2456 /* Cope with user tricks that amount to offsetof. */
2457 if (op0 != error_mark_node
7bdc7e06 2458 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
715dd933
JJ
2459 {
2460 tree val = get_base_address (op0);
2461 if (val
2462 && INDIRECT_REF_P (val)
2463 && COMPLETE_TYPE_P (TREE_TYPE (val))
2464 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2465 {
2466 val = TREE_OPERAND (val, 0);
2467 STRIP_NOPS (val);
1e9d6923 2468 val = maybe_constant_value (val);
715dd933 2469 if (TREE_CODE (val) == INTEGER_CST)
79e7b1fe 2470 return fold_offsetof (op0, TREE_TYPE (x));
715dd933
JJ
2471 }
2472 }
2473 goto finish_unary;
2474
cda0a029
JM
2475 case REALPART_EXPR:
2476 case IMAGPART_EXPR:
7426fcc8 2477 rval_ops = false;
191816a3 2478 /* FALLTHRU */
cda0a029
JM
2479 case CONJ_EXPR:
2480 case FIX_TRUNC_EXPR:
2481 case FLOAT_EXPR:
2482 case NEGATE_EXPR:
2483 case ABS_EXPR:
e197e64e 2484 case ABSU_EXPR:
cda0a029
JM
2485 case BIT_NOT_EXPR:
2486 case TRUTH_NOT_EXPR:
2487 case FIXED_CONVERT_EXPR:
290279c4 2488 unary:
cda0a029
JM
2489
2490 loc = EXPR_LOCATION (x);
7426fcc8 2491 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
cda0a029 2492
715dd933 2493 finish_unary:
cda0a029 2494 if (op0 != TREE_OPERAND (x, 0))
476805ae
JJ
2495 {
2496 if (op0 == error_mark_node)
2497 x = error_mark_node;
2498 else
0633ee10
JJ
2499 {
2500 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2501 if (code == INDIRECT_REF
2502 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2503 {
2504 TREE_READONLY (x) = TREE_READONLY (org_x);
2505 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2506 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2507 }
2508 }
476805ae 2509 }
e9ea372d
JM
2510 else
2511 x = fold (x);
cda0a029
JM
2512
2513 gcc_assert (TREE_CODE (x) != COND_EXPR
2514 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2515 break;
2516
d49b0aa0
MP
2517 case UNARY_PLUS_EXPR:
2518 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2519 if (op0 == error_mark_node)
2520 x = error_mark_node;
2521 else
2522 x = fold_convert (TREE_TYPE (x), op0);
2523 break;
2524
cda0a029
JM
2525 case POSTDECREMENT_EXPR:
2526 case POSTINCREMENT_EXPR:
2527 case INIT_EXPR:
cda0a029
JM
2528 case PREDECREMENT_EXPR:
2529 case PREINCREMENT_EXPR:
2530 case COMPOUND_EXPR:
7426fcc8
JM
2531 case MODIFY_EXPR:
2532 rval_ops = false;
191816a3 2533 /* FALLTHRU */
cda0a029
JM
2534 case POINTER_PLUS_EXPR:
2535 case PLUS_EXPR:
1af4ebf5 2536 case POINTER_DIFF_EXPR:
cda0a029
JM
2537 case MINUS_EXPR:
2538 case MULT_EXPR:
2539 case TRUNC_DIV_EXPR:
2540 case CEIL_DIV_EXPR:
2541 case FLOOR_DIV_EXPR:
2542 case ROUND_DIV_EXPR:
2543 case TRUNC_MOD_EXPR:
2544 case CEIL_MOD_EXPR:
2545 case ROUND_MOD_EXPR:
2546 case RDIV_EXPR:
2547 case EXACT_DIV_EXPR:
2548 case MIN_EXPR:
2549 case MAX_EXPR:
2550 case LSHIFT_EXPR:
2551 case RSHIFT_EXPR:
2552 case LROTATE_EXPR:
2553 case RROTATE_EXPR:
2554 case BIT_AND_EXPR:
2555 case BIT_IOR_EXPR:
2556 case BIT_XOR_EXPR:
2557 case TRUTH_AND_EXPR:
2558 case TRUTH_ANDIF_EXPR:
2559 case TRUTH_OR_EXPR:
2560 case TRUTH_ORIF_EXPR:
2561 case TRUTH_XOR_EXPR:
2562 case LT_EXPR: case LE_EXPR:
2563 case GT_EXPR: case GE_EXPR:
2564 case EQ_EXPR: case NE_EXPR:
2565 case UNORDERED_EXPR: case ORDERED_EXPR:
2566 case UNLT_EXPR: case UNLE_EXPR:
2567 case UNGT_EXPR: case UNGE_EXPR:
2568 case UNEQ_EXPR: case LTGT_EXPR:
2569 case RANGE_EXPR: case COMPLEX_EXPR:
cda0a029
JM
2570
2571 loc = EXPR_LOCATION (x);
7426fcc8
JM
2572 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2573 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
cda0a029
JM
2574
2575 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
476805ae
JJ
2576 {
2577 if (op0 == error_mark_node || op1 == error_mark_node)
2578 x = error_mark_node;
2579 else
2580 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2581 }
e9ea372d
JM
2582 else
2583 x = fold (x);
cda0a029 2584
315aa691
JJ
2585 /* This is only needed for -Wnonnull-compare and only if
2586 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2587 generation, we do it always. */
2588 if (COMPARISON_CLASS_P (org_x))
03ca8fb3
JJ
2589 {
2590 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2591 ;
2592 else if (COMPARISON_CLASS_P (x))
315aa691
JJ
2593 {
2594 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2595 TREE_NO_WARNING (x) = 1;
2596 }
03ca8fb3
JJ
2597 /* Otherwise give up on optimizing these, let GIMPLE folders
2598 optimize those later on. */
2599 else if (op0 != TREE_OPERAND (org_x, 0)
2600 || op1 != TREE_OPERAND (org_x, 1))
2601 {
2602 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
315aa691
JJ
2603 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2604 TREE_NO_WARNING (x) = 1;
03ca8fb3
JJ
2605 }
2606 else
2607 x = org_x;
2608 }
8a902edb
MP
2609 if (code == MODIFY_EXPR && TREE_CODE (x) == MODIFY_EXPR)
2610 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2611
cda0a029
JM
2612 break;
2613
2614 case VEC_COND_EXPR:
2615 case COND_EXPR:
cda0a029 2616 loc = EXPR_LOCATION (x);
7426fcc8 2617 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
cda0a029
JM
2618 op1 = cp_fold (TREE_OPERAND (x, 1));
2619 op2 = cp_fold (TREE_OPERAND (x, 2));
2620
627be19f
JM
2621 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2622 {
e525cfa7 2623 warning_sentinel s (warn_int_in_bool_context);
627be19f 2624 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2ab340fe 2625 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
627be19f 2626 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2ab340fe 2627 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
627be19f 2628 }
cb358080
JJ
2629 else if (VOID_TYPE_P (TREE_TYPE (x)))
2630 {
2631 if (TREE_CODE (op0) == INTEGER_CST)
2632 {
2633 /* If the condition is constant, fold can fold away
2634 the COND_EXPR. If some statement-level uses of COND_EXPR
2635 have one of the branches NULL, avoid folding crash. */
2636 if (!op1)
2637 op1 = build_empty_stmt (loc);
2638 if (!op2)
2639 op2 = build_empty_stmt (loc);
2640 }
2641 else
2642 {
2643 /* Otherwise, don't bother folding a void condition, since
2644 it can't produce a constant value. */
2645 if (op0 != TREE_OPERAND (x, 0)
2646 || op1 != TREE_OPERAND (x, 1)
2647 || op2 != TREE_OPERAND (x, 2))
2648 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2649 break;
2650 }
2651 }
627be19f 2652
7f26f7df
JM
2653 if (op0 != TREE_OPERAND (x, 0)
2654 || op1 != TREE_OPERAND (x, 1)
2655 || op2 != TREE_OPERAND (x, 2))
476805ae
JJ
2656 {
2657 if (op0 == error_mark_node
2658 || op1 == error_mark_node
2659 || op2 == error_mark_node)
2660 x = error_mark_node;
2661 else
2662 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2663 }
7f26f7df 2664 else
cda0a029
JM
2665 x = fold (x);
2666
683b8101
JM
2667 /* A COND_EXPR might have incompatible types in branches if one or both
2668 arms are bitfields. If folding exposed such a branch, fix it up. */
a5afbdd6 2669 if (TREE_CODE (x) != code
4ecd9c15 2670 && x != error_mark_node
a5afbdd6
MP
2671 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2672 x = fold_convert (TREE_TYPE (org_x), x);
683b8101 2673
cda0a029
JM
2674 break;
2675
2676 case CALL_EXPR:
2677 {
2678 int i, m, sv = optimize, nw = sv, changed = 0;
2679 tree callee = get_callee_fndecl (x);
2680
b925d25d
JM
2681 /* Some built-in function calls will be evaluated at compile-time in
2682 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2683 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3d78e008 2684 if (callee && fndecl_built_in_p (callee) && !optimize
cda0a029
JM
2685 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2686 && current_function_decl
2687 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2688 nw = 1;
cda0a029 2689
e4082611
JJ
2690 /* Defer folding __builtin_is_constant_evaluated. */
2691 if (callee
3d78e008 2692 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
ff603745 2693 BUILT_IN_FRONTEND))
e4082611
JJ
2694 break;
2695
ff603745
JJ
2696 if (callee
2697 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2698 BUILT_IN_FRONTEND))
2699 {
2700 x = fold_builtin_source_location (EXPR_LOCATION (x));
2701 break;
2702 }
2703
cda0a029
JM
2704 x = copy_node (x);
2705
2706 m = call_expr_nargs (x);
2707 for (i = 0; i < m; i++)
2708 {
2709 r = cp_fold (CALL_EXPR_ARG (x, i));
2710 if (r != CALL_EXPR_ARG (x, i))
476805ae
JJ
2711 {
2712 if (r == error_mark_node)
2713 {
2714 x = error_mark_node;
2715 break;
2716 }
2717 changed = 1;
2718 }
cda0a029
JM
2719 CALL_EXPR_ARG (x, i) = r;
2720 }
476805ae
JJ
2721 if (x == error_mark_node)
2722 break;
cda0a029
JM
2723
2724 optimize = nw;
2725 r = fold (x);
2726 optimize = sv;
2727
2728 if (TREE_CODE (r) != CALL_EXPR)
2729 {
2730 x = cp_fold (r);
2731 break;
2732 }
2733
2734 optimize = nw;
2735
b925d25d
JM
2736 /* Invoke maybe_constant_value for functions declared
2737 constexpr and not called with AGGR_INIT_EXPRs.
cda0a029 2738 TODO:
b925d25d
JM
2739 Do constexpr expansion of expressions where the call itself is not
2740 constant, but the call followed by an INDIRECT_REF is. */
28577b86
JM
2741 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2742 && !flag_no_inline)
25cb6b33 2743 r = maybe_constant_value (x);
cda0a029
JM
2744 optimize = sv;
2745
2746 if (TREE_CODE (r) != CALL_EXPR)
2747 {
25cb6b33
JJ
2748 if (DECL_CONSTRUCTOR_P (callee))
2749 {
2750 loc = EXPR_LOCATION (x);
2751 tree s = build_fold_indirect_ref_loc (loc,
2752 CALL_EXPR_ARG (x, 0));
2753 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2754 }
cda0a029
JM
2755 x = r;
2756 break;
2757 }
2758
2759 if (!changed)
2760 x = org_x;
2761 break;
2762 }
2763
2764 case CONSTRUCTOR:
2765 {
2766 unsigned i;
2767 constructor_elt *p;
2768 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
4b0b30ef 2769 vec<constructor_elt, va_gc> *nelts = NULL;
cda0a029 2770 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
4b0b30ef
JM
2771 {
2772 tree op = cp_fold (p->value);
4b0b30ef 2773 if (op != p->value)
476805ae
JJ
2774 {
2775 if (op == error_mark_node)
2776 {
2777 x = error_mark_node;
27de0fab 2778 vec_free (nelts);
476805ae
JJ
2779 break;
2780 }
27de0fab
RB
2781 if (nelts == NULL)
2782 nelts = elts->copy ();
2783 (*nelts)[i].value = op;
476805ae 2784 }
4b0b30ef 2785 }
27de0fab 2786 if (nelts)
570f86f9
JJ
2787 {
2788 x = build_constructor (TREE_TYPE (x), nelts);
2789 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2790 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2791 }
153dba6c
JJ
2792 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2793 x = fold (x);
cda0a029
JM
2794 break;
2795 }
2796 case TREE_VEC:
2797 {
2798 bool changed = false;
cd9cf97b 2799 releasing_vec vec;
cda0a029
JM
2800 int i, n = TREE_VEC_LENGTH (x);
2801 vec_safe_reserve (vec, n);
2802
2803 for (i = 0; i < n; i++)
2804 {
2805 tree op = cp_fold (TREE_VEC_ELT (x, i));
2806 vec->quick_push (op);
2807 if (op != TREE_VEC_ELT (x, i))
2808 changed = true;
2809 }
2810
2811 if (changed)
2812 {
2813 r = copy_node (x);
2814 for (i = 0; i < n; i++)
2815 TREE_VEC_ELT (r, i) = (*vec)[i];
2816 x = r;
2817 }
cda0a029
JM
2818 }
2819
2820 break;
2821
2822 case ARRAY_REF:
2823 case ARRAY_RANGE_REF:
2824
2825 loc = EXPR_LOCATION (x);
2826 op0 = cp_fold (TREE_OPERAND (x, 0));
2827 op1 = cp_fold (TREE_OPERAND (x, 1));
2828 op2 = cp_fold (TREE_OPERAND (x, 2));
2829 op3 = cp_fold (TREE_OPERAND (x, 3));
2830
476805ae
JJ
2831 if (op0 != TREE_OPERAND (x, 0)
2832 || op1 != TREE_OPERAND (x, 1)
2833 || op2 != TREE_OPERAND (x, 2)
2834 || op3 != TREE_OPERAND (x, 3))
2835 {
2836 if (op0 == error_mark_node
2837 || op1 == error_mark_node
2838 || op2 == error_mark_node
2839 || op3 == error_mark_node)
2840 x = error_mark_node;
2841 else
0633ee10
JJ
2842 {
2843 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2844 TREE_READONLY (x) = TREE_READONLY (org_x);
2845 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2846 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2847 }
476805ae 2848 }
cda0a029
JM
2849
2850 x = fold (x);
2851 break;
2852
6b6ae9eb
MP
2853 case SAVE_EXPR:
2854 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2855 folding, evaluates to an invariant. In that case no need to wrap
2856 this folded tree with a SAVE_EXPR. */
2857 r = cp_fold (TREE_OPERAND (x, 0));
2858 if (tree_invariant_p (r))
2859 x = r;
2860 break;
2861
cda0a029
JM
2862 default:
2863 return org_x;
2864 }
2865
7a7ac32a 2866 fold_cache->put (org_x, x);
cda0a029
JM
2867 /* Prevent that we try to fold an already folded result again. */
2868 if (x != org_x)
7a7ac32a 2869 fold_cache->put (x, x);
cda0a029
JM
2870
2871 return x;
2872}
2873
2674fa47
JM
2874/* Look up either "hot" or "cold" in attribute list LIST. */
2875
2876tree
2877lookup_hotness_attribute (tree list)
2878{
2879 for (; list; list = TREE_CHAIN (list))
2880 {
2881 tree name = get_attribute_name (list);
2882 if (is_attribute_p ("hot", name)
2883 || is_attribute_p ("cold", name)
2884 || is_attribute_p ("likely", name)
2885 || is_attribute_p ("unlikely", name))
2886 break;
2887 }
2888 return list;
2889}
2890
2891/* Remove both "hot" and "cold" attributes from LIST. */
2892
2893static tree
2894remove_hotness_attribute (tree list)
2895{
2896 list = remove_attribute ("hot", list);
2897 list = remove_attribute ("cold", list);
2898 list = remove_attribute ("likely", list);
2899 list = remove_attribute ("unlikely", list);
2900 return list;
2901}
2902
2903/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2904 PREDICT_EXPR. */
2905
2906tree
2d9273ca 2907process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2674fa47
JM
2908{
2909 if (std_attrs == error_mark_node)
2910 return std_attrs;
2911 if (tree attr = lookup_hotness_attribute (std_attrs))
2912 {
2913 tree name = get_attribute_name (attr);
2914 bool hot = (is_attribute_p ("hot", name)
2915 || is_attribute_p ("likely", name));
2916 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2917 hot ? TAKEN : NOT_TAKEN);
2d9273ca 2918 SET_EXPR_LOCATION (pred, attrs_loc);
2674fa47
JM
2919 add_stmt (pred);
2920 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2921 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2922 get_attribute_name (other), name);
2923 std_attrs = remove_hotness_attribute (std_attrs);
2924 }
2925 return std_attrs;
2926}
2927
ff603745
JJ
2928/* Helper of fold_builtin_source_location, return the
2929 std::source_location::__impl type after performing verification
2930 on it. LOC is used for reporting any errors. */
2931
2932static tree
2933get_source_location_impl_type (location_t loc)
2934{
2935 tree name = get_identifier ("source_location");
2936 tree decl = lookup_qualified_name (std_node, name);
2937 if (TREE_CODE (decl) != TYPE_DECL)
2938 {
2939 auto_diagnostic_group d;
2940 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2941 qualified_name_lookup_error (std_node, name, decl, loc);
2942 else
2943 error_at (loc, "%qD is not a type", decl);
2944 return error_mark_node;
2945 }
2946 name = get_identifier ("__impl");
2947 tree type = TREE_TYPE (decl);
2948 decl = lookup_qualified_name (type, name);
2949 if (TREE_CODE (decl) != TYPE_DECL)
2950 {
2951 auto_diagnostic_group d;
2952 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2953 qualified_name_lookup_error (type, name, decl, loc);
2954 else
2955 error_at (loc, "%qD is not a type", decl);
2956 return error_mark_node;
2957 }
2958 type = TREE_TYPE (decl);
2959 if (TREE_CODE (type) != RECORD_TYPE)
2960 {
2961 error_at (loc, "%qD is not a class type", decl);
2962 return error_mark_node;
2963 }
2964
2965 int cnt = 0;
2966 for (tree field = TYPE_FIELDS (type);
2967 (field = next_initializable_field (field)) != NULL_TREE;
2968 field = DECL_CHAIN (field))
2969 {
2970 if (DECL_NAME (field) != NULL_TREE)
2971 {
2972 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
2973 if (strcmp (n, "_M_file_name") == 0
2974 || strcmp (n, "_M_function_name") == 0)
2975 {
2976 if (TREE_TYPE (field) != const_string_type_node)
2977 {
2978 error_at (loc, "%qD does not have %<const char *%> type",
2979 field);
2980 return error_mark_node;
2981 }
2982 cnt++;
2983 continue;
2984 }
2985 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
2986 {
2987 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
2988 {
2989 error_at (loc, "%qD does not have integral type", field);
2990 return error_mark_node;
2991 }
2992 cnt++;
2993 continue;
2994 }
2995 }
2996 cnt = 0;
2997 break;
2998 }
2999 if (cnt != 4)
3000 {
3001 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3002 "non-static data members %<_M_file_name%>, "
3003 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3004 return error_mark_node;
3005 }
3006 return build_qualified_type (type, TYPE_QUAL_CONST);
3007}
3008
/* Type for the source_location_table hash_table below: one entry per
   distinct (location, function) pair for which __builtin_source_location
   has been folded.  */
struct GTY((for_user)) source_location_table_entry {
  /* Location of the call, resolved to its macro expansion point.  */
  location_t loc;
  /* DECL_UID of the containing function, or -1U outside a function.  */
  unsigned uid;
  /* The static VAR_DECL holding the __impl object for this entry.  */
  tree var;
};
3015
3016/* Traits class for function start hash maps below. */
3017
3018struct source_location_table_entry_hash
3019 : ggc_remove <source_location_table_entry>
3020{
3021 typedef source_location_table_entry value_type;
3022 typedef source_location_table_entry compare_type;
3023
3024 static hashval_t
3025 hash (const source_location_table_entry &ref)
3026 {
3027 inchash::hash hstate (0);
3028 hstate.add_int (ref.loc);
3029 hstate.add_int (ref.uid);
3030 return hstate.end ();
3031 }
3032
3033 static bool
3034 equal (const source_location_table_entry &ref1,
3035 const source_location_table_entry &ref2)
3036 {
3037 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3038 }
3039
3040 static void
3041 mark_deleted (source_location_table_entry &ref)
3042 {
3043 ref.loc = UNKNOWN_LOCATION;
3044 ref.uid = -1U;
3045 ref.var = NULL_TREE;
3046 }
3047
7ca50de0
DM
3048 static const bool empty_zero_p = true;
3049
ff603745
JJ
3050 static void
3051 mark_empty (source_location_table_entry &ref)
3052 {
3053 ref.loc = UNKNOWN_LOCATION;
3054 ref.uid = 0;
3055 ref.var = NULL_TREE;
3056 }
3057
3058 static bool
3059 is_deleted (const source_location_table_entry &ref)
3060 {
3061 return (ref.loc == UNKNOWN_LOCATION
3062 && ref.uid == -1U
3063 && ref.var == NULL_TREE);
3064 }
3065
3066 static bool
3067 is_empty (const source_location_table_entry &ref)
3068 {
3069 return (ref.loc == UNKNOWN_LOCATION
3070 && ref.uid == 0
3071 && ref.var == NULL_TREE);
3072 }
3073};
3074
/* GC-rooted table mapping (location, function) pairs to the static
   variables fold_builtin_source_location has created for them, so repeated
   calls share one variable.  Allocated lazily on first use.  */
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
/* Counter used to generate unique "Lsrc_loc" internal labels for those
   variables.  */
static GTY(()) unsigned int source_location_id;
3078
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.  Returns the address (as a const void * expression) of a
   static constant std::source_location::__impl object describing LOC, or
   a null pointer constant if the __impl type failed verification.
   Repeated calls with the same resolved location in the same function
   reuse a single variable via source_location_table.  */

tree
fold_builtin_source_location (location_t loc)
{
  /* Look up and verify std::source_location::__impl on the first call;
     the result (or error_mark_node) is cached in source_location_impl so
     the diagnostics are emitted only once.  */
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  /* Key on the macro expansion point, so all expansions of one macro use
     at one spot share an entry.  */
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      /* First call for this (location, function): create the static
	 constant variable holding the __impl object.  */
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      /* Build the initializer, one constructor element per __impl member;
	 get_source_location_impl_type has already verified that exactly
	 these four members exist.  */
      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_initializable_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  /* Apply any -f*-prefix-map remappings to the name.  */
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      /* Empty string outside a function.  */
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 0);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      /* Record the variable in the table so later identical calls find
	 it in the entryp->var check above.  */
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}
3171
cda0a029 3172#include "gt-cp-cp-gimplify.h"