]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cp/cp-gimplify.c
OpenMP/Fortran: Fix (re)mapping of allocatable/pointer arrays [PR96668]
[thirdparty/gcc.git] / gcc / cp / cp-gimplify.c
CommitLineData
24baab8a 1/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.
6de9cd9a 2
8d9254fc 3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Jason Merrill <jason@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
e77f031d 10Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
e77f031d
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
2adfab87 25#include "target.h"
c7131fb2 26#include "basic-block.h"
6de9cd9a 27#include "cp-tree.h"
c7131fb2 28#include "gimple.h"
2adfab87 29#include "predict.h"
c7131fb2 30#include "stor-layout.h"
726a989a 31#include "tree-iterator.h"
45b0be94 32#include "gimplify.h"
0a508bb6 33#include "c-family/c-ubsan.h"
314e6352
ML
34#include "stringpool.h"
35#include "attribs.h"
45b2222a 36#include "asan.h"
2674fa47 37#include "gcc-rich-location.h"
705f02b0 38#include "memmodel.h"
f30025bb 39#include "tm_p.h"
ff603745
JJ
40#include "output.h"
41#include "file-prefix-map.h"
42#include "cgraph.h"
f1f862ae 43#include "omp-general.h"
6de9cd9a 44
b2cb7511
TV
45/* Forward declarations. */
46
47static tree cp_genericize_r (tree *, int *, void *);
cda0a029 48static tree cp_fold_r (tree *, int *, void *);
e2df2328 49static void cp_genericize_tree (tree*, bool);
cda0a029 50static tree cp_fold (tree);
b2cb7511 51
fbc315db
ILT
52/* Local declarations. */
53
54enum bc_t { bc_break = 0, bc_continue = 1 };
55
1799e5d5
RH
56/* Stack of labels which are targets for "break" or "continue",
57 linked through TREE_CHAIN. */
58static tree bc_label[2];
fbc315db
ILT
59
/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context (the per-kind stack in bc_label), returning the new label.  The
   matching finish_bc_block call pops it again.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  /* Push onto the stack for this kind; the stack is linked through
     DECL_CHAIN of the labels themselves.  */
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  /* Record on the label itself which statement kind it terminates, for
     consumers that inspect the label later.  */
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}
78
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  /* begin/finish calls must nest properly; LABEL must be the stack top.  */
  gcc_assert (label == bc_label[bc]);

  /* TREE_USED is set by get_bc_label when a break/continue actually
     targeted this label; only then does the label need to be emitted.  */
  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  /* Pop the stack and disconnect the label from it.  */
  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
98
726a989a
RB
99/* Get the LABEL_EXPR to represent a break or continue statement
100 in the current block scope. BC indicates which. */
fbc315db
ILT
101
102static tree
726a989a 103get_bc_label (enum bc_t bc)
fbc315db 104{
1799e5d5 105 tree label = bc_label[bc];
fbc315db 106
fbc315db
ILT
107 /* Mark the label used for finish_bc_block. */
108 TREE_USED (label) = 1;
726a989a 109 return label;
fbc315db
ILT
110}
111
6de9cd9a
DN
112/* Genericize a TRY_BLOCK. */
113
114static void
115genericize_try_block (tree *stmt_p)
116{
117 tree body = TRY_STMTS (*stmt_p);
118 tree cleanup = TRY_HANDLERS (*stmt_p);
119
f293ce4b 120 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
6de9cd9a
DN
121}
122
123/* Genericize a HANDLER by converting to a CATCH_EXPR. */
124
125static void
126genericize_catch_block (tree *stmt_p)
127{
128 tree type = HANDLER_TYPE (*stmt_p);
129 tree body = HANDLER_BODY (*stmt_p);
130
6de9cd9a 131 /* FIXME should the caught type go in TREE_TYPE? */
f293ce4b 132 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
6de9cd9a
DN
133}
134
726a989a
RB
/* A terser interface for building a representation of an exception
   specification.  BODY is the guarded code, ALLOWED the list of permitted
   exception types, and FAILURE the code to run on a violation.  Returns
   a TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
152
6de9cd9a
DN
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  On violation of the spec the
   failure path calls the unexpected handler with the exception pointer.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  /* Suppress warnings on the synthesized wrapper and its handler.  */
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
167
2674fa47
JM
168/* Return the first non-compound statement in STMT. */
169
170tree
171first_stmt (tree stmt)
172{
173 switch (TREE_CODE (stmt))
174 {
175 case STATEMENT_LIST:
176 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
177 return first_stmt (p->stmt);
178 return void_node;
179
180 case BIND_EXPR:
181 return first_stmt (BIND_EXPR_BODY (stmt));
182
183 default:
184 return stmt;
185 }
186}
187
5a508662
RH
/* Genericize an IF_STMT by turning it into a COND_EXPR.  Also warns when
   both branches begin with the same hot/cold label prediction, which is
   almost certainly a user mistake.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      /* Diagnose [[likely]]/[[unlikely]] applied to both arms.  */
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  /* COND_EXPR needs both arms; synthesize empty statements as needed.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* Fold away a constant condition when the dead arm has no side
     effects; otherwise build the full COND_EXPR.  */
  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
233
fbc315db
ILT
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.

   The result (stored through STMT_P) is a LOOP_EXPR (or simpler form when
   the condition is constant) surrounded by the break label; the continue
   label is placed between the body and the increment.  *WALK_SUBTREES is
   cleared because the subtrees are genericized here explicitly, and DATA
   is threaded through to cp_genericize_r.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  protected_set_expr_location_if_unset (incr, start_locus);

  /* Genericize cond/incr before pushing the labels; only the body may
     contain break/continue for this loop.  */
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* Emit a debug marker for the condition unless the loop is dead.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  /* Lay out the loop body: [exit-test] body continue-label [incr]
     [exit-test for do-while].  */
  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  /* For do-while, the condition's debug marker goes before the test.  */
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      /* A constant-false condition: while(0) never runs the body, do-while(0)
	 runs it exactly once — no LOOP_EXPR needed either way.  */
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      /* For an infinite loop, prefer the first body statement's location so
	 the debugger stops somewhere sensible.  */
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  /* The break label goes after the whole loop.  */
  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
333
b2cb7511 334/* Genericize a FOR_STMT node *STMT_P. */
fbc315db
ILT
335
336static void
b2cb7511 337genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
338{
339 tree stmt = *stmt_p;
b2cb7511
TV
340 tree expr = NULL;
341 tree loop;
342 tree init = FOR_INIT_STMT (stmt);
fbc315db 343
b2cb7511
TV
344 if (init)
345 {
346 cp_walk_tree (&init, cp_genericize_r, data, NULL);
347 append_to_statement_list (init, &expr);
348 }
fbc315db 349
b2cb7511
TV
350 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
351 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
352 append_to_statement_list (loop, &expr);
27d93d2c
JM
353 if (expr == NULL_TREE)
354 expr = loop;
b2cb7511 355 *stmt_p = expr;
fbc315db
ILT
356}
357
b2cb7511 358/* Genericize a WHILE_STMT node *STMT_P. */
fbc315db
ILT
359
360static void
b2cb7511 361genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
362{
363 tree stmt = *stmt_p;
b2cb7511
TV
364 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
365 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
fbc315db
ILT
366}
367
b2cb7511 368/* Genericize a DO_STMT node *STMT_P. */
fbc315db
ILT
369
370static void
b2cb7511 371genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
372{
373 tree stmt = *stmt_p;
b2cb7511
TV
374 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
375 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
fbc315db
ILT
376}
377
b2cb7511 378/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
fbc315db
ILT
379
380static void
b2cb7511 381genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
382{
383 tree stmt = *stmt_p;
b2cb7511
TV
384 tree break_block, body, cond, type;
385 location_t stmt_locus = EXPR_LOCATION (stmt);
fbc315db 386
fbc315db
ILT
387 body = SWITCH_STMT_BODY (stmt);
388 if (!body)
c2255bc4 389 body = build_empty_stmt (stmt_locus);
b2cb7511
TV
390 cond = SWITCH_STMT_COND (stmt);
391 type = SWITCH_STMT_TYPE (stmt);
fbc315db 392
b2cb7511 393 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
d0f2db23
JJ
394
395 break_block = begin_bc_block (bc_break, stmt_locus);
396
397 cp_walk_tree (&body, cp_genericize_r, data, NULL);
b2cb7511
TV
398 cp_walk_tree (&type, cp_genericize_r, data, NULL);
399 *walk_subtrees = 0;
fbc315db 400
65791f42
JJ
401 if (TREE_USED (break_block))
402 SWITCH_BREAK_LABEL_P (break_block) = 1;
403 finish_bc_block (&body, bc_break, break_block);
9e851845 404 *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
1a2e9708
JJ
405 SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
406 gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
407 || !TREE_USED (break_block));
b2cb7511
TV
408}
409
/* Genericize a CONTINUE_STMT node *STMT_P: a PREDICT_EXPR hinting the
   jump is not taken, followed by a goto to the continue label.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
424
b2cb7511
TV
425/* Genericize a BREAK_STMT node *STMT_P. */
426
427static void
428genericize_break_stmt (tree *stmt_p)
429{
430 tree label = get_bc_label (bc_break);
431 location_t location = EXPR_LOCATION (*stmt_p);
432 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
433}
434
/* Genericize a OMP_FOR node *STMT_P.  Only a continue scope is opened:
   break is not permitted out of an OpenMP loop body, but continue maps to
   a label at the end of the body.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  /* Taskloop clauses are handled during gimplification instead.  */
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
455
/* Hook into the middle of gimplifying an OMP_FOR node.  Gimplifies the
   whole statement into PRE_P, guarding against re-entry via the
   OMP_FOR_GIMPLIFYING_P flag.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
476
934790cc
ILT
/* Gimplify an EXPR_STMT node: unwrap it to its expression, issuing
   -Wunused-value diagnostics first, since gimplification may nullify
   the statement.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  /* Replace a nullified statement with an empty statement list.  */
  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
511
6de9cd9a
DN
/* Gimplify initialization from an AGGR_INIT_EXPR.  *EXPR_P is an
   INIT_EXPR; if its source (possibly wrapped in COMPOUND_EXPRs and/or a
   TARGET_EXPR) is an AGGR_INIT_EXPR or VEC_INIT_EXPR, redirect that
   initializer's slot to the INIT_EXPR's target and replace the INIT_EXPR
   with the initializer itself.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      /* SUB is the candidate initializer: the last operand of the
	 COMPOUND_EXPR chain, or T itself.  */
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
564
/* Gimplify a MUST_NOT_THROW_EXPR into a GIMPLE_TRY whose handler calls
   std::terminate.  Returns GS_OK with the wrapper's temporary as *EXPR_P
   when the expression produced a value, GS_ALL_DONE otherwise.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  /* If STMT computes a value, voidify_wrapper_expr gives us a temporary
     to carry it out of the try.  */
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
7c34ced1 592
25de0a29
AH
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.

   CODE is the tree code of the enclosing expression (e.g. MODIFY_EXPR,
   RETURN_EXPR), which affects which forms count as simple.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  /* The copy is the last expression of a comma chain.  */
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    /* A clobber is not a copy; leave it alone.  */
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
621
65a550b4
JM
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  /* Strip component/array references; only an index expression with
     side effects makes the reference itself effectful.  */
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
648
1a37b6d9
JJ
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  GIMPLE_TEST_F is
   the predicate passed through to gimplify_expr; a mutable variable
   result is copied into a fresh temporary.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
664
7c34ced1
RH
/* Do C++-specific gimplification.  Args are as for gimplify_expr:
   *EXPR_P is the expression to lower, PRE_P/POST_P collect statements to
   be emitted before/after it.  Returns a gimplify_status; anything not
   handled here falls through to c_gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement trees, temporarily switch to this statement's
     full-expression setting; restored at the bottom.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	/* Expand the array initialization into an explicit loop.  */
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    /* These statement forms are genericized before gimplification and
       must not survive to this point.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  /* Front-end builtins that survive to gimplification get their
	     runtime (non-constant-evaluation) semantics here.  */
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	  else if (decl
		   && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
					 BUILT_IN_FRONTEND))
	    *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
5a508662 957
d8472c75 958static inline bool
58f9752a 959is_invisiref_parm (const_tree t)
d8472c75 960{
cc77ae10 961 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
d8472c75
JM
962 && DECL_BY_REFERENCE (t));
963}
964
10827cd8
JJ
965/* Return true if the uid in both int tree maps are equal. */
966
2a22f99c
TS
967bool
968cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
10827cd8 969{
10827cd8
JJ
970 return (a->uid == b->uid);
971}
972
973/* Hash a UID in a cxx_int_tree_map. */
974
975unsigned int
2a22f99c 976cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
10827cd8 977{
2a22f99c 978 return item->uid;
10827cd8
JJ
979}
980
4577f730
JJ
981/* A stable comparison routine for use with splay trees and DECLs. */
982
983static int
984splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
985{
986 tree a = (tree) xa;
987 tree b = (tree) xb;
988
989 return DECL_UID (a) - DECL_UID (b);
990}
991
/* OpenMP context during genericization.  One of these is pushed for each
   OMP_PARALLEL/OMP_TASK/OMP_TASKLOOP region walked by cp_genericize_r.  */

struct cp_genericize_omp_taskreg
{
  /* True for a parallel region, false for a task region.  */
  bool is_parallel;
  /* True if the region's data-sharing default is "shared".  */
  bool default_shared;
  /* Enclosing task/parallel region, or NULL for the outermost one.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from DECL (as splay_tree_key) to its OMP_CLAUSE_DEFAULT_* kind
     within this region.  */
  splay_tree variables;
};
1001
1002/* Return true if genericization should try to determine if
1003 DECL is firstprivate or shared within task regions. */
1004
1005static bool
1006omp_var_to_track (tree decl)
1007{
1008 tree type = TREE_TYPE (decl);
1009 if (is_invisiref_parm (decl))
1010 type = TREE_TYPE (type);
9f613f06 1011 else if (TYPE_REF_P (type))
8b586510 1012 type = TREE_TYPE (type);
4577f730
JJ
1013 while (TREE_CODE (type) == ARRAY_TYPE)
1014 type = TREE_TYPE (type);
1015 if (type == error_mark_node || !CLASS_TYPE_P (type))
1016 return false;
3048c0c7 1017 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
4577f730
JJ
1018 return false;
1019 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
1020 return false;
1021 return true;
1022}
1023
/* Note DECL use in OpenMP region OMP_CTX during genericization.
   On first sight of DECL in this region, decide whether it would be
   implicitly shared or firstprivate, record that in the region's
   variable map, and — in the firstprivate case — force instantiation
   of the copy ctor and dtor now, before gimplification.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Propagate the use outward first, so enclosing regions record
         DECL before we consult them below.  */
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          /* Scan enclosing regions up to (and including) the nearest
             parallel: if DECL is anything but shared there, it becomes
             firstprivate here.  */
          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          /* No enclosing region at all: parms and function-local
             automatics are also firstprivate by default.  */
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
1076
/* Genericization context.  Threaded through cp_genericize_r via its
   DATA pointer.  */

struct cp_genericize_data
{
  /* Trees already visited; except for invisiref-parm replacements,
     the same tree is never walked twice.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs (innermost last); used by the
     USING_STMT case to find a BLOCK to attach IMPORTED_DECLs to.  */
  vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Innermost enclosing TRY_BLOCK or MUST_NOT_THROW_EXPR, or NULL_TREE;
     consulted when diagnosing THROW_EXPRs.  */
  tree try_block;
  /* True while walking trees that must not be sanitized (e.g. static
     initializers).  */
  bool no_sanitize_p;
  /* Whether invisible-reference parms should be dereferenced.  */
  bool handle_invisiref_parm_p;
};
1088
cda0a029
JM
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of none-omp cases is something to move into
   the middle-end. As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.

   walk_tree callback: DATA is the hash_set<tree> of already-visited
   statements.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  /* Fold the statement in place before looking at its subtrees.  */
  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      /* For OMP loop constructs, walk the pieces manually so that the
	 iteration variables inside OMP_FOR_COND/OMP_FOR_INCR are left
	 alone and only the bound/step operands get folded.  */
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      /* NOTE(review): only operand 1 (the bound) is folded here;
		 operand 0 is the iteration variable — confirm.  */
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}
1164
2fa586ad
JM
1165/* Fold ALL the trees! FIXME we should be able to remove this, but
1166 apparently that still causes optimization regressions. */
1167
1168void
1169cp_fold_function (tree fndecl)
1170{
6f5bcd24
JJ
1171 hash_set<tree> pset;
1172 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
2fa586ad
JM
1173}
1174
b7689b96
JM
1175/* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1176
1177static tree genericize_spaceship (tree expr)
1178{
1179 iloc_sentinel s (cp_expr_location (expr));
1180 tree type = TREE_TYPE (expr);
1181 tree op0 = TREE_OPERAND (expr, 0);
1182 tree op1 = TREE_OPERAND (expr, 1);
1183 return genericize_spaceship (type, op0, op1);
1184}
1185
3539fc13
JM
1186/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1187 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
ef4e0c35
JM
1188 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1189 NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR. */
3539fc13 1190
ef4e0c35 1191tree
3539fc13
JM
1192predeclare_vla (tree expr)
1193{
1194 tree type = TREE_TYPE (expr);
1195 if (type == error_mark_node)
1196 return expr;
ef4e0c35
JM
1197 if (is_typedef_decl (expr))
1198 type = DECL_ORIGINAL_TYPE (expr);
3539fc13
JM
1199
1200 /* We need to strip pointers for gimplify_type_sizes. */
1201 tree vla = type;
1202 while (POINTER_TYPE_P (vla))
1203 {
1204 if (TYPE_NAME (vla))
1205 return expr;
1206 vla = TREE_TYPE (vla);
1207 }
ef41587d
JM
1208 if (vla == type || TYPE_NAME (vla)
1209 || !variably_modified_type_p (vla, NULL_TREE))
3539fc13
JM
1210 return expr;
1211
1212 tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1213 DECL_ARTIFICIAL (decl) = 1;
1214 TYPE_NAME (vla) = decl;
1215 tree dexp = build_stmt (input_location, DECL_EXPR, decl);
ef4e0c35
JM
1216 if (DECL_P (expr))
1217 {
1218 add_stmt (dexp);
1219 return NULL_TREE;
1220 }
1221 else
1222 {
1223 expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1224 return expr;
1225 }
3539fc13
JM
1226}
1227
d8472c75
JM
1228/* Perform any pre-gimplification lowering of C++ front end trees to
1229 GENERIC. */
5a508662
RH
1230
1231static tree
d8472c75 1232cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
5a508662
RH
1233{
1234 tree stmt = *stmt_p;
ac3cbee5 1235 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
6e2830c3 1236 hash_set<tree> *p_set = wtd->p_set;
5a508662 1237
4577f730
JJ
1238 /* If in an OpenMP context, note var uses. */
1239 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
5a6ccc94 1240 && (VAR_P (stmt)
4577f730
JJ
1241 || TREE_CODE (stmt) == PARM_DECL
1242 || TREE_CODE (stmt) == RESULT_DECL)
1243 && omp_var_to_track (stmt))
1244 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1245
6aa80414
NS
1246 /* Don't dereference parms in a thunk, pass the references through. */
1247 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1248 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1249 {
1250 *walk_subtrees = 0;
1251 return NULL;
1252 }
1253
4b9f2115 1254 /* Dereference invisible reference parms. */
e2df2328 1255 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
d8472c75 1256 {
cc77ae10 1257 *stmt_p = convert_from_reference (stmt);
7cfd79d6 1258 p_set->add (*stmt_p);
d8472c75
JM
1259 *walk_subtrees = 0;
1260 return NULL;
1261 }
1262
10827cd8
JJ
1263 /* Map block scope extern declarations to visible declarations with the
1264 same name and type in outer scopes if any. */
1265 if (cp_function_chain->extern_decl_map
cb6da767 1266 && VAR_OR_FUNCTION_DECL_P (stmt)
10827cd8
JJ
1267 && DECL_EXTERNAL (stmt))
1268 {
1269 struct cxx_int_tree_map *h, in;
1270 in.uid = DECL_UID (stmt);
2a22f99c 1271 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
10827cd8
JJ
1272 if (h)
1273 {
1274 *stmt_p = h->to;
9e51f66f 1275 TREE_USED (h->to) |= TREE_USED (stmt);
10827cd8
JJ
1276 *walk_subtrees = 0;
1277 return NULL;
1278 }
1279 }
1280
6f3af356 1281 if (TREE_CODE (stmt) == INTEGER_CST
9f613f06 1282 && TYPE_REF_P (TREE_TYPE (stmt))
6f3af356
JJ
1283 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1284 && !wtd->no_sanitize_p)
1285 {
1286 ubsan_maybe_instrument_reference (stmt_p);
1287 if (*stmt_p != stmt)
1288 {
1289 *walk_subtrees = 0;
1290 return NULL_TREE;
1291 }
1292 }
1293
d8472c75 1294 /* Other than invisiref parms, don't walk the same tree twice. */
6e2830c3 1295 if (p_set->contains (stmt))
d8472c75
JM
1296 {
1297 *walk_subtrees = 0;
1298 return NULL_TREE;
1299 }
1300
c74985e3 1301 switch (TREE_CODE (stmt))
d8472c75 1302 {
c74985e3
JJ
1303 case ADDR_EXPR:
1304 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1305 {
1306 /* If in an OpenMP context, note var uses. */
1307 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1308 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1309 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1310 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
4577f730 1311 *walk_subtrees = 0;
c74985e3
JJ
1312 }
1313 break;
1314
1315 case RETURN_EXPR:
1316 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1317 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1318 *walk_subtrees = 0;
1319 break;
1320
1321 case OMP_CLAUSE:
1322 switch (OMP_CLAUSE_CODE (stmt))
1323 {
1324 case OMP_CLAUSE_LASTPRIVATE:
1325 /* Don't dereference an invisiref in OpenMP clauses. */
1326 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1327 {
1328 *walk_subtrees = 0;
1329 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1330 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1331 cp_genericize_r, data, NULL);
1332 }
1333 break;
1334 case OMP_CLAUSE_PRIVATE:
1335 /* Don't dereference an invisiref in OpenMP clauses. */
1336 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
4577f730 1337 *walk_subtrees = 0;
c74985e3
JJ
1338 else if (wtd->omp_ctx != NULL)
1339 {
1340 /* Private clause doesn't cause any references to the
1341 var in outer contexts, avoid calling
1342 omp_cxx_notice_variable for it. */
1343 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1344 wtd->omp_ctx = NULL;
1345 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1346 data, NULL);
1347 wtd->omp_ctx = old;
1348 *walk_subtrees = 0;
1349 }
1350 break;
1351 case OMP_CLAUSE_SHARED:
1352 case OMP_CLAUSE_FIRSTPRIVATE:
1353 case OMP_CLAUSE_COPYIN:
1354 case OMP_CLAUSE_COPYPRIVATE:
6a2892a6
JJ
1355 case OMP_CLAUSE_INCLUSIVE:
1356 case OMP_CLAUSE_EXCLUSIVE:
c74985e3
JJ
1357 /* Don't dereference an invisiref in OpenMP clauses. */
1358 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
acf0174b 1359 *walk_subtrees = 0;
c74985e3
JJ
1360 break;
1361 case OMP_CLAUSE_REDUCTION:
28567c40
JJ
1362 case OMP_CLAUSE_IN_REDUCTION:
1363 case OMP_CLAUSE_TASK_REDUCTION:
c74985e3
JJ
1364 /* Don't dereference an invisiref in reduction clause's
1365 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1366 still needs to be genericized. */
1367 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1368 {
1369 *walk_subtrees = 0;
1370 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1371 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1372 cp_genericize_r, data, NULL);
1373 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1374 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1375 cp_genericize_r, data, NULL);
1376 }
1377 break;
1378 default:
1379 break;
1380 }
1381 break;
1382
1383 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1384 to lower this construct before scanning it, so we need to lower these
1385 before doing anything else. */
1386 case CLEANUP_STMT:
1387 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1388 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1389 : TRY_FINALLY_EXPR,
1390 void_type_node,
1391 CLEANUP_BODY (stmt),
1392 CLEANUP_EXPR (stmt));
1393 break;
1394
1395 case IF_STMT:
f74d9c8f
JJ
1396 genericize_if_stmt (stmt_p);
1397 /* *stmt_p has changed, tail recurse to handle it again. */
1398 return cp_genericize_r (stmt_p, walk_subtrees, data);
f74d9c8f 1399
c74985e3
JJ
1400 /* COND_EXPR might have incompatible types in branches if one or both
1401 arms are bitfields. Fix it up now. */
1402 case COND_EXPR:
1403 {
1404 tree type_left
1405 = (TREE_OPERAND (stmt, 1)
1406 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1407 : NULL_TREE);
1408 tree type_right
1409 = (TREE_OPERAND (stmt, 2)
1410 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1411 : NULL_TREE);
1412 if (type_left
1413 && !useless_type_conversion_p (TREE_TYPE (stmt),
1414 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1415 {
1416 TREE_OPERAND (stmt, 1)
1417 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1418 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1419 type_left));
1420 }
1421 if (type_right
1422 && !useless_type_conversion_p (TREE_TYPE (stmt),
1423 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1424 {
1425 TREE_OPERAND (stmt, 2)
1426 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1427 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1428 type_right));
1429 }
1430 }
1431 break;
bbdf5682 1432
c74985e3 1433 case BIND_EXPR:
4577f730
JJ
1434 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1435 {
1436 tree decl;
1437 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
5a6ccc94 1438 if (VAR_P (decl)
4577f730
JJ
1439 && !DECL_EXTERNAL (decl)
1440 && omp_var_to_track (decl))
1441 {
1442 splay_tree_node n
1443 = splay_tree_lookup (wtd->omp_ctx->variables,
1444 (splay_tree_key) decl);
1445 if (n == NULL)
1446 splay_tree_insert (wtd->omp_ctx->variables,
1447 (splay_tree_key) decl,
1448 TREE_STATIC (decl)
1449 ? OMP_CLAUSE_DEFAULT_SHARED
1450 : OMP_CLAUSE_DEFAULT_PRIVATE);
1451 }
1452 }
45b2222a 1453 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
7b3a9795
MP
1454 {
1455 /* The point here is to not sanitize static initializers. */
1456 bool no_sanitize_p = wtd->no_sanitize_p;
1457 wtd->no_sanitize_p = true;
1458 for (tree decl = BIND_EXPR_VARS (stmt);
1459 decl;
1460 decl = DECL_CHAIN (decl))
1461 if (VAR_P (decl)
1462 && TREE_STATIC (decl)
1463 && DECL_INITIAL (decl))
1464 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1465 wtd->no_sanitize_p = no_sanitize_p;
1466 }
9771b263 1467 wtd->bind_expr_stack.safe_push (stmt);
ac3cbee5
RG
1468 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1469 cp_genericize_r, data, NULL);
9771b263 1470 wtd->bind_expr_stack.pop ();
c74985e3 1471 break;
ac3cbee5 1472
c74985e3
JJ
1473 case USING_STMT:
1474 {
1475 tree block = NULL_TREE;
1476
1477 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1478 BLOCK, and append an IMPORTED_DECL to its
1479 BLOCK_VARS chained list. */
1480 if (wtd->bind_expr_stack.exists ())
1481 {
1482 int i;
1483 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1484 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1485 break;
1486 }
1487 if (block)
1488 {
0c923157
JM
1489 tree decl = TREE_OPERAND (stmt, 0);
1490 gcc_assert (decl);
c74985e3 1491
0c923157
JM
1492 if (undeduced_auto_decl (decl))
1493 /* Omit from the GENERIC, the back-end can't handle it. */;
1494 else
1495 {
1496 tree using_directive = make_node (IMPORTED_DECL);
1497 TREE_TYPE (using_directive) = void_type_node;
f3665bd1 1498 DECL_CONTEXT (using_directive) = current_function_decl;
ac3cbee5 1499
0c923157
JM
1500 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1501 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1502 BLOCK_VARS (block) = using_directive;
1503 }
c74985e3
JJ
1504 }
1505 /* The USING_STMT won't appear in GENERIC. */
1506 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1507 *walk_subtrees = 0;
1508 }
1509 break;
1510
1511 case DECL_EXPR:
1512 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
ac3cbee5 1513 {
c74985e3
JJ
1514 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1515 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1516 *walk_subtrees = 0;
ac3cbee5 1517 }
c74985e3 1518 else
ac3cbee5 1519 {
c74985e3
JJ
1520 tree d = DECL_EXPR_DECL (stmt);
1521 if (VAR_P (d))
1522 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
ac3cbee5 1523 }
c74985e3 1524 break;
4577f730 1525
c74985e3
JJ
1526 case OMP_PARALLEL:
1527 case OMP_TASK:
1528 case OMP_TASKLOOP:
1529 {
1530 struct cp_genericize_omp_taskreg omp_ctx;
1531 tree c, decl;
1532 splay_tree_node n;
1533
1534 *walk_subtrees = 0;
1535 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1536 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1537 omp_ctx.default_shared = omp_ctx.is_parallel;
1538 omp_ctx.outer = wtd->omp_ctx;
1539 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1540 wtd->omp_ctx = &omp_ctx;
1541 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1542 switch (OMP_CLAUSE_CODE (c))
1543 {
1544 case OMP_CLAUSE_SHARED:
1545 case OMP_CLAUSE_PRIVATE:
1546 case OMP_CLAUSE_FIRSTPRIVATE:
1547 case OMP_CLAUSE_LASTPRIVATE:
1548 decl = OMP_CLAUSE_DECL (c);
1549 if (decl == error_mark_node || !omp_var_to_track (decl))
1550 break;
1551 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1552 if (n != NULL)
1553 break;
1554 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1555 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1556 ? OMP_CLAUSE_DEFAULT_SHARED
1557 : OMP_CLAUSE_DEFAULT_PRIVATE);
1558 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1559 omp_cxx_notice_variable (omp_ctx.outer, decl);
4577f730 1560 break;
c74985e3
JJ
1561 case OMP_CLAUSE_DEFAULT:
1562 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1563 omp_ctx.default_shared = true;
1564 default:
4577f730 1565 break;
c74985e3
JJ
1566 }
1567 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1568 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1569 else
1570 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1571 wtd->omp_ctx = omp_ctx.outer;
1572 splay_tree_delete (omp_ctx.variables);
1573 }
1574 break;
1575
49ddde69
JJ
1576 case OMP_TARGET:
1577 cfun->has_omp_target = true;
1578 break;
1579
c74985e3
JJ
1580 case TRY_BLOCK:
1581 {
1582 *walk_subtrees = 0;
1583 tree try_block = wtd->try_block;
1584 wtd->try_block = stmt;
1585 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1586 wtd->try_block = try_block;
1587 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1588 }
1589 break;
1590
1591 case MUST_NOT_THROW_EXPR:
8243e2a9
JM
1592 /* MUST_NOT_THROW_COND might be something else with TM. */
1593 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1594 {
1595 *walk_subtrees = 0;
1596 tree try_block = wtd->try_block;
1597 wtd->try_block = stmt;
1598 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1599 wtd->try_block = try_block;
1600 }
c74985e3
JJ
1601 break;
1602
1603 case THROW_EXPR:
1604 {
1605 location_t loc = location_of (stmt);
1606 if (TREE_NO_WARNING (stmt))
1607 /* Never mind. */;
1608 else if (wtd->try_block)
1609 {
097f82ec
DM
1610 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1611 {
1612 auto_diagnostic_group d;
1613 if (warning_at (loc, OPT_Wterminate,
a9c697b8 1614 "%<throw%> will always call %<terminate%>")
097f82ec
DM
1615 && cxx_dialect >= cxx11
1616 && DECL_DESTRUCTOR_P (current_function_decl))
a9c697b8 1617 inform (loc, "in C++11 destructors default to %<noexcept%>");
097f82ec 1618 }
c74985e3
JJ
1619 }
1620 else
1621 {
1622 if (warn_cxx11_compat && cxx_dialect < cxx11
1623 && DECL_DESTRUCTOR_P (current_function_decl)
1624 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1625 == NULL_TREE)
1626 && (get_defaulted_eh_spec (current_function_decl)
1627 == empty_except_spec))
1628 warning_at (loc, OPT_Wc__11_compat,
a9c697b8
MS
1629 "in C++11 this %<throw%> will call %<terminate%> "
1630 "because destructors default to %<noexcept%>");
c74985e3
JJ
1631 }
1632 }
1633 break;
1634
1635 case CONVERT_EXPR:
1636 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1637 break;
1638
1639 case FOR_STMT:
1640 genericize_for_stmt (stmt_p, walk_subtrees, data);
1641 break;
1642
1643 case WHILE_STMT:
1644 genericize_while_stmt (stmt_p, walk_subtrees, data);
1645 break;
1646
1647 case DO_STMT:
1648 genericize_do_stmt (stmt_p, walk_subtrees, data);
1649 break;
1650
1651 case SWITCH_STMT:
1652 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1653 break;
1654
1655 case CONTINUE_STMT:
1656 genericize_continue_stmt (stmt_p);
1657 break;
1658
1659 case BREAK_STMT:
1660 genericize_break_stmt (stmt_p);
1661 break;
1662
b7689b96
JM
1663 case SPACESHIP_EXPR:
1664 *stmt_p = genericize_spaceship (*stmt_p);
1665 break;
1666
f1f862ae
JJ
1667 case OMP_DISTRIBUTE:
1668 /* Need to explicitly instantiate copy ctors on class iterators of
1669 composite distribute parallel for. */
1670 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1671 {
1672 tree *data[4] = { NULL, NULL, NULL, NULL };
1673 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1674 find_combined_omp_for, data, NULL);
1675 if (inner != NULL_TREE
1676 && TREE_CODE (inner) == OMP_FOR)
1677 {
1678 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1679 if (OMP_FOR_ORIG_DECLS (inner)
1680 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1681 i)) == TREE_LIST
1682 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1683 i)))
1684 {
1685 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1686 /* Class iterators aren't allowed on OMP_SIMD, so the only
1687 case we need to solve is distribute parallel for. */
1688 gcc_assert (TREE_CODE (inner) == OMP_FOR
1689 && data[1]);
1690 tree orig_decl = TREE_PURPOSE (orig);
1691 tree c, cl = NULL_TREE;
1692 for (c = OMP_FOR_CLAUSES (inner);
1693 c; c = OMP_CLAUSE_CHAIN (c))
1694 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1695 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1696 && OMP_CLAUSE_DECL (c) == orig_decl)
1697 {
1698 cl = c;
1699 break;
1700 }
1701 if (cl == NULL_TREE)
1702 {
1703 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1704 c; c = OMP_CLAUSE_CHAIN (c))
1705 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1706 && OMP_CLAUSE_DECL (c) == orig_decl)
1707 {
1708 cl = c;
1709 break;
1710 }
1711 }
1712 if (cl)
1713 {
1714 orig_decl = require_complete_type (orig_decl);
1715 tree inner_type = TREE_TYPE (orig_decl);
1716 if (orig_decl == error_mark_node)
1717 continue;
1718 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1719 inner_type = TREE_TYPE (inner_type);
1720
1721 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1722 inner_type = TREE_TYPE (inner_type);
1723 get_copy_ctor (inner_type, tf_warning_or_error);
1724 }
1725 }
1726 }
1727 }
1728 /* FALLTHRU */
c74985e3
JJ
1729 case OMP_FOR:
1730 case OMP_SIMD:
d81ab49d 1731 case OMP_LOOP:
950ad0ba 1732 case OACC_LOOP:
c74985e3
JJ
1733 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1734 break;
1735
1736 case PTRMEM_CST:
9d409934
JM
1737 /* By the time we get here we're handing off to the back end, so we don't
1738 need or want to preserve PTRMEM_CST anymore. */
1739 *stmt_p = cplus_expand_constant (stmt);
1740 *walk_subtrees = 0;
c74985e3
JJ
1741 break;
1742
1743 case MEM_REF:
f31a8339 1744 /* For MEM_REF, make sure not to sanitize the second operand even
c74985e3 1745 if it has reference type. It is just an offset with a type
f31a8339
JJ
1746 holding other information. There is no other processing we
1747 need to do for INTEGER_CSTs, so just ignore the second argument
1748 unconditionally. */
1749 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1750 *walk_subtrees = 0;
c74985e3
JJ
1751 break;
1752
1753 case NOP_EXPR:
3539fc13 1754 *stmt_p = predeclare_vla (*stmt_p);
c74985e3
JJ
1755 if (!wtd->no_sanitize_p
1756 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
9f613f06 1757 && TYPE_REF_P (TREE_TYPE (stmt)))
6f3af356 1758 ubsan_maybe_instrument_reference (stmt_p);
c74985e3
JJ
1759 break;
1760
1761 case CALL_EXPR:
861d4af8
AS
1762 /* Evaluate function concept checks instead of treating them as
1763 normal functions. */
1764 if (concept_check_p (stmt))
1765 {
1766 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1767 * walk_subtrees = 0;
1768 break;
1769 }
1770
c74985e3
JJ
1771 if (!wtd->no_sanitize_p
1772 && sanitize_flags_p ((SANITIZE_NULL
1773 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
944fa280
JJ
1774 {
1775 tree fn = CALL_EXPR_FN (stmt);
1776 if (fn != NULL_TREE
1777 && !error_operand_p (fn)
71a93b08 1778 && INDIRECT_TYPE_P (TREE_TYPE (fn))
944fa280
JJ
1779 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1780 {
1781 bool is_ctor
1782 = TREE_CODE (fn) == ADDR_EXPR
1783 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1784 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
45b2222a 1785 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
35228ac7 1786 ubsan_maybe_instrument_member_call (stmt, is_ctor);
45b2222a 1787 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
35228ac7 1788 cp_ubsan_maybe_instrument_member_call (stmt);
944fa280 1789 }
830421fc
JJ
1790 else if (fn == NULL_TREE
1791 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1792 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
9f613f06 1793 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
830421fc 1794 *walk_subtrees = 0;
944fa280 1795 }
aafdbe06
JM
1796 /* Fall through. */
1797 case AGGR_INIT_EXPR:
1798 /* For calls to a multi-versioned function, overload resolution
1799 returns the function with the highest target priority, that is,
1800 the version that will checked for dispatching first. If this
1801 version is inlinable, a direct call to this version can be made
1802 otherwise the call should go through the dispatcher. */
1803 {
f5f035a3 1804 tree fn = cp_get_callee_fndecl_nofold (stmt);
aafdbe06
JM
1805 if (fn && DECL_FUNCTION_VERSIONED (fn)
1806 && (current_function_decl == NULL
1807 || !targetm.target_option.can_inline_p (current_function_decl,
1808 fn)))
1809 if (tree dis = get_function_version_dispatcher (fn))
1810 {
1811 mark_versions_used (dis);
1812 dis = build_address (dis);
1813 if (TREE_CODE (stmt) == CALL_EXPR)
1814 CALL_EXPR_FN (stmt) = dis;
1815 else
1816 AGGR_INIT_EXPR_FN (stmt) = dis;
1817 }
1818 }
c74985e3
JJ
1819 break;
1820
570f86f9
JJ
1821 case TARGET_EXPR:
1822 if (TARGET_EXPR_INITIAL (stmt)
1823 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1824 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1825 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1826 break;
1827
a7ea3d2c
PP
1828 case REQUIRES_EXPR:
1829 /* Emit the value of the requires-expression. */
1830 *stmt_p = constant_boolean_node (constraints_satisfied_p (stmt),
1831 boolean_type_node);
1832 *walk_subtrees = 0;
1833 break;
1834
861d4af8
AS
1835 case TEMPLATE_ID_EXPR:
1836 gcc_assert (concept_check_p (stmt));
1837 /* Emit the value of the concept check. */
1838 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1839 walk_subtrees = 0;
1840 break;
1841
5a1706f6
JJ
1842 case STATEMENT_LIST:
1843 if (TREE_SIDE_EFFECTS (stmt))
1844 {
1845 tree_stmt_iterator i;
1846 int nondebug_stmts = 0;
1847 bool clear_side_effects = true;
1848 /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
1849 transforming an IF_STMT into COND_EXPR. If such stmt
1850 appears in a STATEMENT_LIST that contains only that
1851 stmt and some DEBUG_BEGIN_STMTs, without -g where the
1852 STATEMENT_LIST wouldn't be present at all the resulting
1853 expression wouldn't have TREE_SIDE_EFFECTS set, so make sure
1854 to clear it even on the STATEMENT_LIST in such cases. */
1855 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1856 {
1857 tree t = tsi_stmt (i);
1858 if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
1859 nondebug_stmts++;
1860 cp_walk_tree (tsi_stmt_ptr (i), cp_genericize_r, data, NULL);
1861 if (TREE_CODE (t) != DEBUG_BEGIN_STMT
1862 && (nondebug_stmts > 1 || TREE_SIDE_EFFECTS (tsi_stmt (i))))
1863 clear_side_effects = false;
1864 }
1865 if (clear_side_effects)
1866 TREE_SIDE_EFFECTS (stmt) = 0;
1867 *walk_subtrees = 0;
1868 }
1869 break;
1870
c74985e3
JJ
1871 default:
1872 if (IS_TYPE_OR_DECL_P (stmt))
1873 *walk_subtrees = 0;
1874 break;
944fa280 1875 }
ac3cbee5 1876
6e2830c3 1877 p_set->add (*stmt_p);
c8094d83 1878
5a508662
RH
1879 return NULL;
1880}
1881
b2cb7511
TV
1882/* Lower C++ front end trees to GENERIC in T_P. */
1883
1884static void
e2df2328 1885cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
b2cb7511
TV
1886{
1887 struct cp_genericize_data wtd;
1888
6e2830c3 1889 wtd.p_set = new hash_set<tree>;
9771b263 1890 wtd.bind_expr_stack.create (0);
b2cb7511 1891 wtd.omp_ctx = NULL;
8243e2a9 1892 wtd.try_block = NULL_TREE;
7b3a9795 1893 wtd.no_sanitize_p = false;
e2df2328 1894 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
b2cb7511 1895 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
6e2830c3 1896 delete wtd.p_set;
9771b263 1897 wtd.bind_expr_stack.release ();
45b2222a 1898 if (sanitize_flags_p (SANITIZE_VPTR))
35228ac7 1899 cp_ubsan_instrument_member_accesses (t_p);
b2cb7511
TV
1900}
1901
0a508bb6
JJ
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  If -fsanitize=return
   is not enabled, instrument __builtin_unreachable.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  /* Nothing to check for void functions, ctors/dtors (which have no
     user-visible return value), or when the target says a missing
     return isn't worth warning about.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  /* Walk down to the last statement of the function body, looking
     through BIND_EXPRs, cleanups and statement lists (skipping
     trailing DEBUG_BEGIN_STMT markers).  If that statement is a
     RETURN_EXPR, there is nothing to instrument.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    /* Skip debug begin markers at the end of the list.  */
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function already ends with a return; nothing to do.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the instrumentation to the (body of the) outermost
     BIND_EXPR of the function.  */
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      /* No UBSan: fall back to a plain __builtin_unreachable ().  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
1981
5a508662
RH
/* Entry point: lower the saved tree of FNDECL from C++ trees to
   GENERIC, fixing up invisible-reference parameters and the return
   value first, then walking the body and handing off to
   c_genericize.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  /* The result is now a reference; dereference it in
		     the named variable's value expression.  */
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been consumed by now;
     restore the outer function's labels.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
2056\f
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* DEFPARM walks the declared parameter types past the object
     argument(s), so it points at parameters that may need default
     arguments supplied.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit an explicit GENERIC loop that applies FN to
	 every (innermost) element, advancing one or two pointers by
	 the element size until the end of ARG1 is reached.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend through nested array types to the element type,
	 building ARRAY_REFs to element [0]...[0].  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 = START1 + total size of the array.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the pointers by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while P1 != END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call to FN with the address(es).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2168
2169/* Return code to initialize DECL with its default constructor, or
2170 NULL if there's nothing to do. */
2171
2172tree
12308bc6 2173cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1799e5d5
RH
2174{
2175 tree info = CP_OMP_CLAUSE_INFO (clause);
2176 tree ret = NULL;
2177
2178 if (info)
2179 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2180
2181 return ret;
2182}
2183
2184/* Return code to initialize DST with a copy constructor from SRC. */
2185
2186tree
2187cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2188{
2189 tree info = CP_OMP_CLAUSE_INFO (clause);
2190 tree ret = NULL;
2191
2192 if (info)
2193 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2194 if (ret == NULL)
726a989a 2195 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1799e5d5
RH
2196
2197 return ret;
2198}
2199
2200/* Similarly, except use an assignment operator instead. */
2201
2202tree
2203cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2204{
2205 tree info = CP_OMP_CLAUSE_INFO (clause);
2206 tree ret = NULL;
2207
2208 if (info)
2209 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2210 if (ret == NULL)
726a989a 2211 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1799e5d5
RH
2212
2213 return ret;
2214}
2215
2216/* Return code to destroy DECL. */
2217
2218tree
2219cxx_omp_clause_dtor (tree clause, tree decl)
2220{
2221 tree info = CP_OMP_CLAUSE_INFO (clause);
2222 tree ret = NULL;
2223
2224 if (info)
2225 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2226
2227 return ret;
5a508662 2228}
077b0dfb
JJ
2229
2230/* True if OpenMP should privatize what this DECL points to rather
2231 than the DECL itself. */
2232
2233bool
58f9752a 2234cxx_omp_privatize_by_reference (const_tree decl)
077b0dfb 2235{
9f613f06 2236 return (TYPE_REF_P (TREE_TYPE (decl))
acf0174b 2237 || is_invisiref_parm (decl));
077b0dfb 2238}
a68ab351 2239
20906c66
JJ
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only look through the reference for invisible-reference
	 parameters; other references keep their own semantics.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Find the named local variable the result was merged with
	     and prefer its (possibly const-qualified) type.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2282
93535a2b
TB
2283/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2284 of DECL is predetermined. */
20906c66
JJ
2285
2286enum omp_clause_default_kind
1c9ee609 2287cxx_omp_predetermined_sharing_1 (tree decl)
20906c66
JJ
2288{
2289 /* Static data members are predetermined shared. */
2290 if (TREE_STATIC (decl))
2291 {
2292 tree ctx = CP_DECL_CONTEXT (decl);
2293 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2294 return OMP_CLAUSE_DEFAULT_SHARED;
59bc434a
JJ
2295
2296 if (c_omp_predefined_variable (decl))
2297 return OMP_CLAUSE_DEFAULT_SHARED;
20906c66
JJ
2298 }
2299
28567c40
JJ
2300 /* this may not be specified in data-sharing clauses, still we need
2301 to predetermined it firstprivate. */
2302 if (decl == current_class_ptr)
2303 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
a68ab351
JJ
2304
2305 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2306}
2307
1c9ee609
JJ
2308/* Likewise, but also include the artificial vars. We don't want to
2309 disallow the artificial vars being mentioned in explicit clauses,
2310 as we use artificial vars e.g. for loop constructs with random
2311 access iterators other than pointers, but during gimplification
2312 we want to treat them as predetermined. */
2313
2314enum omp_clause_default_kind
2315cxx_omp_predetermined_sharing (tree decl)
2316{
2317 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2318 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2319 return ret;
2320
2321 /* Predetermine artificial variables holding integral values, those
2322 are usually result of gimplify_one_sizepos or SAVE_EXPR
2323 gimplification. */
2324 if (VAR_P (decl)
2325 && DECL_ARTIFICIAL (decl)
2326 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2327 && !(DECL_LANG_SPECIFIC (decl)
2328 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2329 return OMP_CLAUSE_DEFAULT_SHARED;
2330
8d7c0bf8
JJ
2331 /* Similarly for typeinfo symbols. */
2332 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2333 return OMP_CLAUSE_DEFAULT_SHARED;
2334
1c9ee609
JJ
2335 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2336}
2337
93535a2b
TB
2338enum omp_clause_defaultmap_kind
2339cxx_omp_predetermined_mapping (tree decl)
2340{
2341 /* Predetermine artificial variables holding integral values, those
2342 are usually result of gimplify_one_sizepos or SAVE_EXPR
2343 gimplification. */
2344 if (VAR_P (decl)
2345 && DECL_ARTIFICIAL (decl)
2346 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2347 && !(DECL_LANG_SPECIFIC (decl)
2348 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2349 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2350
2351 if (c_omp_predefined_variable (decl))
2352 return OMP_CLAUSE_DEFAULTMAP_TO;
2353
2354 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2355}
2356
a68ab351
JJ
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only firstprivate clauses and lastprivate clauses on loop IVs
     need special-member-function info here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
				     true))
    make_shared = true;

  /* If the needed special member functions are unavailable (or the
     decl is erroneous), degrade the clause to shared.  */
  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
d9a6bd32
JJ
2399
2400/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2401 disregarded in OpenMP construct, because it is going to be
2402 remapped during OpenMP lowering. SHARED is true if DECL
2403 is going to be shared, false if it is going to be privatized. */
2404
2405bool
2406cxx_omp_disregard_value_expr (tree decl, bool shared)
2407{
02f7334a
JJ
2408 if (shared)
2409 return false;
2410 if (VAR_P (decl)
2411 && DECL_HAS_VALUE_EXPR_P (decl)
2412 && DECL_ARTIFICIAL (decl)
2413 && DECL_LANG_SPECIFIC (decl)
2414 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2415 return true;
2416 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2417 return true;
2418 return false;
d9a6bd32 2419}
cda0a029 2420
7426fcc8
JM
2421/* Fold expression X which is used as an rvalue if RVAL is true. */
2422
4cd3e7df 2423tree
7426fcc8
JM
2424cp_fold_maybe_rvalue (tree x, bool rval)
2425{
66f90a17 2426 while (true)
7426fcc8 2427 {
66f90a17 2428 x = cp_fold (x);
f43e0585
JM
2429 if (rval)
2430 x = mark_rvalue_use (x);
fd338b13 2431 if (rval && DECL_P (x)
9f613f06 2432 && !TYPE_REF_P (TREE_TYPE (x)))
66f90a17
JM
2433 {
2434 tree v = decl_constant_value (x);
2435 if (v != x && v != error_mark_node)
2436 {
2437 x = v;
2438 continue;
2439 }
2440 }
2441 break;
7426fcc8 2442 }
66f90a17 2443 return x;
7426fcc8
JM
2444}
2445
2446/* Fold expression X which is used as an rvalue. */
2447
4cd3e7df 2448tree
7426fcc8
JM
2449cp_fold_rvalue (tree x)
2450{
2451 return cp_fold_maybe_rvalue (x, true);
2452}
2453
bf31620c
JM
2454/* Perform folding on expression X. */
2455
2456tree
2457cp_fully_fold (tree x)
2458{
2459 if (processing_template_decl)
2460 return x;
2461 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2462 have to call both. */
2463 if (cxx_dialect >= cxx11)
587b2f67
MP
2464 {
2465 x = maybe_constant_value (x);
2466 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2467 a TARGET_EXPR; undo that here. */
2468 if (TREE_CODE (x) == TARGET_EXPR)
2469 x = TARGET_EXPR_INITIAL (x);
2470 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2471 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2472 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2473 x = TREE_OPERAND (x, 0);
2474 }
bf31620c
JM
2475 return cp_fold_rvalue (x);
2476}
2477
50867d20
JJ
2478/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2479 in some cases. */
2480
2481tree
2482cp_fully_fold_init (tree x)
2483{
2484 if (processing_template_decl)
2485 return x;
2486 x = cp_fully_fold (x);
2487 hash_set<tree> pset;
2488 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2489 return x;
2490}
2491
4250754e
JM
2492/* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2493 and certain changes are made to the folding done. Or should be (FIXME). We
2494 never touch maybe_const, as it is only used for the C front-end
2495 C_MAYBE_CONST_EXPR. */
2496
2497tree
f9c59f7e 2498c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
4250754e 2499{
f9c59f7e 2500 return cp_fold_maybe_rvalue (x, !lval);
4250754e
JM
2501}
2502
7a7ac32a 2503static GTY((deletable)) hash_map<tree, tree> *fold_cache;
cda0a029 2504
1e297006
MP
2505/* Dispose of the whole FOLD_CACHE. */
2506
2507void
2508clear_fold_cache (void)
2509{
7a7ac32a
PP
2510 if (fold_cache != NULL)
2511 fold_cache->empty ();
1e297006
MP
2512}
2513
cda0a029
JM
2514/* This function tries to fold an expression X.
2515 To avoid combinatorial explosion, folding results are kept in fold_cache.
9a004410 2516 If X is invalid, we don't fold at all.
cda0a029
JM
2517 For performance reasons we don't cache expressions representing a
2518 declaration or constant.
2519 Function returns X or its folded variant. */
2520
2521static tree
2522cp_fold (tree x)
2523{
2524 tree op0, op1, op2, op3;
2525 tree org_x = x, r = NULL_TREE;
2526 enum tree_code code;
2527 location_t loc;
7426fcc8 2528 bool rval_ops = true;
cda0a029 2529
2fa586ad 2530 if (!x || x == error_mark_node)
cda0a029
JM
2531 return x;
2532
9a004410 2533 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
cda0a029
JM
2534 return x;
2535
2536 /* Don't bother to cache DECLs or constants. */
2537 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2538 return x;
2539
7a7ac32a
PP
2540 if (fold_cache == NULL)
2541 fold_cache = hash_map<tree, tree>::create_ggc (101);
2542
2543 if (tree *cached = fold_cache->get (x))
2544 return *cached;
cda0a029 2545
f65a3299
PP
2546 uid_sensitive_constexpr_evaluation_checker c;
2547
cda0a029
JM
2548 code = TREE_CODE (x);
2549 switch (code)
2550 {
c8b1fbc1
MP
2551 case CLEANUP_POINT_EXPR:
2552 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2553 effects. */
2554 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2555 if (!TREE_SIDE_EFFECTS (r))
2556 x = r;
2557 break;
2558
cda0a029
JM
2559 case SIZEOF_EXPR:
2560 x = fold_sizeof_expr (x);
2561 break;
2562
2563 case VIEW_CONVERT_EXPR:
7426fcc8 2564 rval_ops = false;
191816a3 2565 /* FALLTHRU */
cda0a029
JM
2566 case CONVERT_EXPR:
2567 case NOP_EXPR:
2568 case NON_LVALUE_EXPR:
2569
2570 if (VOID_TYPE_P (TREE_TYPE (x)))
19c37faa
JJ
2571 {
2572 /* This is just to make sure we don't end up with casts to
2573 void from error_mark_node. If we just return x, then
2574 cp_fold_r might fold the operand into error_mark_node and
2575 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2576 during gimplification doesn't like such casts.
2577 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2578 folding of the operand should be in the caches and if in cp_fold_r
2579 it will modify it in place. */
2580 op0 = cp_fold (TREE_OPERAND (x, 0));
2581 if (op0 == error_mark_node)
2582 x = error_mark_node;
2583 break;
2584 }
cda0a029 2585
cda0a029 2586 loc = EXPR_LOCATION (x);
8d8f3235 2587 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
cda0a029 2588
415594bb
JM
2589 if (code == CONVERT_EXPR
2590 && SCALAR_TYPE_P (TREE_TYPE (x))
2591 && op0 != void_node)
2592 /* During parsing we used convert_to_*_nofold; re-convert now using the
2593 folding variants, since fold() doesn't do those transformations. */
2594 x = fold (convert (TREE_TYPE (x), op0));
2595 else if (op0 != TREE_OPERAND (x, 0))
476805ae
JJ
2596 {
2597 if (op0 == error_mark_node)
2598 x = error_mark_node;
2599 else
2600 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2601 }
e9ea372d
JM
2602 else
2603 x = fold (x);
cda0a029
JM
2604
2605 /* Conversion of an out-of-range value has implementation-defined
2606 behavior; the language considers it different from arithmetic
2607 overflow, which is undefined. */
2608 if (TREE_CODE (op0) == INTEGER_CST
2609 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2610 TREE_OVERFLOW (x) = false;
2611
2612 break;
2613
290279c4
JM
2614 case INDIRECT_REF:
2615 /* We don't need the decltype(auto) obfuscation anymore. */
2616 if (REF_PARENTHESIZED_P (x))
2617 {
2618 tree p = maybe_undo_parenthesized_ref (x);
66f90a17 2619 return cp_fold (p);
290279c4
JM
2620 }
2621 goto unary;
2622
cda0a029 2623 case ADDR_EXPR:
715dd933
JJ
2624 loc = EXPR_LOCATION (x);
2625 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2626
2627 /* Cope with user tricks that amount to offsetof. */
2628 if (op0 != error_mark_node
7bdc7e06 2629 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
715dd933
JJ
2630 {
2631 tree val = get_base_address (op0);
2632 if (val
2633 && INDIRECT_REF_P (val)
2634 && COMPLETE_TYPE_P (TREE_TYPE (val))
2635 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2636 {
2637 val = TREE_OPERAND (val, 0);
2638 STRIP_NOPS (val);
1e9d6923 2639 val = maybe_constant_value (val);
715dd933 2640 if (TREE_CODE (val) == INTEGER_CST)
79e7b1fe 2641 return fold_offsetof (op0, TREE_TYPE (x));
715dd933
JJ
2642 }
2643 }
2644 goto finish_unary;
2645
cda0a029
JM
2646 case REALPART_EXPR:
2647 case IMAGPART_EXPR:
7426fcc8 2648 rval_ops = false;
191816a3 2649 /* FALLTHRU */
cda0a029
JM
2650 case CONJ_EXPR:
2651 case FIX_TRUNC_EXPR:
2652 case FLOAT_EXPR:
2653 case NEGATE_EXPR:
2654 case ABS_EXPR:
e197e64e 2655 case ABSU_EXPR:
cda0a029
JM
2656 case BIT_NOT_EXPR:
2657 case TRUTH_NOT_EXPR:
2658 case FIXED_CONVERT_EXPR:
290279c4 2659 unary:
cda0a029
JM
2660
2661 loc = EXPR_LOCATION (x);
7426fcc8 2662 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
cda0a029 2663
715dd933 2664 finish_unary:
cda0a029 2665 if (op0 != TREE_OPERAND (x, 0))
476805ae
JJ
2666 {
2667 if (op0 == error_mark_node)
2668 x = error_mark_node;
2669 else
0633ee10
JJ
2670 {
2671 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2672 if (code == INDIRECT_REF
2673 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2674 {
2675 TREE_READONLY (x) = TREE_READONLY (org_x);
2676 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2677 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2678 }
2679 }
476805ae 2680 }
e9ea372d
JM
2681 else
2682 x = fold (x);
cda0a029
JM
2683
2684 gcc_assert (TREE_CODE (x) != COND_EXPR
2685 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2686 break;
2687
d49b0aa0
MP
2688 case UNARY_PLUS_EXPR:
2689 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2690 if (op0 == error_mark_node)
2691 x = error_mark_node;
2692 else
2693 x = fold_convert (TREE_TYPE (x), op0);
2694 break;
2695
cda0a029
JM
2696 case POSTDECREMENT_EXPR:
2697 case POSTINCREMENT_EXPR:
2698 case INIT_EXPR:
cda0a029
JM
2699 case PREDECREMENT_EXPR:
2700 case PREINCREMENT_EXPR:
2701 case COMPOUND_EXPR:
7426fcc8
JM
2702 case MODIFY_EXPR:
2703 rval_ops = false;
191816a3 2704 /* FALLTHRU */
cda0a029
JM
2705 case POINTER_PLUS_EXPR:
2706 case PLUS_EXPR:
1af4ebf5 2707 case POINTER_DIFF_EXPR:
cda0a029
JM
2708 case MINUS_EXPR:
2709 case MULT_EXPR:
2710 case TRUNC_DIV_EXPR:
2711 case CEIL_DIV_EXPR:
2712 case FLOOR_DIV_EXPR:
2713 case ROUND_DIV_EXPR:
2714 case TRUNC_MOD_EXPR:
2715 case CEIL_MOD_EXPR:
2716 case ROUND_MOD_EXPR:
2717 case RDIV_EXPR:
2718 case EXACT_DIV_EXPR:
2719 case MIN_EXPR:
2720 case MAX_EXPR:
2721 case LSHIFT_EXPR:
2722 case RSHIFT_EXPR:
2723 case LROTATE_EXPR:
2724 case RROTATE_EXPR:
2725 case BIT_AND_EXPR:
2726 case BIT_IOR_EXPR:
2727 case BIT_XOR_EXPR:
2728 case TRUTH_AND_EXPR:
2729 case TRUTH_ANDIF_EXPR:
2730 case TRUTH_OR_EXPR:
2731 case TRUTH_ORIF_EXPR:
2732 case TRUTH_XOR_EXPR:
2733 case LT_EXPR: case LE_EXPR:
2734 case GT_EXPR: case GE_EXPR:
2735 case EQ_EXPR: case NE_EXPR:
2736 case UNORDERED_EXPR: case ORDERED_EXPR:
2737 case UNLT_EXPR: case UNLE_EXPR:
2738 case UNGT_EXPR: case UNGE_EXPR:
2739 case UNEQ_EXPR: case LTGT_EXPR:
2740 case RANGE_EXPR: case COMPLEX_EXPR:
cda0a029
JM
2741
2742 loc = EXPR_LOCATION (x);
7426fcc8
JM
2743 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2744 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
cda0a029
JM
2745
2746 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
476805ae
JJ
2747 {
2748 if (op0 == error_mark_node || op1 == error_mark_node)
2749 x = error_mark_node;
2750 else
2751 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2752 }
e9ea372d
JM
2753 else
2754 x = fold (x);
cda0a029 2755
315aa691
JJ
2756 /* This is only needed for -Wnonnull-compare and only if
2757 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2758 generation, we do it always. */
2759 if (COMPARISON_CLASS_P (org_x))
03ca8fb3
JJ
2760 {
2761 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2762 ;
2763 else if (COMPARISON_CLASS_P (x))
315aa691
JJ
2764 {
2765 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2766 TREE_NO_WARNING (x) = 1;
2767 }
03ca8fb3
JJ
2768 /* Otherwise give up on optimizing these, let GIMPLE folders
2769 optimize those later on. */
2770 else if (op0 != TREE_OPERAND (org_x, 0)
2771 || op1 != TREE_OPERAND (org_x, 1))
2772 {
2773 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
315aa691
JJ
2774 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2775 TREE_NO_WARNING (x) = 1;
03ca8fb3
JJ
2776 }
2777 else
2778 x = org_x;
2779 }
8a902edb 2780
cda0a029
JM
2781 break;
2782
2783 case VEC_COND_EXPR:
2784 case COND_EXPR:
cda0a029 2785 loc = EXPR_LOCATION (x);
7426fcc8 2786 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
cda0a029
JM
2787 op1 = cp_fold (TREE_OPERAND (x, 1));
2788 op2 = cp_fold (TREE_OPERAND (x, 2));
2789
627be19f
JM
2790 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2791 {
e525cfa7 2792 warning_sentinel s (warn_int_in_bool_context);
627be19f 2793 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2ab340fe 2794 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
627be19f 2795 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2ab340fe 2796 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
627be19f 2797 }
cb358080
JJ
2798 else if (VOID_TYPE_P (TREE_TYPE (x)))
2799 {
2800 if (TREE_CODE (op0) == INTEGER_CST)
2801 {
2802 /* If the condition is constant, fold can fold away
2803 the COND_EXPR. If some statement-level uses of COND_EXPR
2804 have one of the branches NULL, avoid folding crash. */
2805 if (!op1)
2806 op1 = build_empty_stmt (loc);
2807 if (!op2)
2808 op2 = build_empty_stmt (loc);
2809 }
2810 else
2811 {
2812 /* Otherwise, don't bother folding a void condition, since
2813 it can't produce a constant value. */
2814 if (op0 != TREE_OPERAND (x, 0)
2815 || op1 != TREE_OPERAND (x, 1)
2816 || op2 != TREE_OPERAND (x, 2))
2817 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2818 break;
2819 }
2820 }
627be19f 2821
7f26f7df
JM
2822 if (op0 != TREE_OPERAND (x, 0)
2823 || op1 != TREE_OPERAND (x, 1)
2824 || op2 != TREE_OPERAND (x, 2))
476805ae
JJ
2825 {
2826 if (op0 == error_mark_node
2827 || op1 == error_mark_node
2828 || op2 == error_mark_node)
2829 x = error_mark_node;
2830 else
2831 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2832 }
7f26f7df 2833 else
cda0a029
JM
2834 x = fold (x);
2835
683b8101
JM
2836 /* A COND_EXPR might have incompatible types in branches if one or both
2837 arms are bitfields. If folding exposed such a branch, fix it up. */
a5afbdd6 2838 if (TREE_CODE (x) != code
4ecd9c15 2839 && x != error_mark_node
a5afbdd6
MP
2840 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2841 x = fold_convert (TREE_TYPE (org_x), x);
683b8101 2842
cda0a029
JM
2843 break;
2844
2845 case CALL_EXPR:
2846 {
a700b4a9 2847 int sv = optimize, nw = sv;
cda0a029
JM
2848 tree callee = get_callee_fndecl (x);
2849
b925d25d
JM
2850 /* Some built-in function calls will be evaluated at compile-time in
2851 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2852 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3d78e008 2853 if (callee && fndecl_built_in_p (callee) && !optimize
cda0a029
JM
2854 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2855 && current_function_decl
2856 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2857 nw = 1;
cda0a029 2858
e4082611
JJ
2859 /* Defer folding __builtin_is_constant_evaluated. */
2860 if (callee
3d78e008 2861 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
ff603745 2862 BUILT_IN_FRONTEND))
e4082611
JJ
2863 break;
2864
ff603745
JJ
2865 if (callee
2866 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2867 BUILT_IN_FRONTEND))
2868 {
2869 x = fold_builtin_source_location (EXPR_LOCATION (x));
2870 break;
2871 }
2872
a700b4a9
PP
2873 bool changed = false;
2874 int m = call_expr_nargs (x);
2875 for (int i = 0; i < m; i++)
cda0a029
JM
2876 {
2877 r = cp_fold (CALL_EXPR_ARG (x, i));
2878 if (r != CALL_EXPR_ARG (x, i))
476805ae
JJ
2879 {
2880 if (r == error_mark_node)
2881 {
2882 x = error_mark_node;
2883 break;
2884 }
a700b4a9
PP
2885 if (!changed)
2886 x = copy_node (x);
2887 CALL_EXPR_ARG (x, i) = r;
2888 changed = true;
476805ae 2889 }
cda0a029 2890 }
476805ae
JJ
2891 if (x == error_mark_node)
2892 break;
cda0a029
JM
2893
2894 optimize = nw;
2895 r = fold (x);
2896 optimize = sv;
2897
2898 if (TREE_CODE (r) != CALL_EXPR)
2899 {
2900 x = cp_fold (r);
2901 break;
2902 }
2903
2904 optimize = nw;
2905
b925d25d
JM
2906 /* Invoke maybe_constant_value for functions declared
2907 constexpr and not called with AGGR_INIT_EXPRs.
cda0a029 2908 TODO:
b925d25d
JM
2909 Do constexpr expansion of expressions where the call itself is not
2910 constant, but the call followed by an INDIRECT_REF is. */
28577b86
JM
2911 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2912 && !flag_no_inline)
25cb6b33 2913 r = maybe_constant_value (x);
cda0a029
JM
2914 optimize = sv;
2915
2916 if (TREE_CODE (r) != CALL_EXPR)
2917 {
25cb6b33
JJ
2918 if (DECL_CONSTRUCTOR_P (callee))
2919 {
2920 loc = EXPR_LOCATION (x);
2921 tree s = build_fold_indirect_ref_loc (loc,
2922 CALL_EXPR_ARG (x, 0));
2923 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2924 }
cda0a029
JM
2925 x = r;
2926 break;
2927 }
2928
cda0a029
JM
2929 break;
2930 }
2931
2932 case CONSTRUCTOR:
2933 {
2934 unsigned i;
2935 constructor_elt *p;
2936 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
4b0b30ef 2937 vec<constructor_elt, va_gc> *nelts = NULL;
cda0a029 2938 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
4b0b30ef
JM
2939 {
2940 tree op = cp_fold (p->value);
4b0b30ef 2941 if (op != p->value)
476805ae
JJ
2942 {
2943 if (op == error_mark_node)
2944 {
2945 x = error_mark_node;
27de0fab 2946 vec_free (nelts);
476805ae
JJ
2947 break;
2948 }
27de0fab
RB
2949 if (nelts == NULL)
2950 nelts = elts->copy ();
2951 (*nelts)[i].value = op;
476805ae 2952 }
4b0b30ef 2953 }
27de0fab 2954 if (nelts)
570f86f9
JJ
2955 {
2956 x = build_constructor (TREE_TYPE (x), nelts);
2957 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2958 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2959 }
153dba6c
JJ
2960 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2961 x = fold (x);
cda0a029
JM
2962 break;
2963 }
2964 case TREE_VEC:
2965 {
2966 bool changed = false;
a700b4a9 2967 int n = TREE_VEC_LENGTH (x);
cda0a029 2968
a700b4a9 2969 for (int i = 0; i < n; i++)
cda0a029
JM
2970 {
2971 tree op = cp_fold (TREE_VEC_ELT (x, i));
cda0a029 2972 if (op != TREE_VEC_ELT (x, i))
a700b4a9
PP
2973 {
2974 if (!changed)
2975 x = copy_node (x);
2976 TREE_VEC_ELT (x, i) = op;
2977 changed = true;
2978 }
cda0a029 2979 }
cda0a029
JM
2980 }
2981
2982 break;
2983
2984 case ARRAY_REF:
2985 case ARRAY_RANGE_REF:
2986
2987 loc = EXPR_LOCATION (x);
2988 op0 = cp_fold (TREE_OPERAND (x, 0));
2989 op1 = cp_fold (TREE_OPERAND (x, 1));
2990 op2 = cp_fold (TREE_OPERAND (x, 2));
2991 op3 = cp_fold (TREE_OPERAND (x, 3));
2992
476805ae
JJ
2993 if (op0 != TREE_OPERAND (x, 0)
2994 || op1 != TREE_OPERAND (x, 1)
2995 || op2 != TREE_OPERAND (x, 2)
2996 || op3 != TREE_OPERAND (x, 3))
2997 {
2998 if (op0 == error_mark_node
2999 || op1 == error_mark_node
3000 || op2 == error_mark_node
3001 || op3 == error_mark_node)
3002 x = error_mark_node;
3003 else
0633ee10
JJ
3004 {
3005 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3006 TREE_READONLY (x) = TREE_READONLY (org_x);
3007 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3008 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3009 }
476805ae 3010 }
cda0a029
JM
3011
3012 x = fold (x);
3013 break;
3014
6b6ae9eb
MP
3015 case SAVE_EXPR:
3016 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3017 folding, evaluates to an invariant. In that case no need to wrap
3018 this folded tree with a SAVE_EXPR. */
3019 r = cp_fold (TREE_OPERAND (x, 0));
3020 if (tree_invariant_p (r))
3021 x = r;
3022 break;
3023
cda0a029
JM
3024 default:
3025 return org_x;
3026 }
3027
1f32d529
MP
3028 if (EXPR_P (x) && TREE_CODE (x) == code)
3029 {
3030 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3031 TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
3032 }
3033
f65a3299
PP
3034 if (!c.evaluation_restricted_p ())
3035 {
3036 fold_cache->put (org_x, x);
3037 /* Prevent that we try to fold an already folded result again. */
3038 if (x != org_x)
3039 fold_cache->put (x, x);
3040 }
cda0a029
JM
3041
3042 return x;
3043}
3044
2674fa47
JM
3045/* Look up either "hot" or "cold" in attribute list LIST. */
3046
3047tree
3048lookup_hotness_attribute (tree list)
3049{
3050 for (; list; list = TREE_CHAIN (list))
3051 {
3052 tree name = get_attribute_name (list);
3053 if (is_attribute_p ("hot", name)
3054 || is_attribute_p ("cold", name)
3055 || is_attribute_p ("likely", name)
3056 || is_attribute_p ("unlikely", name))
3057 break;
3058 }
3059 return list;
3060}
3061
3062/* Remove both "hot" and "cold" attributes from LIST. */
3063
3064static tree
3065remove_hotness_attribute (tree list)
3066{
3067 list = remove_attribute ("hot", list);
3068 list = remove_attribute ("cold", list);
3069 list = remove_attribute ("likely", list);
3070 list = remove_attribute ("unlikely", list);
3071 return list;
3072}
3073
3074/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3075 PREDICT_EXPR. */
3076
3077tree
2d9273ca 3078process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2674fa47
JM
3079{
3080 if (std_attrs == error_mark_node)
3081 return std_attrs;
3082 if (tree attr = lookup_hotness_attribute (std_attrs))
3083 {
3084 tree name = get_attribute_name (attr);
3085 bool hot = (is_attribute_p ("hot", name)
3086 || is_attribute_p ("likely", name));
3087 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3088 hot ? TAKEN : NOT_TAKEN);
2d9273ca 3089 SET_EXPR_LOCATION (pred, attrs_loc);
2674fa47
JM
3090 add_stmt (pred);
3091 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3092 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3093 get_attribute_name (other), name);
3094 std_attrs = remove_hotness_attribute (std_attrs);
3095 }
3096 return std_attrs;
3097}
3098
/* Helper of fold_builtin_source_location, return the
   std::source_location::__impl type after performing verification
   on it.  LOC is used for reporting any errors.  */

static tree
get_source_location_impl_type (location_t loc)
{
  /* Look up std::source_location; it must name a type.  */
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      /* TREE_LIST means an ambiguous lookup; report via the standard
	 qualified-lookup diagnostic, otherwise complain it isn't a type.  */
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  /* Look up the nested type std::source_location::__impl.  */
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error_at (loc, "%qD is not a class type", decl);
      return error_mark_node;
    }

  /* Verify __impl contains exactly the four expected non-static data
     members with the expected types.  CNT counts recognized fields;
     any unnamed or unexpected field zeroes it to trigger the error
     below.  */
  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_initializable_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      /* The two string members must be exactly 'const char *'.  */
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error_at (loc, "%qD does not have %<const char *%> type",
			    field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      /* Line and column members may be any integral type.  */
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error_at (loc, "%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      /* Unnamed field or unexpected member name: reject the type.  */
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error_at (loc, "%<std::source_location::__impl%> does not contain only "
		     "non-static data members %<_M_file_name%>, "
		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  /* The objects built from this type are immutable constants.  */
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
3179
/* Type for source_location_table hash_set.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;	/* Macro-expansion-point location of the call.  */
  unsigned uid;		/* DECL_UID of the containing function, or -1U.  */
  tree var;		/* Cached static VAR_DECL holding the __impl object.  */
};
3186
/* Traits class for function start hash maps below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  /* Hash on the (location, function-uid) pair; the VAR member is a
     cached payload and does not participate in identity.  */
  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  /* Deleted slots are encoded as (UNKNOWN_LOCATION, -1U, NULL_TREE).  */
  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  /* Empty slots are encoded as (UNKNOWN_LOCATION, 0, NULL_TREE);
     distinguished from deleted slots only by the uid member.  */
  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }
};
3245
/* Lazily-created GC-rooted table mapping (location, function) pairs to
   the static variables emitted for __builtin_source_location ().  */
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
/* Counter used to generate unique Lsrc_loc* labels for those variables.  */
static GTY(()) unsigned int source_location_id;
3249
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.  */

tree
fold_builtin_source_location (location_t loc)
{
  /* Resolve and verify std::source_location::__impl once; the result
     (possibly error_mark_node) is cached in source_location_impl.  */
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  /* Key the cache on the macro expansion point plus the containing
     function (its name is part of the object's value).  */
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      /* First call for this (location, function) pair: emit a static
	 constant __impl object and remember it.  */
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      /* Build the initializer field by field, in declaration order.
	 get_source_location_impl_type already guaranteed exactly these
	 four members exist, hence the gcc_unreachable default.  */
      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_initializable_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  /* Honor -fmacro-prefix-map remappings.  */
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      /* Empty string at namespace scope (no enclosing function).  */
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 0);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      *entryp = entry;
      entryp->var = var;
    }

  /* The builtin evaluates to the address of the cached object.  */
  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}
3342
cda0a029 3343#include "gt-cp-cp-gimplify.h"