]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/cp/cp-gimplify.c
[OpenMP] Fix mapping of artificial variables (PR94874)
[thirdparty/gcc.git] / gcc / cp / cp-gimplify.c
CommitLineData
24baab8a 1/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.
6de9cd9a 2
8d9254fc 3 Copyright (C) 2002-2020 Free Software Foundation, Inc.
6de9cd9a
DN
4 Contributed by Jason Merrill <jason@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
e77f031d 10Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
e77f031d
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
2adfab87 25#include "target.h"
c7131fb2 26#include "basic-block.h"
6de9cd9a 27#include "cp-tree.h"
c7131fb2 28#include "gimple.h"
2adfab87 29#include "predict.h"
c7131fb2 30#include "stor-layout.h"
726a989a 31#include "tree-iterator.h"
45b0be94 32#include "gimplify.h"
0a508bb6 33#include "c-family/c-ubsan.h"
314e6352
ML
34#include "stringpool.h"
35#include "attribs.h"
45b2222a 36#include "asan.h"
2674fa47 37#include "gcc-rich-location.h"
705f02b0 38#include "memmodel.h"
f30025bb 39#include "tm_p.h"
ff603745
JJ
40#include "output.h"
41#include "file-prefix-map.h"
42#include "cgraph.h"
f1f862ae 43#include "omp-general.h"
6de9cd9a 44
b2cb7511
TV
45/* Forward declarations. */
46
47static tree cp_genericize_r (tree *, int *, void *);
cda0a029 48static tree cp_fold_r (tree *, int *, void *);
e2df2328 49static void cp_genericize_tree (tree*, bool);
cda0a029 50static tree cp_fold (tree);
b2cb7511 51
fbc315db
ILT
/* Local declarations.  */

/* Index into bc_label: the kind of jump target a label represents.  */
enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN (DECL_CHAIN), one stack per bc_t kind.
   The innermost enclosing target is at the head.  */
static tree bc_label[2];
fbc315db
ILT
59
60/* Begin a scope which can be exited by a break or continue statement. BC
61 indicates which.
62
b2cb7511
TV
63 Just creates a label with location LOCATION and pushes it into the current
64 context. */
fbc315db
ILT
65
66static tree
b2cb7511 67begin_bc_block (enum bc_t bc, location_t location)
fbc315db 68{
b2cb7511 69 tree label = create_artificial_label (location);
910ad8de 70 DECL_CHAIN (label) = bc_label[bc];
1799e5d5 71 bc_label[bc] = label;
56632b27
JM
72 if (bc == bc_break)
73 LABEL_DECL_BREAK (label) = true;
74 else
75 LABEL_DECL_CONTINUE (label) = true;
fbc315db
ILT
76 return label;
77}
78
/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  /* Scopes must be finished in strict LIFO order.  */
  gcc_assert (label == bc_label[bc]);

  /* TREE_USED was set by get_bc_label when a break/continue actually
     jumped here; only then do we need to emit the label.  */
  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  /* Pop the label off the stack and detach it from the chain.  */
  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
98
726a989a
RB
99/* Get the LABEL_EXPR to represent a break or continue statement
100 in the current block scope. BC indicates which. */
fbc315db
ILT
101
102static tree
726a989a 103get_bc_label (enum bc_t bc)
fbc315db 104{
1799e5d5 105 tree label = bc_label[bc];
fbc315db 106
fbc315db
ILT
107 /* Mark the label used for finish_bc_block. */
108 TREE_USED (label) = 1;
726a989a 109 return label;
fbc315db
ILT
110}
111
6de9cd9a
DN
112/* Genericize a TRY_BLOCK. */
113
114static void
115genericize_try_block (tree *stmt_p)
116{
117 tree body = TRY_STMTS (*stmt_p);
118 tree cleanup = TRY_HANDLERS (*stmt_p);
119
f293ce4b 120 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
6de9cd9a
DN
121}
122
123/* Genericize a HANDLER by converting to a CATCH_EXPR. */
124
125static void
126genericize_catch_block (tree *stmt_p)
127{
128 tree type = HANDLER_TYPE (*stmt_p);
129 tree body = HANDLER_BODY (*stmt_p);
130
6de9cd9a 131 /* FIXME should the caught type go in TREE_TYPE? */
f293ce4b 132 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
6de9cd9a
DN
133}
134
726a989a
RB
/* A terser interface for building a representation of an exception
   specification.  Wraps BODY in a TRY_CATCH_EXPR whose handler is an
   EH_FILTER_EXPR allowing only the types in ALLOWED; FAILURE is the
   statement(s) executed when a disallowed type is thrown.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
152
6de9cd9a
DN
/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  The filter's failure path calls
   the unexpected-exception handler with the current exception pointer.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  /* Suppress warnings on the synthesized constructs; they don't
     correspond to anything the user wrote.  */
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
167
2674fa47
JM
168/* Return the first non-compound statement in STMT. */
169
170tree
171first_stmt (tree stmt)
172{
173 switch (TREE_CODE (stmt))
174 {
175 case STATEMENT_LIST:
176 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
177 return first_stmt (p->stmt);
178 return void_node;
179
180 case BIND_EXPR:
181 return first_stmt (BIND_EXPR_BODY (stmt));
182
183 default:
184 return stmt;
185 }
186}
187
5a508662
RH
/* Genericize an IF_STMT by turning it into a COND_EXPR.  Also warns when
   both branches carry the same hot/cold branch prediction, and folds away
   a branch when the condition is a constant with no side effects in the
   dead arm.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      /* If both arms start with the same [[likely]]/[[unlikely]]
	 PREDICT_EXPR, the annotation is contradictory — warn.  */
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  /* COND_EXPR requires both arms; synthesize empty statements.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* For a constant condition, keep only the live arm — but only when the
     dead arm has no side effects to preserve.  */
  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
233
fbc315db
ILT
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  WALK_SUBTREES/DATA come from the
   cp_genericize_r walk; subtrees are walked here explicitly so the outer
   walk must not descend again.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  protected_set_expr_location_if_unset (incr, start_locus);

  /* Genericize the pieces ourselves...  */
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  /* ...with break/continue targets in scope only around the body.  */
  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  /* Tell the outer walk not to revisit what we just walked.  */
  *walk_subtrees = 0;

  /* Emit a debug marker for the condition unless the loop is dead.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  /* Assemble the loop body: [debug] [exit-test] body continue-label
     [debug-incr] incr [debug] [exit-test].  */
  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  /* For a do-while loop the condition marker goes before the trailing
     exit test.  */
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      /* Loop never runs (while (0)) — or runs exactly once (do ... while
	 (0)), in which case the body alone suffices.  */
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      /* For an infinite loop, point the LOOP_EXPR at the first body
	 statement so the debugger stops somewhere useful.  */
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  /* The break label lives just past the loop.  */
  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
333
b2cb7511 334/* Genericize a FOR_STMT node *STMT_P. */
fbc315db
ILT
335
336static void
b2cb7511 337genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
338{
339 tree stmt = *stmt_p;
b2cb7511
TV
340 tree expr = NULL;
341 tree loop;
342 tree init = FOR_INIT_STMT (stmt);
fbc315db 343
b2cb7511
TV
344 if (init)
345 {
346 cp_walk_tree (&init, cp_genericize_r, data, NULL);
347 append_to_statement_list (init, &expr);
348 }
fbc315db 349
b2cb7511
TV
350 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
351 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
352 append_to_statement_list (loop, &expr);
27d93d2c
JM
353 if (expr == NULL_TREE)
354 expr = loop;
b2cb7511 355 *stmt_p = expr;
fbc315db
ILT
356}
357
b2cb7511 358/* Genericize a WHILE_STMT node *STMT_P. */
fbc315db
ILT
359
360static void
b2cb7511 361genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
362{
363 tree stmt = *stmt_p;
b2cb7511
TV
364 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
365 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
fbc315db
ILT
366}
367
b2cb7511 368/* Genericize a DO_STMT node *STMT_P. */
fbc315db
ILT
369
370static void
b2cb7511 371genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
372{
373 tree stmt = *stmt_p;
b2cb7511
TV
374 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
375 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
fbc315db
ILT
376}
377
b2cb7511 378/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
fbc315db
ILT
379
380static void
b2cb7511 381genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
fbc315db
ILT
382{
383 tree stmt = *stmt_p;
b2cb7511
TV
384 tree break_block, body, cond, type;
385 location_t stmt_locus = EXPR_LOCATION (stmt);
fbc315db 386
fbc315db
ILT
387 body = SWITCH_STMT_BODY (stmt);
388 if (!body)
c2255bc4 389 body = build_empty_stmt (stmt_locus);
b2cb7511
TV
390 cond = SWITCH_STMT_COND (stmt);
391 type = SWITCH_STMT_TYPE (stmt);
fbc315db 392
b2cb7511 393 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
d0f2db23
JJ
394
395 break_block = begin_bc_block (bc_break, stmt_locus);
396
397 cp_walk_tree (&body, cp_genericize_r, data, NULL);
b2cb7511
TV
398 cp_walk_tree (&type, cp_genericize_r, data, NULL);
399 *walk_subtrees = 0;
fbc315db 400
65791f42
JJ
401 if (TREE_USED (break_block))
402 SWITCH_BREAK_LABEL_P (break_block) = 1;
403 finish_bc_block (&body, bc_break, break_block);
9e851845 404 *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
1a2e9708
JJ
405 SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
406 gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
407 || !TREE_USED (break_block));
b2cb7511
TV
408}
409
/* Genericize a CONTINUE_STMT node *STMT_P: a PREDICT_EXPR hint followed
   by a goto to the innermost continue label.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  /* Force the predictor in even though it has no side effects; a plain
     append would drop it.  */
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
424
b2cb7511
TV
425/* Genericize a BREAK_STMT node *STMT_P. */
426
427static void
428genericize_break_stmt (tree *stmt_p)
429{
430 tree label = get_bc_label (bc_break);
431 location_t location = EXPR_LOCATION (*stmt_p);
432 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
433}
434
/* Genericize a OMP_FOR node *STMT_P.  Only the continue label is scoped
   here; break out of an OpenMP loop is not permitted, so no break label
   is created.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  /* Taskloop clauses are handled during gimplification instead.  */
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  /* Everything was walked explicitly; stop the outer walk.  */
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}
455
/* Hook into the middle of gimplifying an OMP_FOR node.  Gimplifies the
   whole statement into PRE_P, using OMP_FOR_GIMPLIFYING_P as a re-entry
   guard so the nested gimplify_and_add falls through to the generic
   OMP_FOR handling.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
476
934790cc
ILT
/* Gimplify an EXPR_STMT node *STMT_P: unwrap to the contained expression,
   issuing -Wunused-value diagnostics first, and substituting an empty
   statement list for an erroneous or vacuous statement.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
511
6de9cd9a
DN
/* Gimplify initialization from an AGGR_INIT_EXPR: when the RHS of
   *EXPR_P (an INIT_EXPR) contains an AGGR_INIT_EXPR or VEC_INIT_EXPR,
   retarget that initializer's slot at the LHS and replace the whole
   INIT_EXPR with the (now side-effecting) initializer.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* Advance to the second operand of a COMPOUND_EXPR chain; stop when
	 SUB was T itself (not a COMPOUND_EXPR).  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
564
/* Gimplify a MUST_NOT_THROW_EXPR into a GIMPLE_TRY whose catch handler is
   a GIMPLE_EH_MUST_NOT_THROW calling std::terminate.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  /* Give the wrapper a temporary if its value is used.  */
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      /* The caller still needs to gimplify the temporary.  */
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
7c34ced1 592
25de0a29
AH
/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.

   CODE is the tree code of the statement the copy appears in
   (e.g. MODIFY_EXPR), used to accept assignments unconditionally.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  /* The copy is the second operand of a COMPOUND_EXPR.  */
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
621
65a550b4
JM
622/* Returns true if evaluating E as an lvalue has side-effects;
623 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
624 have side-effects until there is a read or write through it. */
625
626static bool
627lvalue_has_side_effects (tree e)
628{
629 if (!TREE_SIDE_EFFECTS (e))
630 return false;
631 while (handled_component_p (e))
632 {
633 if (TREE_CODE (e) == ARRAY_REF
634 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
635 return true;
636 e = TREE_OPERAND (e, 0);
637 }
638 if (DECL_P (e))
639 /* Just naming a variable has no side-effects. */
640 return false;
641 else if (INDIRECT_REF_P (e))
642 /* Similarly, indirection has no side-effects. */
643 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
644 else
645 /* For anything else, trust TREE_SIDE_EFFECTS. */
646 return TREE_SIDE_EFFECTS (e);
647}
648
1a37b6d9
JJ
/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  GIMPLE_TEST_F is
   the predicate for an acceptable gimplified form.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  /* A bare variable could still be changed by later side effects;
     snapshot its current value into a temporary.  */
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}
664
7c34ced1
RH
/* Do C++-specific gimplification.  Args are as for gimplify_expr.
   Handles the C++ front-end tree codes that the language-independent
   gimplifier doesn't know, then defers to c_gimplify_expr for the rest.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* Statement nodes carry their own full-expression flag; save and
     install it around this gimplification.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	/* The built initialization is fresh front-end trees; fold and
	   genericize it before handing it back to the gimplifier.  */
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;' */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    /* These statement forms are lowered during genericization and must
       never survive to gimplification.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  /* Fold the C++ front-end builtins that must resolve before
	     gimplification: is_constant_evaluated is false by now, and
	     source_location folds to its constant object.  */
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	  else if (decl
		   && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
					 BUILT_IN_FRONTEND))
	    *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
5a508662 957
d8472c75 958static inline bool
58f9752a 959is_invisiref_parm (const_tree t)
d8472c75 960{
cc77ae10 961 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
d8472c75
JM
962 && DECL_BY_REFERENCE (t));
963}
964
10827cd8
JJ
965/* Return true if the uid in both int tree maps are equal. */
966
2a22f99c
TS
967bool
968cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
10827cd8 969{
10827cd8
JJ
970 return (a->uid == b->uid);
971}
972
973/* Hash a UID in a cxx_int_tree_map. */
974
975unsigned int
2a22f99c 976cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
10827cd8 977{
2a22f99c 978 return item->uid;
10827cd8
JJ
979}
980
4577f730
JJ
981/* A stable comparison routine for use with splay trees and DECLs. */
982
983static int
984splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
985{
986 tree a = (tree) xa;
987 tree b = (tree) xb;
988
989 return DECL_UID (a) - DECL_UID (b);
990}
991
/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  /* True for an OMP_PARALLEL region, false for OMP_TASK (set from the
     tree code in cp_genericize_r).  */
  bool is_parallel;
  /* True if the region's implicit data-sharing default is "shared"
     (the initial value for parallel, or after default(shared)).  */
  bool default_shared;
  /* Enclosing parallel/task region, or NULL for the outermost one.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from DECL (ordered by DECL_UID via splay_tree_compare_decl_uid)
     to its OMP_CLAUSE_DEFAULT_* data-sharing value in this region.  */
  splay_tree variables;
};
1001
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  /* Strip invisible references, ordinary references and array
     dimensions down to the underlying element type.  */
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  /* Only class types are interesting: for those, implicit firstprivate
     requires instantiating the copy ctor and dtor (see
     omp_cxx_notice_variable).  */
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  /* Thread-local variables are not subject to this tracking.  */
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  /* Variables with predetermined sharing are handled elsewhere.  */
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}
1023
/* Note DECL use in OpenMP region OMP_CTX during genericization.
   On the first use of DECL in OMP_CTX, record its implicit
   data-sharing (shared vs. firstprivate) in OMP_CTX->variables; when
   it comes out firstprivate, eagerly instantiate the copy ctor and
   dtor of its class type, since gimplification would be too late.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Propagate the use to enclosing regions first.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Scan outward (stopping at the innermost parallel) for a
	     region where DECL is not shared; if found, DECL is
	     implicitly firstprivate here.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* Function-local automatic variables and parameters with no
	     enclosing region mention are firstprivate as well.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
1076
/* Genericization context.  */

struct cp_genericize_data
{
  /* Trees already visited; prevents re-lowering (and exponential
     rewalking) of shared subtrees.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last; USING_STMT handling
     searches it for a BLOCK to hang IMPORTED_DECLs off.  */
  vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP parallel/task region, if any.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Innermost enclosing TRY_BLOCK or MUST_NOT_THROW_EXPR, if any;
     used for the -Wterminate diagnostics on THROW_EXPR.  */
  tree try_block;
  /* True while walking trees that must not get ubsan instrumentation,
     e.g. static initializers.  */
  bool no_sanitize_p;
  /* Whether invisible reference parms should be rewritten to
     dereferences by this walk.  */
  bool handle_invisiref_parm_p;
};
1088
cda0a029
JM
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of none-omp cases is something to move into
   the middle-end.  As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.

   Walk callback: STMT_P is the tree being visited, WALK_SUBTREES is
   cleared to stop descent, DATA is the hash_set of already-visited
   trees.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  /* Fold the node itself and replace it in place.  */
  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      /* OpenMP loop constructs need their operands walked selectively:
	 the init/cond/incr vectors must keep their exact shape for
	 later OMP lowering, so only fold inside the comparison and
	 increment operands, never the iteration variables.  */
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}
1164
2fa586ad
JM
1165/* Fold ALL the trees! FIXME we should be able to remove this, but
1166 apparently that still causes optimization regressions. */
1167
1168void
1169cp_fold_function (tree fndecl)
1170{
6f5bcd24
JJ
1171 hash_set<tree> pset;
1172 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
2fa586ad
JM
1173}
1174
b7689b96
JM
1175/* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1176
1177static tree genericize_spaceship (tree expr)
1178{
1179 iloc_sentinel s (cp_expr_location (expr));
1180 tree type = TREE_TYPE (expr);
1181 tree op0 = TREE_OPERAND (expr, 0);
1182 tree op1 = TREE_OPERAND (expr, 1);
1183 return genericize_spaceship (type, op0, op1);
1184}
1185
3539fc13
JM
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  Returns EXPR, possibly wrapped in a
   COMPOUND_EXPR whose first operand is the new DECL_EXPR.  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      /* A named pointer type was already declared; nothing to do.  */
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  /* Only anonymous variably-modified types need the helper decl.  */
  if (TYPE_NAME (vla) || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  /* Give the VLA an artificial TYPE_DECL and emit a DECL_EXPR for it
     ahead of EXPR so the gimplifier computes the size expressions.  */
  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  expr = build2 (COMPOUND_EXPR, type, dexp, expr);
  return expr;
}
1215
d8472c75
JM
1216/* Perform any pre-gimplification lowering of C++ front end trees to
1217 GENERIC. */
5a508662
RH
1218
1219static tree
d8472c75 1220cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
5a508662
RH
1221{
1222 tree stmt = *stmt_p;
ac3cbee5 1223 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
6e2830c3 1224 hash_set<tree> *p_set = wtd->p_set;
5a508662 1225
4577f730
JJ
1226 /* If in an OpenMP context, note var uses. */
1227 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
5a6ccc94 1228 && (VAR_P (stmt)
4577f730
JJ
1229 || TREE_CODE (stmt) == PARM_DECL
1230 || TREE_CODE (stmt) == RESULT_DECL)
1231 && omp_var_to_track (stmt))
1232 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1233
6aa80414
NS
1234 /* Don't dereference parms in a thunk, pass the references through. */
1235 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1236 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1237 {
1238 *walk_subtrees = 0;
1239 return NULL;
1240 }
1241
4b9f2115 1242 /* Dereference invisible reference parms. */
e2df2328 1243 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
d8472c75 1244 {
cc77ae10 1245 *stmt_p = convert_from_reference (stmt);
7cfd79d6 1246 p_set->add (*stmt_p);
d8472c75
JM
1247 *walk_subtrees = 0;
1248 return NULL;
1249 }
1250
10827cd8
JJ
1251 /* Map block scope extern declarations to visible declarations with the
1252 same name and type in outer scopes if any. */
1253 if (cp_function_chain->extern_decl_map
cb6da767 1254 && VAR_OR_FUNCTION_DECL_P (stmt)
10827cd8
JJ
1255 && DECL_EXTERNAL (stmt))
1256 {
1257 struct cxx_int_tree_map *h, in;
1258 in.uid = DECL_UID (stmt);
2a22f99c 1259 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
10827cd8
JJ
1260 if (h)
1261 {
1262 *stmt_p = h->to;
9e51f66f 1263 TREE_USED (h->to) |= TREE_USED (stmt);
10827cd8
JJ
1264 *walk_subtrees = 0;
1265 return NULL;
1266 }
1267 }
1268
6f3af356 1269 if (TREE_CODE (stmt) == INTEGER_CST
9f613f06 1270 && TYPE_REF_P (TREE_TYPE (stmt))
6f3af356
JJ
1271 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1272 && !wtd->no_sanitize_p)
1273 {
1274 ubsan_maybe_instrument_reference (stmt_p);
1275 if (*stmt_p != stmt)
1276 {
1277 *walk_subtrees = 0;
1278 return NULL_TREE;
1279 }
1280 }
1281
d8472c75 1282 /* Other than invisiref parms, don't walk the same tree twice. */
6e2830c3 1283 if (p_set->contains (stmt))
d8472c75
JM
1284 {
1285 *walk_subtrees = 0;
1286 return NULL_TREE;
1287 }
1288
c74985e3 1289 switch (TREE_CODE (stmt))
d8472c75 1290 {
c74985e3
JJ
1291 case ADDR_EXPR:
1292 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1293 {
1294 /* If in an OpenMP context, note var uses. */
1295 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1296 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1297 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1298 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
4577f730 1299 *walk_subtrees = 0;
c74985e3
JJ
1300 }
1301 break;
1302
1303 case RETURN_EXPR:
1304 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1305 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1306 *walk_subtrees = 0;
1307 break;
1308
1309 case OMP_CLAUSE:
1310 switch (OMP_CLAUSE_CODE (stmt))
1311 {
1312 case OMP_CLAUSE_LASTPRIVATE:
1313 /* Don't dereference an invisiref in OpenMP clauses. */
1314 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1315 {
1316 *walk_subtrees = 0;
1317 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1318 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1319 cp_genericize_r, data, NULL);
1320 }
1321 break;
1322 case OMP_CLAUSE_PRIVATE:
1323 /* Don't dereference an invisiref in OpenMP clauses. */
1324 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
4577f730 1325 *walk_subtrees = 0;
c74985e3
JJ
1326 else if (wtd->omp_ctx != NULL)
1327 {
1328 /* Private clause doesn't cause any references to the
1329 var in outer contexts, avoid calling
1330 omp_cxx_notice_variable for it. */
1331 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1332 wtd->omp_ctx = NULL;
1333 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1334 data, NULL);
1335 wtd->omp_ctx = old;
1336 *walk_subtrees = 0;
1337 }
1338 break;
1339 case OMP_CLAUSE_SHARED:
1340 case OMP_CLAUSE_FIRSTPRIVATE:
1341 case OMP_CLAUSE_COPYIN:
1342 case OMP_CLAUSE_COPYPRIVATE:
6a2892a6
JJ
1343 case OMP_CLAUSE_INCLUSIVE:
1344 case OMP_CLAUSE_EXCLUSIVE:
c74985e3
JJ
1345 /* Don't dereference an invisiref in OpenMP clauses. */
1346 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
acf0174b 1347 *walk_subtrees = 0;
c74985e3
JJ
1348 break;
1349 case OMP_CLAUSE_REDUCTION:
28567c40
JJ
1350 case OMP_CLAUSE_IN_REDUCTION:
1351 case OMP_CLAUSE_TASK_REDUCTION:
c74985e3
JJ
1352 /* Don't dereference an invisiref in reduction clause's
1353 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1354 still needs to be genericized. */
1355 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1356 {
1357 *walk_subtrees = 0;
1358 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1359 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1360 cp_genericize_r, data, NULL);
1361 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1362 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1363 cp_genericize_r, data, NULL);
1364 }
1365 break;
1366 default:
1367 break;
1368 }
1369 break;
1370
1371 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1372 to lower this construct before scanning it, so we need to lower these
1373 before doing anything else. */
1374 case CLEANUP_STMT:
1375 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1376 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1377 : TRY_FINALLY_EXPR,
1378 void_type_node,
1379 CLEANUP_BODY (stmt),
1380 CLEANUP_EXPR (stmt));
1381 break;
1382
1383 case IF_STMT:
f74d9c8f
JJ
1384 genericize_if_stmt (stmt_p);
1385 /* *stmt_p has changed, tail recurse to handle it again. */
1386 return cp_genericize_r (stmt_p, walk_subtrees, data);
f74d9c8f 1387
c74985e3
JJ
1388 /* COND_EXPR might have incompatible types in branches if one or both
1389 arms are bitfields. Fix it up now. */
1390 case COND_EXPR:
1391 {
1392 tree type_left
1393 = (TREE_OPERAND (stmt, 1)
1394 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1395 : NULL_TREE);
1396 tree type_right
1397 = (TREE_OPERAND (stmt, 2)
1398 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1399 : NULL_TREE);
1400 if (type_left
1401 && !useless_type_conversion_p (TREE_TYPE (stmt),
1402 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1403 {
1404 TREE_OPERAND (stmt, 1)
1405 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1406 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1407 type_left));
1408 }
1409 if (type_right
1410 && !useless_type_conversion_p (TREE_TYPE (stmt),
1411 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1412 {
1413 TREE_OPERAND (stmt, 2)
1414 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1415 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1416 type_right));
1417 }
1418 }
1419 break;
bbdf5682 1420
c74985e3 1421 case BIND_EXPR:
4577f730
JJ
1422 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1423 {
1424 tree decl;
1425 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
5a6ccc94 1426 if (VAR_P (decl)
4577f730
JJ
1427 && !DECL_EXTERNAL (decl)
1428 && omp_var_to_track (decl))
1429 {
1430 splay_tree_node n
1431 = splay_tree_lookup (wtd->omp_ctx->variables,
1432 (splay_tree_key) decl);
1433 if (n == NULL)
1434 splay_tree_insert (wtd->omp_ctx->variables,
1435 (splay_tree_key) decl,
1436 TREE_STATIC (decl)
1437 ? OMP_CLAUSE_DEFAULT_SHARED
1438 : OMP_CLAUSE_DEFAULT_PRIVATE);
1439 }
1440 }
45b2222a 1441 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
7b3a9795
MP
1442 {
1443 /* The point here is to not sanitize static initializers. */
1444 bool no_sanitize_p = wtd->no_sanitize_p;
1445 wtd->no_sanitize_p = true;
1446 for (tree decl = BIND_EXPR_VARS (stmt);
1447 decl;
1448 decl = DECL_CHAIN (decl))
1449 if (VAR_P (decl)
1450 && TREE_STATIC (decl)
1451 && DECL_INITIAL (decl))
1452 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1453 wtd->no_sanitize_p = no_sanitize_p;
1454 }
9771b263 1455 wtd->bind_expr_stack.safe_push (stmt);
ac3cbee5
RG
1456 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1457 cp_genericize_r, data, NULL);
9771b263 1458 wtd->bind_expr_stack.pop ();
c74985e3 1459 break;
ac3cbee5 1460
c74985e3
JJ
1461 case USING_STMT:
1462 {
1463 tree block = NULL_TREE;
1464
1465 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1466 BLOCK, and append an IMPORTED_DECL to its
1467 BLOCK_VARS chained list. */
1468 if (wtd->bind_expr_stack.exists ())
1469 {
1470 int i;
1471 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1472 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1473 break;
1474 }
1475 if (block)
1476 {
0c923157
JM
1477 tree decl = TREE_OPERAND (stmt, 0);
1478 gcc_assert (decl);
c74985e3 1479
0c923157
JM
1480 if (undeduced_auto_decl (decl))
1481 /* Omit from the GENERIC, the back-end can't handle it. */;
1482 else
1483 {
1484 tree using_directive = make_node (IMPORTED_DECL);
1485 TREE_TYPE (using_directive) = void_type_node;
ac3cbee5 1486
0c923157
JM
1487 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1488 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1489 BLOCK_VARS (block) = using_directive;
1490 }
c74985e3
JJ
1491 }
1492 /* The USING_STMT won't appear in GENERIC. */
1493 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1494 *walk_subtrees = 0;
1495 }
1496 break;
1497
1498 case DECL_EXPR:
1499 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
ac3cbee5 1500 {
c74985e3
JJ
1501 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1502 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1503 *walk_subtrees = 0;
ac3cbee5 1504 }
c74985e3 1505 else
ac3cbee5 1506 {
c74985e3
JJ
1507 tree d = DECL_EXPR_DECL (stmt);
1508 if (VAR_P (d))
1509 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
ac3cbee5 1510 }
c74985e3 1511 break;
4577f730 1512
c74985e3
JJ
1513 case OMP_PARALLEL:
1514 case OMP_TASK:
1515 case OMP_TASKLOOP:
1516 {
1517 struct cp_genericize_omp_taskreg omp_ctx;
1518 tree c, decl;
1519 splay_tree_node n;
1520
1521 *walk_subtrees = 0;
1522 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1523 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1524 omp_ctx.default_shared = omp_ctx.is_parallel;
1525 omp_ctx.outer = wtd->omp_ctx;
1526 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1527 wtd->omp_ctx = &omp_ctx;
1528 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1529 switch (OMP_CLAUSE_CODE (c))
1530 {
1531 case OMP_CLAUSE_SHARED:
1532 case OMP_CLAUSE_PRIVATE:
1533 case OMP_CLAUSE_FIRSTPRIVATE:
1534 case OMP_CLAUSE_LASTPRIVATE:
1535 decl = OMP_CLAUSE_DECL (c);
1536 if (decl == error_mark_node || !omp_var_to_track (decl))
1537 break;
1538 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1539 if (n != NULL)
1540 break;
1541 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1542 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1543 ? OMP_CLAUSE_DEFAULT_SHARED
1544 : OMP_CLAUSE_DEFAULT_PRIVATE);
1545 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1546 omp_cxx_notice_variable (omp_ctx.outer, decl);
4577f730 1547 break;
c74985e3
JJ
1548 case OMP_CLAUSE_DEFAULT:
1549 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1550 omp_ctx.default_shared = true;
1551 default:
4577f730 1552 break;
c74985e3
JJ
1553 }
1554 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1555 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1556 else
1557 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1558 wtd->omp_ctx = omp_ctx.outer;
1559 splay_tree_delete (omp_ctx.variables);
1560 }
1561 break;
1562
49ddde69
JJ
1563 case OMP_TARGET:
1564 cfun->has_omp_target = true;
1565 break;
1566
c74985e3
JJ
1567 case TRY_BLOCK:
1568 {
1569 *walk_subtrees = 0;
1570 tree try_block = wtd->try_block;
1571 wtd->try_block = stmt;
1572 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1573 wtd->try_block = try_block;
1574 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1575 }
1576 break;
1577
1578 case MUST_NOT_THROW_EXPR:
8243e2a9
JM
1579 /* MUST_NOT_THROW_COND might be something else with TM. */
1580 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1581 {
1582 *walk_subtrees = 0;
1583 tree try_block = wtd->try_block;
1584 wtd->try_block = stmt;
1585 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1586 wtd->try_block = try_block;
1587 }
c74985e3
JJ
1588 break;
1589
1590 case THROW_EXPR:
1591 {
1592 location_t loc = location_of (stmt);
1593 if (TREE_NO_WARNING (stmt))
1594 /* Never mind. */;
1595 else if (wtd->try_block)
1596 {
097f82ec
DM
1597 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1598 {
1599 auto_diagnostic_group d;
1600 if (warning_at (loc, OPT_Wterminate,
a9c697b8 1601 "%<throw%> will always call %<terminate%>")
097f82ec
DM
1602 && cxx_dialect >= cxx11
1603 && DECL_DESTRUCTOR_P (current_function_decl))
a9c697b8 1604 inform (loc, "in C++11 destructors default to %<noexcept%>");
097f82ec 1605 }
c74985e3
JJ
1606 }
1607 else
1608 {
1609 if (warn_cxx11_compat && cxx_dialect < cxx11
1610 && DECL_DESTRUCTOR_P (current_function_decl)
1611 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1612 == NULL_TREE)
1613 && (get_defaulted_eh_spec (current_function_decl)
1614 == empty_except_spec))
1615 warning_at (loc, OPT_Wc__11_compat,
a9c697b8
MS
1616 "in C++11 this %<throw%> will call %<terminate%> "
1617 "because destructors default to %<noexcept%>");
c74985e3
JJ
1618 }
1619 }
1620 break;
1621
1622 case CONVERT_EXPR:
1623 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1624 break;
1625
1626 case FOR_STMT:
1627 genericize_for_stmt (stmt_p, walk_subtrees, data);
1628 break;
1629
1630 case WHILE_STMT:
1631 genericize_while_stmt (stmt_p, walk_subtrees, data);
1632 break;
1633
1634 case DO_STMT:
1635 genericize_do_stmt (stmt_p, walk_subtrees, data);
1636 break;
1637
1638 case SWITCH_STMT:
1639 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1640 break;
1641
1642 case CONTINUE_STMT:
1643 genericize_continue_stmt (stmt_p);
1644 break;
1645
1646 case BREAK_STMT:
1647 genericize_break_stmt (stmt_p);
1648 break;
1649
b7689b96
JM
1650 case SPACESHIP_EXPR:
1651 *stmt_p = genericize_spaceship (*stmt_p);
1652 break;
1653
f1f862ae
JJ
1654 case OMP_DISTRIBUTE:
1655 /* Need to explicitly instantiate copy ctors on class iterators of
1656 composite distribute parallel for. */
1657 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1658 {
1659 tree *data[4] = { NULL, NULL, NULL, NULL };
1660 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1661 find_combined_omp_for, data, NULL);
1662 if (inner != NULL_TREE
1663 && TREE_CODE (inner) == OMP_FOR)
1664 {
1665 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1666 if (OMP_FOR_ORIG_DECLS (inner)
1667 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1668 i)) == TREE_LIST
1669 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1670 i)))
1671 {
1672 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1673 /* Class iterators aren't allowed on OMP_SIMD, so the only
1674 case we need to solve is distribute parallel for. */
1675 gcc_assert (TREE_CODE (inner) == OMP_FOR
1676 && data[1]);
1677 tree orig_decl = TREE_PURPOSE (orig);
1678 tree c, cl = NULL_TREE;
1679 for (c = OMP_FOR_CLAUSES (inner);
1680 c; c = OMP_CLAUSE_CHAIN (c))
1681 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1682 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1683 && OMP_CLAUSE_DECL (c) == orig_decl)
1684 {
1685 cl = c;
1686 break;
1687 }
1688 if (cl == NULL_TREE)
1689 {
1690 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1691 c; c = OMP_CLAUSE_CHAIN (c))
1692 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1693 && OMP_CLAUSE_DECL (c) == orig_decl)
1694 {
1695 cl = c;
1696 break;
1697 }
1698 }
1699 if (cl)
1700 {
1701 orig_decl = require_complete_type (orig_decl);
1702 tree inner_type = TREE_TYPE (orig_decl);
1703 if (orig_decl == error_mark_node)
1704 continue;
1705 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1706 inner_type = TREE_TYPE (inner_type);
1707
1708 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1709 inner_type = TREE_TYPE (inner_type);
1710 get_copy_ctor (inner_type, tf_warning_or_error);
1711 }
1712 }
1713 }
1714 }
1715 /* FALLTHRU */
c74985e3
JJ
1716 case OMP_FOR:
1717 case OMP_SIMD:
d81ab49d 1718 case OMP_LOOP:
950ad0ba 1719 case OACC_LOOP:
c74985e3
JJ
1720 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1721 break;
1722
1723 case PTRMEM_CST:
9d409934
JM
1724 /* By the time we get here we're handing off to the back end, so we don't
1725 need or want to preserve PTRMEM_CST anymore. */
1726 *stmt_p = cplus_expand_constant (stmt);
1727 *walk_subtrees = 0;
c74985e3
JJ
1728 break;
1729
1730 case MEM_REF:
f31a8339 1731 /* For MEM_REF, make sure not to sanitize the second operand even
c74985e3 1732 if it has reference type. It is just an offset with a type
f31a8339
JJ
1733 holding other information. There is no other processing we
1734 need to do for INTEGER_CSTs, so just ignore the second argument
1735 unconditionally. */
1736 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1737 *walk_subtrees = 0;
c74985e3
JJ
1738 break;
1739
1740 case NOP_EXPR:
3539fc13 1741 *stmt_p = predeclare_vla (*stmt_p);
c74985e3
JJ
1742 if (!wtd->no_sanitize_p
1743 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
9f613f06 1744 && TYPE_REF_P (TREE_TYPE (stmt)))
6f3af356 1745 ubsan_maybe_instrument_reference (stmt_p);
c74985e3
JJ
1746 break;
1747
1748 case CALL_EXPR:
861d4af8
AS
1749 /* Evaluate function concept checks instead of treating them as
1750 normal functions. */
1751 if (concept_check_p (stmt))
1752 {
1753 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1754 * walk_subtrees = 0;
1755 break;
1756 }
1757
c74985e3
JJ
1758 if (!wtd->no_sanitize_p
1759 && sanitize_flags_p ((SANITIZE_NULL
1760 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
944fa280
JJ
1761 {
1762 tree fn = CALL_EXPR_FN (stmt);
1763 if (fn != NULL_TREE
1764 && !error_operand_p (fn)
71a93b08 1765 && INDIRECT_TYPE_P (TREE_TYPE (fn))
944fa280
JJ
1766 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1767 {
1768 bool is_ctor
1769 = TREE_CODE (fn) == ADDR_EXPR
1770 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1771 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
45b2222a 1772 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
35228ac7 1773 ubsan_maybe_instrument_member_call (stmt, is_ctor);
45b2222a 1774 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
35228ac7 1775 cp_ubsan_maybe_instrument_member_call (stmt);
944fa280 1776 }
830421fc
JJ
1777 else if (fn == NULL_TREE
1778 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1779 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
9f613f06 1780 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
830421fc 1781 *walk_subtrees = 0;
944fa280 1782 }
aafdbe06
JM
1783 /* Fall through. */
1784 case AGGR_INIT_EXPR:
1785 /* For calls to a multi-versioned function, overload resolution
1786 returns the function with the highest target priority, that is,
1787 the version that will checked for dispatching first. If this
1788 version is inlinable, a direct call to this version can be made
1789 otherwise the call should go through the dispatcher. */
1790 {
f5f035a3 1791 tree fn = cp_get_callee_fndecl_nofold (stmt);
aafdbe06
JM
1792 if (fn && DECL_FUNCTION_VERSIONED (fn)
1793 && (current_function_decl == NULL
1794 || !targetm.target_option.can_inline_p (current_function_decl,
1795 fn)))
1796 if (tree dis = get_function_version_dispatcher (fn))
1797 {
1798 mark_versions_used (dis);
1799 dis = build_address (dis);
1800 if (TREE_CODE (stmt) == CALL_EXPR)
1801 CALL_EXPR_FN (stmt) = dis;
1802 else
1803 AGGR_INIT_EXPR_FN (stmt) = dis;
1804 }
1805 }
c74985e3
JJ
1806 break;
1807
570f86f9
JJ
1808 case TARGET_EXPR:
1809 if (TARGET_EXPR_INITIAL (stmt)
1810 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1811 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1812 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1813 break;
1814
a7ea3d2c
PP
1815 case REQUIRES_EXPR:
1816 /* Emit the value of the requires-expression. */
1817 *stmt_p = constant_boolean_node (constraints_satisfied_p (stmt),
1818 boolean_type_node);
1819 *walk_subtrees = 0;
1820 break;
1821
861d4af8
AS
1822 case TEMPLATE_ID_EXPR:
1823 gcc_assert (concept_check_p (stmt));
1824 /* Emit the value of the concept check. */
1825 *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
1826 walk_subtrees = 0;
1827 break;
1828
5a1706f6
JJ
1829 case STATEMENT_LIST:
1830 if (TREE_SIDE_EFFECTS (stmt))
1831 {
1832 tree_stmt_iterator i;
1833 int nondebug_stmts = 0;
1834 bool clear_side_effects = true;
1835 /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
1836 transforming an IF_STMT into COND_EXPR. If such stmt
1837 appears in a STATEMENT_LIST that contains only that
1838 stmt and some DEBUG_BEGIN_STMTs, without -g where the
1839 STATEMENT_LIST wouldn't be present at all the resulting
1840 expression wouldn't have TREE_SIDE_EFFECTS set, so make sure
1841 to clear it even on the STATEMENT_LIST in such cases. */
1842 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1843 {
1844 tree t = tsi_stmt (i);
1845 if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
1846 nondebug_stmts++;
1847 cp_walk_tree (tsi_stmt_ptr (i), cp_genericize_r, data, NULL);
1848 if (TREE_CODE (t) != DEBUG_BEGIN_STMT
1849 && (nondebug_stmts > 1 || TREE_SIDE_EFFECTS (tsi_stmt (i))))
1850 clear_side_effects = false;
1851 }
1852 if (clear_side_effects)
1853 TREE_SIDE_EFFECTS (stmt) = 0;
1854 *walk_subtrees = 0;
1855 }
1856 break;
1857
c74985e3
JJ
1858 default:
1859 if (IS_TYPE_OR_DECL_P (stmt))
1860 *walk_subtrees = 0;
1861 break;
944fa280 1862 }
ac3cbee5 1863
6e2830c3 1864 p_set->add (*stmt_p);
c8094d83 1865
5a508662
RH
1866 return NULL;
1867}
1868
b2cb7511
TV
/* Lower C++ front end trees to GENERIC in T_P.  HANDLE_INVISIREF_PARM_P
   says whether cp_genericize_r should rewrite invisible-reference
   parameters into dereferences.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  /* Initialize the shared walk state for cp_genericize_r.  */
  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  /* -fsanitize=vptr instrumentation runs as a separate pass over the
     whole lowered body.  */
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
1888
0a508bb6
JJ
1889/* If a function that should end with a return in non-void
1890 function doesn't obviously end with return, add ubsan
1b6fa695
ML
1891 instrumentation code to verify it at runtime. If -fsanitize=return
1892 is not enabled, instrument __builtin_unreachable. */
0a508bb6
JJ
1893
1894static void
1b6fa695 1895cp_maybe_instrument_return (tree fndecl)
0a508bb6
JJ
1896{
1897 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1898 || DECL_CONSTRUCTOR_P (fndecl)
1899 || DECL_DESTRUCTOR_P (fndecl)
1900 || !targetm.warn_func_return (fndecl))
1901 return;
1902
81e4859a
JJ
1903 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1904 /* Don't add __builtin_unreachable () if not optimizing, it will not
1905 improve any optimizations in that case, just break UB code.
1906 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1907 UBSan covers this with ubsan_instrument_return above where sufficient
1908 information is provided, while the __builtin_unreachable () below
1909 if return sanitization is disabled will just result in hard to
1910 understand runtime error without location. */
1911 && (!optimize
1912 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1913 return;
1914
0a508bb6
JJ
1915 tree t = DECL_SAVED_TREE (fndecl);
1916 while (t)
1917 {
1918 switch (TREE_CODE (t))
1919 {
1920 case BIND_EXPR:
1921 t = BIND_EXPR_BODY (t);
1922 continue;
1923 case TRY_FINALLY_EXPR:
78a5fce0 1924 case CLEANUP_POINT_EXPR:
0a508bb6
JJ
1925 t = TREE_OPERAND (t, 0);
1926 continue;
1927 case STATEMENT_LIST:
1928 {
1929 tree_stmt_iterator i = tsi_last (t);
f1bc6cae
JJ
1930 while (!tsi_end_p (i))
1931 {
1932 tree p = tsi_stmt (i);
1933 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1934 break;
1935 tsi_prev (&i);
1936 }
0a508bb6
JJ
1937 if (!tsi_end_p (i))
1938 {
1939 t = tsi_stmt (i);
1940 continue;
1941 }
1942 }
1943 break;
1944 case RETURN_EXPR:
1945 return;
1946 default:
1947 break;
1948 }
1949 break;
1950 }
1951 if (t == NULL_TREE)
1952 return;
459bcfb0
JJ
1953 tree *p = &DECL_SAVED_TREE (fndecl);
1954 if (TREE_CODE (*p) == BIND_EXPR)
1955 p = &BIND_EXPR_BODY (*p);
1b6fa695
ML
1956
1957 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1958 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1959 t = ubsan_instrument_return (loc);
1960 else
1961 {
1962 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1963 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1964 }
1965
459bcfb0 1966 append_to_statement_list (t, p);
0a508bb6
JJ
1967}
1968
5a508662
RH
/* Genericize FNDECL: lower its saved body from C++ front-end trees to
   GENERIC.  Also rewrites parameters and the return value that are
   passed/returned by invisible reference into reference types.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been consumed by the walk;
     restore whatever labels an enclosing cp_genericize had.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}
2043\f
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* DEFPARM: the first parameter that could carry a default argument,
     i.e. past the object argument(s) we supply ourselves.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit an explicit loop applying FN to every
	 (innermost) element of ARG1 (and correspondingly ARG2).  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Peel (possibly multidimensional) array types down to the
	 element type, building a reference to element [0]...[0].  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1: one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while P1 != END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call to FN.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2155
2156/* Return code to initialize DECL with its default constructor, or
2157 NULL if there's nothing to do. */
2158
2159tree
12308bc6 2160cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1799e5d5
RH
2161{
2162 tree info = CP_OMP_CLAUSE_INFO (clause);
2163 tree ret = NULL;
2164
2165 if (info)
2166 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2167
2168 return ret;
2169}
2170
2171/* Return code to initialize DST with a copy constructor from SRC. */
2172
2173tree
2174cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2175{
2176 tree info = CP_OMP_CLAUSE_INFO (clause);
2177 tree ret = NULL;
2178
2179 if (info)
2180 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2181 if (ret == NULL)
726a989a 2182 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1799e5d5
RH
2183
2184 return ret;
2185}
2186
2187/* Similarly, except use an assignment operator instead. */
2188
2189tree
2190cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2191{
2192 tree info = CP_OMP_CLAUSE_INFO (clause);
2193 tree ret = NULL;
2194
2195 if (info)
2196 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2197 if (ret == NULL)
726a989a 2198 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1799e5d5
RH
2199
2200 return ret;
2201}
2202
2203/* Return code to destroy DECL. */
2204
2205tree
2206cxx_omp_clause_dtor (tree clause, tree decl)
2207{
2208 tree info = CP_OMP_CLAUSE_INFO (clause);
2209 tree ret = NULL;
2210
2211 if (info)
2212 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2213
2214 return ret;
5a508662 2215}
077b0dfb
JJ
2216
2217/* True if OpenMP should privatize what this DECL points to rather
2218 than the DECL itself. */
2219
2220bool
58f9752a 2221cxx_omp_privatize_by_reference (const_tree decl)
077b0dfb 2222{
9f613f06 2223 return (TYPE_REF_P (TREE_TYPE (decl))
acf0174b 2224 || is_invisiref_parm (decl));
077b0dfb 2225}
a68ab351 2226
20906c66
JJ
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only look through the reference for invisible-reference
	 parms/results; real references are handled elsewhere.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Find the user variable the result was NRV-ed from and use
	     its (possibly const-qualified) type instead.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2269
93535a2b
TB
2270/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2271 of DECL is predetermined. */
20906c66
JJ
2272
2273enum omp_clause_default_kind
1c9ee609 2274cxx_omp_predetermined_sharing_1 (tree decl)
20906c66
JJ
2275{
2276 /* Static data members are predetermined shared. */
2277 if (TREE_STATIC (decl))
2278 {
2279 tree ctx = CP_DECL_CONTEXT (decl);
2280 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2281 return OMP_CLAUSE_DEFAULT_SHARED;
59bc434a
JJ
2282
2283 if (c_omp_predefined_variable (decl))
2284 return OMP_CLAUSE_DEFAULT_SHARED;
20906c66
JJ
2285 }
2286
28567c40
JJ
2287 /* this may not be specified in data-sharing clauses, still we need
2288 to predetermined it firstprivate. */
2289 if (decl == current_class_ptr)
2290 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
a68ab351
JJ
2291
2292 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2293}
2294
1c9ee609
JJ
2295/* Likewise, but also include the artificial vars. We don't want to
2296 disallow the artificial vars being mentioned in explicit clauses,
2297 as we use artificial vars e.g. for loop constructs with random
2298 access iterators other than pointers, but during gimplification
2299 we want to treat them as predetermined. */
2300
2301enum omp_clause_default_kind
2302cxx_omp_predetermined_sharing (tree decl)
2303{
2304 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2305 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2306 return ret;
2307
2308 /* Predetermine artificial variables holding integral values, those
2309 are usually result of gimplify_one_sizepos or SAVE_EXPR
2310 gimplification. */
2311 if (VAR_P (decl)
2312 && DECL_ARTIFICIAL (decl)
2313 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2314 && !(DECL_LANG_SPECIFIC (decl)
2315 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2316 return OMP_CLAUSE_DEFAULT_SHARED;
2317
8d7c0bf8
JJ
2318 /* Similarly for typeinfo symbols. */
2319 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2320 return OMP_CLAUSE_DEFAULT_SHARED;
2321
1c9ee609
JJ
2322 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2323}
2324
93535a2b
TB
2325enum omp_clause_defaultmap_kind
2326cxx_omp_predetermined_mapping (tree decl)
2327{
2328 /* Predetermine artificial variables holding integral values, those
2329 are usually result of gimplify_one_sizepos or SAVE_EXPR
2330 gimplification. */
2331 if (VAR_P (decl)
2332 && DECL_ARTIFICIAL (decl)
2333 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2334 && !(DECL_LANG_SPECIFIC (decl)
2335 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2336 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2337
2338 if (c_omp_predefined_variable (decl))
2339 return OMP_CLAUSE_DEFAULTMAP_TO;
2340
2341 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2342}
2343
a68ab351
JJ
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicit firstprivate clauses (and lastprivate on a loop
     iteration variable) need the special-function checks below.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
				     true))
    make_shared = true;

  /* If privatization is not possible (error or missing special member
     functions), degrade the clause to shared in place.  */
  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
d9a6bd32
JJ
2386
2387/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2388 disregarded in OpenMP construct, because it is going to be
2389 remapped during OpenMP lowering. SHARED is true if DECL
2390 is going to be shared, false if it is going to be privatized. */
2391
2392bool
2393cxx_omp_disregard_value_expr (tree decl, bool shared)
2394{
02f7334a
JJ
2395 if (shared)
2396 return false;
2397 if (VAR_P (decl)
2398 && DECL_HAS_VALUE_EXPR_P (decl)
2399 && DECL_ARTIFICIAL (decl)
2400 && DECL_LANG_SPECIFIC (decl)
2401 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2402 return true;
2403 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2404 return true;
2405 return false;
d9a6bd32 2406}
cda0a029 2407
7426fcc8
JM
/* Fold expression X which is used as an rvalue if RVAL is true.  */

tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  /* If the decl folds to a constant value, substitute it and
	     loop to fold the replacement expression again.  */
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
2432
2433/* Fold expression X which is used as an rvalue. */
2434
4cd3e7df 2435tree
7426fcc8
JM
2436cp_fold_rvalue (tree x)
2437{
2438 return cp_fold_maybe_rvalue (x, true);
2439}
2440
bf31620c
JM
/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}
2464
50867d20
JJ
2465/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2466 in some cases. */
2467
2468tree
2469cp_fully_fold_init (tree x)
2470{
2471 if (processing_template_decl)
2472 return x;
2473 x = cp_fully_fold (x);
2474 hash_set<tree> pset;
2475 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2476 return x;
2477}
2478
4250754e
JM
2479/* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2480 and certain changes are made to the folding done. Or should be (FIXME). We
2481 never touch maybe_const, as it is only used for the C front-end
2482 C_MAYBE_CONST_EXPR. */
2483
2484tree
f9c59f7e 2485c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
4250754e 2486{
f9c59f7e 2487 return cp_fold_maybe_rvalue (x, !lval);
4250754e
JM
2488}
2489
7a7ac32a 2490static GTY((deletable)) hash_map<tree, tree> *fold_cache;
cda0a029 2491
1e297006
MP
2492/* Dispose of the whole FOLD_CACHE. */
2493
2494void
2495clear_fold_cache (void)
2496{
7a7ac32a
PP
2497 if (fold_cache != NULL)
2498 fold_cache->empty ();
1e297006
MP
2499}
2500
cda0a029
JM
2501/* This function tries to fold an expression X.
2502 To avoid combinatorial explosion, folding results are kept in fold_cache.
9a004410 2503 If X is invalid, we don't fold at all.
cda0a029
JM
2504 For performance reasons we don't cache expressions representing a
2505 declaration or constant.
2506 Function returns X or its folded variant. */
2507
2508static tree
2509cp_fold (tree x)
2510{
2511 tree op0, op1, op2, op3;
2512 tree org_x = x, r = NULL_TREE;
2513 enum tree_code code;
2514 location_t loc;
7426fcc8 2515 bool rval_ops = true;
cda0a029 2516
2fa586ad 2517 if (!x || x == error_mark_node)
cda0a029
JM
2518 return x;
2519
9a004410 2520 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
cda0a029
JM
2521 return x;
2522
2523 /* Don't bother to cache DECLs or constants. */
2524 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2525 return x;
2526
7a7ac32a
PP
2527 if (fold_cache == NULL)
2528 fold_cache = hash_map<tree, tree>::create_ggc (101);
2529
2530 if (tree *cached = fold_cache->get (x))
2531 return *cached;
cda0a029 2532
f65a3299
PP
2533 uid_sensitive_constexpr_evaluation_checker c;
2534
cda0a029
JM
2535 code = TREE_CODE (x);
2536 switch (code)
2537 {
c8b1fbc1
MP
2538 case CLEANUP_POINT_EXPR:
2539 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2540 effects. */
2541 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2542 if (!TREE_SIDE_EFFECTS (r))
2543 x = r;
2544 break;
2545
cda0a029
JM
2546 case SIZEOF_EXPR:
2547 x = fold_sizeof_expr (x);
2548 break;
2549
2550 case VIEW_CONVERT_EXPR:
7426fcc8 2551 rval_ops = false;
191816a3 2552 /* FALLTHRU */
cda0a029
JM
2553 case CONVERT_EXPR:
2554 case NOP_EXPR:
2555 case NON_LVALUE_EXPR:
2556
2557 if (VOID_TYPE_P (TREE_TYPE (x)))
19c37faa
JJ
2558 {
2559 /* This is just to make sure we don't end up with casts to
2560 void from error_mark_node. If we just return x, then
2561 cp_fold_r might fold the operand into error_mark_node and
2562 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2563 during gimplification doesn't like such casts.
2564 Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2565 folding of the operand should be in the caches and if in cp_fold_r
2566 it will modify it in place. */
2567 op0 = cp_fold (TREE_OPERAND (x, 0));
2568 if (op0 == error_mark_node)
2569 x = error_mark_node;
2570 break;
2571 }
cda0a029 2572
cda0a029 2573 loc = EXPR_LOCATION (x);
8d8f3235 2574 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
cda0a029 2575
415594bb
JM
2576 if (code == CONVERT_EXPR
2577 && SCALAR_TYPE_P (TREE_TYPE (x))
2578 && op0 != void_node)
2579 /* During parsing we used convert_to_*_nofold; re-convert now using the
2580 folding variants, since fold() doesn't do those transformations. */
2581 x = fold (convert (TREE_TYPE (x), op0));
2582 else if (op0 != TREE_OPERAND (x, 0))
476805ae
JJ
2583 {
2584 if (op0 == error_mark_node)
2585 x = error_mark_node;
2586 else
2587 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2588 }
e9ea372d
JM
2589 else
2590 x = fold (x);
cda0a029
JM
2591
2592 /* Conversion of an out-of-range value has implementation-defined
2593 behavior; the language considers it different from arithmetic
2594 overflow, which is undefined. */
2595 if (TREE_CODE (op0) == INTEGER_CST
2596 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2597 TREE_OVERFLOW (x) = false;
2598
2599 break;
2600
290279c4
JM
2601 case INDIRECT_REF:
2602 /* We don't need the decltype(auto) obfuscation anymore. */
2603 if (REF_PARENTHESIZED_P (x))
2604 {
2605 tree p = maybe_undo_parenthesized_ref (x);
66f90a17 2606 return cp_fold (p);
290279c4
JM
2607 }
2608 goto unary;
2609
cda0a029 2610 case ADDR_EXPR:
715dd933
JJ
2611 loc = EXPR_LOCATION (x);
2612 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2613
2614 /* Cope with user tricks that amount to offsetof. */
2615 if (op0 != error_mark_node
7bdc7e06 2616 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
715dd933
JJ
2617 {
2618 tree val = get_base_address (op0);
2619 if (val
2620 && INDIRECT_REF_P (val)
2621 && COMPLETE_TYPE_P (TREE_TYPE (val))
2622 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2623 {
2624 val = TREE_OPERAND (val, 0);
2625 STRIP_NOPS (val);
1e9d6923 2626 val = maybe_constant_value (val);
715dd933 2627 if (TREE_CODE (val) == INTEGER_CST)
79e7b1fe 2628 return fold_offsetof (op0, TREE_TYPE (x));
715dd933
JJ
2629 }
2630 }
2631 goto finish_unary;
2632
cda0a029
JM
2633 case REALPART_EXPR:
2634 case IMAGPART_EXPR:
7426fcc8 2635 rval_ops = false;
191816a3 2636 /* FALLTHRU */
cda0a029
JM
2637 case CONJ_EXPR:
2638 case FIX_TRUNC_EXPR:
2639 case FLOAT_EXPR:
2640 case NEGATE_EXPR:
2641 case ABS_EXPR:
e197e64e 2642 case ABSU_EXPR:
cda0a029
JM
2643 case BIT_NOT_EXPR:
2644 case TRUTH_NOT_EXPR:
2645 case FIXED_CONVERT_EXPR:
290279c4 2646 unary:
cda0a029
JM
2647
2648 loc = EXPR_LOCATION (x);
7426fcc8 2649 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
cda0a029 2650
715dd933 2651 finish_unary:
cda0a029 2652 if (op0 != TREE_OPERAND (x, 0))
476805ae
JJ
2653 {
2654 if (op0 == error_mark_node)
2655 x = error_mark_node;
2656 else
0633ee10
JJ
2657 {
2658 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2659 if (code == INDIRECT_REF
2660 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2661 {
2662 TREE_READONLY (x) = TREE_READONLY (org_x);
2663 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2664 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2665 }
2666 }
476805ae 2667 }
e9ea372d
JM
2668 else
2669 x = fold (x);
cda0a029
JM
2670
2671 gcc_assert (TREE_CODE (x) != COND_EXPR
2672 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2673 break;
2674
d49b0aa0
MP
2675 case UNARY_PLUS_EXPR:
2676 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2677 if (op0 == error_mark_node)
2678 x = error_mark_node;
2679 else
2680 x = fold_convert (TREE_TYPE (x), op0);
2681 break;
2682
cda0a029
JM
2683 case POSTDECREMENT_EXPR:
2684 case POSTINCREMENT_EXPR:
2685 case INIT_EXPR:
cda0a029
JM
2686 case PREDECREMENT_EXPR:
2687 case PREINCREMENT_EXPR:
2688 case COMPOUND_EXPR:
7426fcc8
JM
2689 case MODIFY_EXPR:
2690 rval_ops = false;
191816a3 2691 /* FALLTHRU */
cda0a029
JM
2692 case POINTER_PLUS_EXPR:
2693 case PLUS_EXPR:
1af4ebf5 2694 case POINTER_DIFF_EXPR:
cda0a029
JM
2695 case MINUS_EXPR:
2696 case MULT_EXPR:
2697 case TRUNC_DIV_EXPR:
2698 case CEIL_DIV_EXPR:
2699 case FLOOR_DIV_EXPR:
2700 case ROUND_DIV_EXPR:
2701 case TRUNC_MOD_EXPR:
2702 case CEIL_MOD_EXPR:
2703 case ROUND_MOD_EXPR:
2704 case RDIV_EXPR:
2705 case EXACT_DIV_EXPR:
2706 case MIN_EXPR:
2707 case MAX_EXPR:
2708 case LSHIFT_EXPR:
2709 case RSHIFT_EXPR:
2710 case LROTATE_EXPR:
2711 case RROTATE_EXPR:
2712 case BIT_AND_EXPR:
2713 case BIT_IOR_EXPR:
2714 case BIT_XOR_EXPR:
2715 case TRUTH_AND_EXPR:
2716 case TRUTH_ANDIF_EXPR:
2717 case TRUTH_OR_EXPR:
2718 case TRUTH_ORIF_EXPR:
2719 case TRUTH_XOR_EXPR:
2720 case LT_EXPR: case LE_EXPR:
2721 case GT_EXPR: case GE_EXPR:
2722 case EQ_EXPR: case NE_EXPR:
2723 case UNORDERED_EXPR: case ORDERED_EXPR:
2724 case UNLT_EXPR: case UNLE_EXPR:
2725 case UNGT_EXPR: case UNGE_EXPR:
2726 case UNEQ_EXPR: case LTGT_EXPR:
2727 case RANGE_EXPR: case COMPLEX_EXPR:
cda0a029
JM
2728
2729 loc = EXPR_LOCATION (x);
7426fcc8
JM
2730 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2731 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
cda0a029
JM
2732
2733 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
476805ae
JJ
2734 {
2735 if (op0 == error_mark_node || op1 == error_mark_node)
2736 x = error_mark_node;
2737 else
2738 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2739 }
e9ea372d
JM
2740 else
2741 x = fold (x);
cda0a029 2742
315aa691
JJ
2743 /* This is only needed for -Wnonnull-compare and only if
2744 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2745 generation, we do it always. */
2746 if (COMPARISON_CLASS_P (org_x))
03ca8fb3
JJ
2747 {
2748 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2749 ;
2750 else if (COMPARISON_CLASS_P (x))
315aa691
JJ
2751 {
2752 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2753 TREE_NO_WARNING (x) = 1;
2754 }
03ca8fb3
JJ
2755 /* Otherwise give up on optimizing these, let GIMPLE folders
2756 optimize those later on. */
2757 else if (op0 != TREE_OPERAND (org_x, 0)
2758 || op1 != TREE_OPERAND (org_x, 1))
2759 {
2760 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
315aa691
JJ
2761 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2762 TREE_NO_WARNING (x) = 1;
03ca8fb3
JJ
2763 }
2764 else
2765 x = org_x;
2766 }
8a902edb 2767
cda0a029
JM
2768 break;
2769
2770 case VEC_COND_EXPR:
2771 case COND_EXPR:
cda0a029 2772 loc = EXPR_LOCATION (x);
7426fcc8 2773 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
cda0a029
JM
2774 op1 = cp_fold (TREE_OPERAND (x, 1));
2775 op2 = cp_fold (TREE_OPERAND (x, 2));
2776
627be19f
JM
2777 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2778 {
e525cfa7 2779 warning_sentinel s (warn_int_in_bool_context);
627be19f 2780 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2ab340fe 2781 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
627be19f 2782 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2ab340fe 2783 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
627be19f 2784 }
cb358080
JJ
2785 else if (VOID_TYPE_P (TREE_TYPE (x)))
2786 {
2787 if (TREE_CODE (op0) == INTEGER_CST)
2788 {
2789 /* If the condition is constant, fold can fold away
2790 the COND_EXPR. If some statement-level uses of COND_EXPR
2791 have one of the branches NULL, avoid folding crash. */
2792 if (!op1)
2793 op1 = build_empty_stmt (loc);
2794 if (!op2)
2795 op2 = build_empty_stmt (loc);
2796 }
2797 else
2798 {
2799 /* Otherwise, don't bother folding a void condition, since
2800 it can't produce a constant value. */
2801 if (op0 != TREE_OPERAND (x, 0)
2802 || op1 != TREE_OPERAND (x, 1)
2803 || op2 != TREE_OPERAND (x, 2))
2804 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2805 break;
2806 }
2807 }
627be19f 2808
7f26f7df
JM
2809 if (op0 != TREE_OPERAND (x, 0)
2810 || op1 != TREE_OPERAND (x, 1)
2811 || op2 != TREE_OPERAND (x, 2))
476805ae
JJ
2812 {
2813 if (op0 == error_mark_node
2814 || op1 == error_mark_node
2815 || op2 == error_mark_node)
2816 x = error_mark_node;
2817 else
2818 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2819 }
7f26f7df 2820 else
cda0a029
JM
2821 x = fold (x);
2822
683b8101
JM
2823 /* A COND_EXPR might have incompatible types in branches if one or both
2824 arms are bitfields. If folding exposed such a branch, fix it up. */
a5afbdd6 2825 if (TREE_CODE (x) != code
4ecd9c15 2826 && x != error_mark_node
a5afbdd6
MP
2827 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2828 x = fold_convert (TREE_TYPE (org_x), x);
683b8101 2829
cda0a029
JM
2830 break;
2831
2832 case CALL_EXPR:
2833 {
a700b4a9 2834 int sv = optimize, nw = sv;
cda0a029
JM
2835 tree callee = get_callee_fndecl (x);
2836
b925d25d
JM
2837 /* Some built-in function calls will be evaluated at compile-time in
2838 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2839 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
3d78e008 2840 if (callee && fndecl_built_in_p (callee) && !optimize
cda0a029
JM
2841 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2842 && current_function_decl
2843 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2844 nw = 1;
cda0a029 2845
e4082611
JJ
2846 /* Defer folding __builtin_is_constant_evaluated. */
2847 if (callee
3d78e008 2848 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
ff603745 2849 BUILT_IN_FRONTEND))
e4082611
JJ
2850 break;
2851
ff603745
JJ
2852 if (callee
2853 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2854 BUILT_IN_FRONTEND))
2855 {
2856 x = fold_builtin_source_location (EXPR_LOCATION (x));
2857 break;
2858 }
2859
a700b4a9
PP
2860 bool changed = false;
2861 int m = call_expr_nargs (x);
2862 for (int i = 0; i < m; i++)
cda0a029
JM
2863 {
2864 r = cp_fold (CALL_EXPR_ARG (x, i));
2865 if (r != CALL_EXPR_ARG (x, i))
476805ae
JJ
2866 {
2867 if (r == error_mark_node)
2868 {
2869 x = error_mark_node;
2870 break;
2871 }
a700b4a9
PP
2872 if (!changed)
2873 x = copy_node (x);
2874 CALL_EXPR_ARG (x, i) = r;
2875 changed = true;
476805ae 2876 }
cda0a029 2877 }
476805ae
JJ
2878 if (x == error_mark_node)
2879 break;
cda0a029
JM
2880
2881 optimize = nw;
2882 r = fold (x);
2883 optimize = sv;
2884
2885 if (TREE_CODE (r) != CALL_EXPR)
2886 {
2887 x = cp_fold (r);
2888 break;
2889 }
2890
2891 optimize = nw;
2892
b925d25d
JM
2893 /* Invoke maybe_constant_value for functions declared
2894 constexpr and not called with AGGR_INIT_EXPRs.
cda0a029 2895 TODO:
b925d25d
JM
2896 Do constexpr expansion of expressions where the call itself is not
2897 constant, but the call followed by an INDIRECT_REF is. */
28577b86
JM
2898 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2899 && !flag_no_inline)
25cb6b33 2900 r = maybe_constant_value (x);
cda0a029
JM
2901 optimize = sv;
2902
2903 if (TREE_CODE (r) != CALL_EXPR)
2904 {
25cb6b33
JJ
2905 if (DECL_CONSTRUCTOR_P (callee))
2906 {
2907 loc = EXPR_LOCATION (x);
2908 tree s = build_fold_indirect_ref_loc (loc,
2909 CALL_EXPR_ARG (x, 0));
2910 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2911 }
cda0a029
JM
2912 x = r;
2913 break;
2914 }
2915
cda0a029
JM
2916 break;
2917 }
2918
2919 case CONSTRUCTOR:
2920 {
2921 unsigned i;
2922 constructor_elt *p;
2923 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
4b0b30ef 2924 vec<constructor_elt, va_gc> *nelts = NULL;
cda0a029 2925 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
4b0b30ef
JM
2926 {
2927 tree op = cp_fold (p->value);
4b0b30ef 2928 if (op != p->value)
476805ae
JJ
2929 {
2930 if (op == error_mark_node)
2931 {
2932 x = error_mark_node;
27de0fab 2933 vec_free (nelts);
476805ae
JJ
2934 break;
2935 }
27de0fab
RB
2936 if (nelts == NULL)
2937 nelts = elts->copy ();
2938 (*nelts)[i].value = op;
476805ae 2939 }
4b0b30ef 2940 }
27de0fab 2941 if (nelts)
570f86f9
JJ
2942 {
2943 x = build_constructor (TREE_TYPE (x), nelts);
2944 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2945 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2946 }
153dba6c
JJ
2947 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2948 x = fold (x);
cda0a029
JM
2949 break;
2950 }
2951 case TREE_VEC:
2952 {
2953 bool changed = false;
a700b4a9 2954 int n = TREE_VEC_LENGTH (x);
cda0a029 2955
a700b4a9 2956 for (int i = 0; i < n; i++)
cda0a029
JM
2957 {
2958 tree op = cp_fold (TREE_VEC_ELT (x, i));
cda0a029 2959 if (op != TREE_VEC_ELT (x, i))
a700b4a9
PP
2960 {
2961 if (!changed)
2962 x = copy_node (x);
2963 TREE_VEC_ELT (x, i) = op;
2964 changed = true;
2965 }
cda0a029 2966 }
cda0a029
JM
2967 }
2968
2969 break;
2970
2971 case ARRAY_REF:
2972 case ARRAY_RANGE_REF:
2973
2974 loc = EXPR_LOCATION (x);
2975 op0 = cp_fold (TREE_OPERAND (x, 0));
2976 op1 = cp_fold (TREE_OPERAND (x, 1));
2977 op2 = cp_fold (TREE_OPERAND (x, 2));
2978 op3 = cp_fold (TREE_OPERAND (x, 3));
2979
476805ae
JJ
2980 if (op0 != TREE_OPERAND (x, 0)
2981 || op1 != TREE_OPERAND (x, 1)
2982 || op2 != TREE_OPERAND (x, 2)
2983 || op3 != TREE_OPERAND (x, 3))
2984 {
2985 if (op0 == error_mark_node
2986 || op1 == error_mark_node
2987 || op2 == error_mark_node
2988 || op3 == error_mark_node)
2989 x = error_mark_node;
2990 else
0633ee10
JJ
2991 {
2992 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2993 TREE_READONLY (x) = TREE_READONLY (org_x);
2994 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2995 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2996 }
476805ae 2997 }
cda0a029
JM
2998
2999 x = fold (x);
3000 break;
3001
6b6ae9eb
MP
3002 case SAVE_EXPR:
3003 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3004 folding, evaluates to an invariant. In that case no need to wrap
3005 this folded tree with a SAVE_EXPR. */
3006 r = cp_fold (TREE_OPERAND (x, 0));
3007 if (tree_invariant_p (r))
3008 x = r;
3009 break;
3010
cda0a029
JM
3011 default:
3012 return org_x;
3013 }
3014
1f32d529
MP
3015 if (EXPR_P (x) && TREE_CODE (x) == code)
3016 {
3017 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3018 TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
3019 }
3020
f65a3299
PP
3021 if (!c.evaluation_restricted_p ())
3022 {
3023 fold_cache->put (org_x, x);
3024 /* Prevent that we try to fold an already folded result again. */
3025 if (x != org_x)
3026 fold_cache->put (x, x);
3027 }
cda0a029
JM
3028
3029 return x;
3030}
3031
2674fa47
JM
3032/* Look up either "hot" or "cold" in attribute list LIST. */
3033
3034tree
3035lookup_hotness_attribute (tree list)
3036{
3037 for (; list; list = TREE_CHAIN (list))
3038 {
3039 tree name = get_attribute_name (list);
3040 if (is_attribute_p ("hot", name)
3041 || is_attribute_p ("cold", name)
3042 || is_attribute_p ("likely", name)
3043 || is_attribute_p ("unlikely", name))
3044 break;
3045 }
3046 return list;
3047}
3048
3049/* Remove both "hot" and "cold" attributes from LIST. */
3050
3051static tree
3052remove_hotness_attribute (tree list)
3053{
3054 list = remove_attribute ("hot", list);
3055 list = remove_attribute ("cold", list);
3056 list = remove_attribute ("likely", list);
3057 list = remove_attribute ("unlikely", list);
3058 return list;
3059}
3060
3061/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3062 PREDICT_EXPR. */
3063
3064tree
2d9273ca 3065process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2674fa47
JM
3066{
3067 if (std_attrs == error_mark_node)
3068 return std_attrs;
3069 if (tree attr = lookup_hotness_attribute (std_attrs))
3070 {
3071 tree name = get_attribute_name (attr);
3072 bool hot = (is_attribute_p ("hot", name)
3073 || is_attribute_p ("likely", name));
3074 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3075 hot ? TAKEN : NOT_TAKEN);
2d9273ca 3076 SET_EXPR_LOCATION (pred, attrs_loc);
2674fa47
JM
3077 add_stmt (pred);
3078 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3079 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3080 get_attribute_name (other), name);
3081 std_attrs = remove_hotness_attribute (std_attrs);
3082 }
3083 return std_attrs;
3084}
3085
/* Helper of fold_builtin_source_location, return the
   std::source_location::__impl type after performing verification
   on it.  LOC is used for reporting any errors.

   The type must be a class containing exactly the four non-static
   data members _M_file_name and _M_function_name (of type const char *)
   and _M_line and _M_column (of integral type); anything else is
   diagnosed and error_mark_node is returned.  */

static tree
get_source_location_impl_type (location_t loc)
{
  /* Look up ::std::source_location; it must name a type.  */
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  /* Then look up the nested __impl type inside it.  */
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error_at (loc, "%qD is not a class type", decl);
      return error_mark_node;
    }

  /* Count the expected data members; any unnamed or unrecognized field
     zeroes the count and stops the scan, so only an exact match of the
     four known members (CNT == 4) is accepted.  */
  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_initializable_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error_at (loc, "%qD does not have %<const char *%> type",
			    field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error_at (loc, "%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      /* Unexpected field: force the "wrong members" diagnostic below.  */
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error_at (loc, "%<std::source_location::__impl%> does not contain only "
		     "non-static data members %<_M_file_name%>, "
		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
3166
/* Type for source_location_table hash_set.  One entry per distinct
   (location, function) pair at which __builtin_source_location was
   folded.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;	/* Resolved source location of the call.  */
  unsigned uid;		/* DECL_UID of the enclosing function, or -1U.  */
  tree var;		/* Static variable holding the __impl object.  */
};
3173
/* Traits class for function start hash maps below.  Keys compare by
   (loc, uid); VAR does not participate in hashing or equality.  The
   deleted sentinel is (UNKNOWN_LOCATION, -1U, NULL) and the empty
   sentinel is (UNKNOWN_LOCATION, 0, NULL), so the two are distinct.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  /* Hash over location and function uid only.  */
  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  /* Entries are equal when both location and function uid match.  */
  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  /* Mark a slot as deleted (uid == -1U distinguishes it from empty).  */
  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  /* Zero-initialized storage counts as empty.  */
  static const bool empty_zero_p = true;

  /* Mark a slot as empty (uid == 0).  */
  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }
};
3232
/* GC-rooted cache of variables created for __builtin_source_location,
   keyed by (location, enclosing function) so repeated folds at the same
   spot reuse one static variable.  */
static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
/* Counter used to generate unique assembler names for those variables.  */
static GTY(()) unsigned int source_location_id;
3236
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.  Returns the address (as const void *) of a static
   const std::source_location::__impl object describing LOC, creating
   and caching that object on first use; returns a null pointer
   constant if the __impl type failed verification.  */

tree
fold_builtin_source_location (location_t loc)
{
  /* Lazily look up and verify std::source_location::__impl; the result
     (possibly error_mark_node) is cached in source_location_impl.  */
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  /* Key the cache on the macro-expansion-point location and the
     enclosing function (function name is part of the object).  */
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      /* First fold at this spot: build a static constexpr variable of
	 the __impl type with an internal label name.  */
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      /* Build the initializer field by field; the field set was already
	 validated by get_source_location_impl_type, so anything else
	 is unreachable.  */
      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_initializable_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  /* Honor -fmacro-prefix-map remappings.  */
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      /* Empty string at namespace scope (no enclosing function).  */
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 0);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      /* Cache the variable for subsequent folds at the same spot.  */
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}
3329
cda0a029 3330#include "gt-cp-cp-gimplify.h"