1 /* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.
2
3 Copyright (C) 2002-2022 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "asan.h"
37 #include "gcc-rich-location.h"
38 #include "memmodel.h"
39 #include "tm_p.h"
40 #include "output.h"
41 #include "file-prefix-map.h"
42 #include "cgraph.h"
43 #include "omp-general.h"
44
45 /* Forward declarations. */
46
47 static tree cp_genericize_r (tree *, int *, void *);
48 static tree cp_fold_r (tree *, int *, void *);
49 static void cp_genericize_tree (tree*, bool);
50 static tree cp_fold (tree);
51
52 /* Genericize a TRY_BLOCK. */
53
54 static void
55 genericize_try_block (tree *stmt_p)
56 {
57 tree body = TRY_STMTS (*stmt_p);
58 tree cleanup = TRY_HANDLERS (*stmt_p);
59
60 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
61 }
62
63 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
64
65 static void
66 genericize_catch_block (tree *stmt_p)
67 {
68 tree type = HANDLER_TYPE (*stmt_p);
69 tree body = HANDLER_BODY (*stmt_p);
70
71 /* FIXME should the caught type go in TREE_TYPE? */
72 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
73 }
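
/* An illustrative sketch (editorial, not part of the original source):
   for a handler written as

     try { f (); } catch (E &e) { g (e); }

   genericize_try_block and genericize_catch_block together produce
   approximately

     TRY_CATCH_EXPR (call to f,
                     CATCH_EXPR (E, handler body))

   i.e. the front-end TRY_BLOCK/HANDLER pair is rewritten into the
   language-independent GENERIC codes.  */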
74
75 /* A terser interface for building a representation of an exception
76 specification. */
77
78 static tree
79 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
80 {
81 tree t;
82
83 /* FIXME should the allowed types go in TREE_TYPE? */
84 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
85 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
86
87 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
88 append_to_statement_list (body, &TREE_OPERAND (t, 0));
89
90 return t;
91 }
92
93 /* Genericize an EH_SPEC_BLOCK by converting it to a
94 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
95
96 static void
97 genericize_eh_spec_block (tree *stmt_p)
98 {
99 tree body = EH_SPEC_STMTS (*stmt_p);
100 tree allowed = EH_SPEC_RAISES (*stmt_p);
101 tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
102
103 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
104 suppress_warning (*stmt_p);
105 suppress_warning (TREE_OPERAND (*stmt_p, 1));
106 }
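
/* Sketch (editorial): a dynamic exception specification such as

     void f () throw (E) { ... }

   is lowered to roughly

     TRY_CATCH_EXPR (body,
                     EH_FILTER_EXPR (allowed = { E },
                                     failure = call_unexpected_fn (exc_ptr)))

   so that throwing anything not in the allowed list reaches the
   EH_FILTER_FAILURE call.  */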
107
108 /* Return the first non-compound statement in STMT. */
109
110 tree
111 first_stmt (tree stmt)
112 {
113 switch (TREE_CODE (stmt))
114 {
115 case STATEMENT_LIST:
116 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
117 return first_stmt (p->stmt);
118 return void_node;
119
120 case BIND_EXPR:
121 return first_stmt (BIND_EXPR_BODY (stmt));
122
123 default:
124 return stmt;
125 }
126 }
127
128 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
129
130 static void
131 genericize_if_stmt (tree *stmt_p)
132 {
133 tree stmt, cond, then_, else_;
134 location_t locus = EXPR_LOCATION (*stmt_p);
135
136 stmt = *stmt_p;
137 cond = IF_COND (stmt);
138 then_ = THEN_CLAUSE (stmt);
139 else_ = ELSE_CLAUSE (stmt);
140
141 if (then_ && else_)
142 {
143 tree ft = first_stmt (then_);
144 tree fe = first_stmt (else_);
145 br_predictor pr;
146 if (TREE_CODE (ft) == PREDICT_EXPR
147 && TREE_CODE (fe) == PREDICT_EXPR
148 && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
149 && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
150 {
151 gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
152 richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
153 warning_at (&richloc, OPT_Wattributes,
154 "both branches of %<if%> statement marked as %qs",
155 pr == PRED_HOT_LABEL ? "likely" : "unlikely");
156 }
157 }
158
159 if (!then_)
160 then_ = build_empty_stmt (locus);
161 if (!else_)
162 else_ = build_empty_stmt (locus);
163
164 /* A consteval if has been verified not to have its then_/else_ blocks
165 entered by gotos/case labels from elsewhere, and as the then_ block
166 can contain unfolded immediate function calls, we have to discard
167 the then_ block regardless of whether else_ has side-effects or not. */
168 if (IF_STMT_CONSTEVAL_P (stmt))
169 stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
170 void_node, else_);
171 else
172 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
173 protected_set_expr_location_if_unset (stmt, locus);
174 *stmt_p = stmt;
175 }
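
/* Sketch (editorial): a plain `if (c) a (); else b ();' becomes
   COND_EXPR (c, a (), b ()); `if consteval { a (); } else { b (); }'
   becomes COND_EXPR (false, void_node, b ()) per the comment above; and

     if (c) [[likely]] a (); else [[likely]] b ();

   triggers the -Wattributes warning above, because both branches start
   with the same PRED_HOT_LABEL prediction.  */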
176
177 /* Hook into the middle of gimplifying an OMP_FOR node. */
178
179 static enum gimplify_status
180 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
181 {
182 tree for_stmt = *expr_p;
183 gimple_seq seq = NULL;
184
185 /* Protect ourselves from recursion. */
186 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
187 return GS_UNHANDLED;
188 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
189
190 gimplify_and_add (for_stmt, &seq);
191 gimple_seq_add_seq (pre_p, seq);
192
193 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
194
195 return GS_ALL_DONE;
196 }
197
198 /* Gimplify an EXPR_STMT node. */
199
200 static void
201 gimplify_expr_stmt (tree *stmt_p)
202 {
203 tree stmt = EXPR_STMT_EXPR (*stmt_p);
204
205 if (stmt == error_mark_node)
206 stmt = NULL;
207
208 /* Gimplification of a statement expression will nullify the
209 statement if all its side effects are moved to *PRE_P and *POST_P.
210
211 In this case we will not want to emit the gimplified statement.
212 However, we may still want to emit a warning, so we do that before
213 gimplification. */
214 if (stmt && warn_unused_value)
215 {
216 if (!TREE_SIDE_EFFECTS (stmt))
217 {
218 if (!IS_EMPTY_STMT (stmt)
219 && !VOID_TYPE_P (TREE_TYPE (stmt))
220 && !warning_suppressed_p (stmt, OPT_Wunused_value))
221 warning (OPT_Wunused_value, "statement with no effect");
222 }
223 else
224 warn_if_unused_value (stmt, input_location);
225 }
226
227 if (stmt == NULL_TREE)
228 stmt = alloc_stmt_list ();
229
230 *stmt_p = stmt;
231 }
232
233 /* Gimplify initialization from an AGGR_INIT_EXPR. */
234
235 static void
236 cp_gimplify_init_expr (tree *expr_p)
237 {
238 tree from = TREE_OPERAND (*expr_p, 1);
239 tree to = TREE_OPERAND (*expr_p, 0);
240 tree t;
241
242 /* What about code that pulls out the temp and uses it elsewhere? I
243 think that such code never uses the TARGET_EXPR as an initializer. If
244 I'm wrong, we'll abort because the temp won't have any RTL. In that
245 case, I guess we'll need to replace references somehow. */
246 if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
247 from = TARGET_EXPR_INITIAL (from);
248
249 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
250 inside the TARGET_EXPR. */
251 for (t = from; t; )
252 {
253 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
254
255 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
256 replace the slot operand with our target.
257
258 Should we add a target parm to gimplify_expr instead? No, as in this
259 case we want to replace the INIT_EXPR. */
260 if (TREE_CODE (sub) == AGGR_INIT_EXPR
261 || TREE_CODE (sub) == VEC_INIT_EXPR)
262 {
263 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
264 AGGR_INIT_EXPR_SLOT (sub) = to;
265 else
266 VEC_INIT_EXPR_SLOT (sub) = to;
267 *expr_p = from;
268
269 /* The initialization is now a side-effect, so the container can
270 become void. */
271 if (from != sub)
272 TREE_TYPE (from) = void_type_node;
273 }
274
275 /* Handle aggregate NSDMI. */
276 replace_placeholders (sub, to);
277
278 if (t == sub)
279 break;
280 else
281 t = TREE_OPERAND (t, 1);
282 }
283
284 }
285
286 /* Gimplify a MUST_NOT_THROW_EXPR. */
287
288 static enum gimplify_status
289 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
290 {
291 tree stmt = *expr_p;
292 tree temp = voidify_wrapper_expr (stmt, NULL);
293 tree body = TREE_OPERAND (stmt, 0);
294 gimple_seq try_ = NULL;
295 gimple_seq catch_ = NULL;
296 gimple *mnt;
297
298 gimplify_and_add (body, &try_);
299 mnt = gimple_build_eh_must_not_throw (terminate_fn);
300 gimple_seq_add_stmt_without_update (&catch_, mnt);
301 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
302
303 gimple_seq_add_stmt_without_update (pre_p, mnt);
304 if (temp)
305 {
306 *expr_p = temp;
307 return GS_OK;
308 }
309
310 *expr_p = NULL;
311 return GS_ALL_DONE;
312 }
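
/* Sketch (editorial): a MUST_NOT_THROW_EXPR wrapping BODY (e.g. a region
   that must not throw during exception handling) gimplifies to roughly

     try { BODY } catch (...) { std::terminate (); }

   expressed as a GIMPLE_TRY whose handler is the
   GIMPLE_EH_MUST_NOT_THROW built above from terminate_fn.  */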
313
314 /* Return TRUE if an operand (OP) of a given TYPE being copied is
315 really just an empty class copy.
316
317 Check that the operand has a simple form so that TARGET_EXPRs and
318 non-empty CONSTRUCTORs get reduced properly, and we leave the
319 return slot optimization alone because it isn't a copy. */
320
321 bool
322 simple_empty_class_p (tree type, tree op, tree_code code)
323 {
324 if (TREE_CODE (op) == COMPOUND_EXPR)
325 return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
326 if (SIMPLE_TARGET_EXPR_P (op)
327 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
328 /* The TARGET_EXPR is itself a simple copy, look through it. */
329 return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);
330
331 if (TREE_CODE (op) == PARM_DECL
332 && TREE_ADDRESSABLE (TREE_TYPE (op)))
333 {
334 tree fn = DECL_CONTEXT (op);
335 if (DECL_THUNK_P (fn)
336 || lambda_static_thunk_p (fn))
337 /* In a thunk, we pass through invisible reference parms, so this isn't
338 actually a copy. */
339 return false;
340 }
341
342 return
343 (TREE_CODE (op) == EMPTY_CLASS_EXPR
344 || code == MODIFY_EXPR
345 || is_gimple_lvalue (op)
346 || INDIRECT_REF_P (op)
347 || (TREE_CODE (op) == CONSTRUCTOR
348 && CONSTRUCTOR_NELTS (op) == 0)
349 || (TREE_CODE (op) == CALL_EXPR
350 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
351 && !TREE_CLOBBER_P (op)
352 && is_really_empty_class (type, /*ignore_vptr*/true);
353 }
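
/* Sketch (editorial): given `struct empty {}; empty a, b;', the
   assignment `a = b;' satisfies simple_empty_class_p, so the copy is
   elided below in cp_gimplify_expr and only the operands' side-effects
   (here none) are kept.  */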
354
355 /* Returns true if evaluating E as an lvalue has side-effects;
356 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
357 have side-effects until there is a read or write through it. */
358
359 static bool
360 lvalue_has_side_effects (tree e)
361 {
362 if (!TREE_SIDE_EFFECTS (e))
363 return false;
364 while (handled_component_p (e))
365 {
366 if (TREE_CODE (e) == ARRAY_REF
367 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
368 return true;
369 e = TREE_OPERAND (e, 0);
370 }
371 if (DECL_P (e))
372 /* Just naming a variable has no side-effects. */
373 return false;
374 else if (INDIRECT_REF_P (e))
375 /* Similarly, indirection has no side-effects. */
376 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
377 else
378 /* For anything else, trust TREE_SIDE_EFFECTS. */
379 return TREE_SIDE_EFFECTS (e);
380 }
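
/* Sketch (editorial): for `volatile int *p', the lvalue `*p' by itself
   has no side-effects (only an actual read or write through it would),
   so this returns false; whereas `a[i++]' returns true, because the
   index expression has side-effects.  */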
381
382 /* Gimplify *EXPR_P as an rvalue into an expression that can't be modified
383 by expressions with side-effects in other operands. */
384
385 static enum gimplify_status
386 gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
387 bool (*gimple_test_f) (tree))
388 {
389 enum gimplify_status t
390 = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
391 if (t == GS_ERROR)
392 return GS_ERROR;
393 else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
394 *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
395 return t;
396 }
397
398 /* Like gimplify_arg, but if ORDERED is set (which should be set if
399 any of the arguments this argument is sequenced before has
400 TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type
401 type are gimplified into an SSA_NAME or a fresh temporary, and for
402 non-is_gimple_reg_type we don't optimize away TARGET_EXPRs. */
403
404 static enum gimplify_status
405 cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
406 bool ordered)
407 {
408 enum gimplify_status t;
409 if (ordered
410 && !is_gimple_reg_type (TREE_TYPE (*arg_p))
411 && TREE_CODE (*arg_p) == TARGET_EXPR)
412 {
413 /* gimplify_arg would strip away the TARGET_EXPR, but
414 that can mean we don't copy the argument and some following
415 argument with side-effect could modify it. */
416 protected_set_expr_location (*arg_p, call_location);
417 return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
418 }
419 else
420 {
421 t = gimplify_arg (arg_p, pre_p, call_location);
422 if (t == GS_ERROR)
423 return GS_ERROR;
424 else if (ordered
425 && is_gimple_reg_type (TREE_TYPE (*arg_p))
426 && is_gimple_variable (*arg_p)
427 && TREE_CODE (*arg_p) != SSA_NAME
428 /* No need to force references into register, references
429 can't be modified. */
430 && !TYPE_REF_P (TREE_TYPE (*arg_p))
431 /* And this can't be modified either. */
432 && *arg_p != current_class_ptr)
433 *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
434 return t;
435 }
436
437 }
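
/* Sketch (editorial): for an operator expression rewritten as a call,
   e.g. `a = f ()' invoking a user-defined `operator=',
   CALL_EXPR_REVERSE_ARGS makes cp_gimplify_expr below gimplify the right
   operand first, and the ORDERED flag here forces earlier arguments into
   temporaries so that a later argument's side-effects cannot change
   their values.  */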
438
439 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
440
441 int
442 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
443 {
444 int saved_stmts_are_full_exprs_p = 0;
445 location_t loc = cp_expr_loc_or_input_loc (*expr_p);
446 enum tree_code code = TREE_CODE (*expr_p);
447 enum gimplify_status ret;
448
449 if (STATEMENT_CODE_P (code))
450 {
451 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
452 current_stmt_tree ()->stmts_are_full_exprs_p
453 = STMT_IS_FULL_EXPR_P (*expr_p);
454 }
455
456 switch (code)
457 {
458 case AGGR_INIT_EXPR:
459 simplify_aggr_init_expr (expr_p);
460 ret = GS_OK;
461 break;
462
463 case VEC_INIT_EXPR:
464 {
465 location_t loc = input_location;
466 tree init = VEC_INIT_EXPR_INIT (*expr_p);
467 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
468 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
469 input_location = EXPR_LOCATION (*expr_p);
470 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
471 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
472 from_array,
473 tf_warning_or_error);
474 hash_set<tree> pset;
475 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
476 cp_genericize_tree (expr_p, false);
477 copy_if_shared (expr_p);
478 ret = GS_OK;
479 input_location = loc;
480 }
481 break;
482
483 case THROW_EXPR:
484 /* FIXME communicate throw type to back end, probably by moving
485 THROW_EXPR into ../tree.def. */
486 *expr_p = TREE_OPERAND (*expr_p, 0);
487 ret = GS_OK;
488 break;
489
490 case MUST_NOT_THROW_EXPR:
491 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
492 break;
493
494 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
495 LHS of an assignment might also be involved in the RHS, as in bug
496 25979. */
497 case INIT_EXPR:
498 cp_gimplify_init_expr (expr_p);
499 if (TREE_CODE (*expr_p) != INIT_EXPR)
500 return GS_OK;
501 /* Fall through. */
502 case MODIFY_EXPR:
503 modify_expr_case:
504 {
505 /* If the back end isn't clever enough to know that the lhs and rhs
506 types are the same, add an explicit conversion. */
507 tree op0 = TREE_OPERAND (*expr_p, 0);
508 tree op1 = TREE_OPERAND (*expr_p, 1);
509
510 if (!error_operand_p (op0)
511 && !error_operand_p (op1)
512 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
513 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
514 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
515 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
516 TREE_TYPE (op0), op1);
517
518 else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
519 {
520 while (TREE_CODE (op1) == TARGET_EXPR)
521 /* We're disconnecting the initializer from its target,
522 don't create a temporary. */
523 op1 = TARGET_EXPR_INITIAL (op1);
524
525 /* Remove any copies of empty classes. Also drop volatile
526 variables on the RHS to avoid infinite recursion from
527 gimplify_expr trying to load the value. */
528 if (TREE_SIDE_EFFECTS (op1))
529 {
530 if (TREE_THIS_VOLATILE (op1)
531 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
532 op1 = build_fold_addr_expr (op1);
533
534 gimplify_and_add (op1, pre_p);
535 }
536 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
537 is_gimple_lvalue, fb_lvalue);
538 *expr_p = TREE_OPERAND (*expr_p, 0);
539 if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
540 /* Avoid 'return *<retval>;' */
541 *expr_p = TREE_OPERAND (*expr_p, 0);
542 }
543 /* P0145 says that the RHS is sequenced before the LHS.
544 gimplify_modify_expr gimplifies the RHS before the LHS, but that
545 isn't quite strong enough in two cases:
546
547 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
548 mean it's evaluated after the LHS.
549
550 2) the value calculation of the RHS is also sequenced before the
551 LHS, so for scalar assignment we need to preevaluate if the
552 RHS could be affected by LHS side-effects even if it has no
553 side-effects of its own. We don't need this for classes because
554 class assignment takes its RHS by reference. */
555 else if (flag_strong_eval_order > 1
556 && TREE_CODE (*expr_p) == MODIFY_EXPR
557 && lvalue_has_side_effects (op0)
558 && (TREE_CODE (op1) == CALL_EXPR
559 || (SCALAR_TYPE_P (TREE_TYPE (op1))
560 && !TREE_CONSTANT (op1))))
561 TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
562 }
563 ret = GS_OK;
564 break;
565
566 case EMPTY_CLASS_EXPR:
567 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
568 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
569 ret = GS_OK;
570 break;
571
572 case BASELINK:
573 *expr_p = BASELINK_FUNCTIONS (*expr_p);
574 ret = GS_OK;
575 break;
576
577 case TRY_BLOCK:
578 genericize_try_block (expr_p);
579 ret = GS_OK;
580 break;
581
582 case HANDLER:
583 genericize_catch_block (expr_p);
584 ret = GS_OK;
585 break;
586
587 case EH_SPEC_BLOCK:
588 genericize_eh_spec_block (expr_p);
589 ret = GS_OK;
590 break;
591
592 case USING_STMT:
593 gcc_unreachable ();
594
595 case FOR_STMT:
596 case WHILE_STMT:
597 case DO_STMT:
598 case SWITCH_STMT:
599 case CONTINUE_STMT:
600 case BREAK_STMT:
601 gcc_unreachable ();
602
603 case OMP_FOR:
604 case OMP_SIMD:
605 case OMP_DISTRIBUTE:
606 case OMP_LOOP:
607 case OMP_TASKLOOP:
608 ret = cp_gimplify_omp_for (expr_p, pre_p);
609 break;
610
611 case EXPR_STMT:
612 gimplify_expr_stmt (expr_p);
613 ret = GS_OK;
614 break;
615
616 case UNARY_PLUS_EXPR:
617 {
618 tree arg = TREE_OPERAND (*expr_p, 0);
619 tree type = TREE_TYPE (*expr_p);
620 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
621 : arg;
622 ret = GS_OK;
623 }
624 break;
625
626 case CALL_EXPR:
627 ret = GS_OK;
628 if (flag_strong_eval_order == 2
629 && CALL_EXPR_FN (*expr_p)
630 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
631 && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
632 {
633 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
634 enum gimplify_status t
635 = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
636 is_gimple_call_addr);
637 if (t == GS_ERROR)
638 ret = GS_ERROR;
639 /* GIMPLE considers most pointer conversion useless, but for
640 calls we actually care about the exact function pointer type. */
641 else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
642 CALL_EXPR_FN (*expr_p)
643 = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
644 }
645 if (!CALL_EXPR_FN (*expr_p))
646 /* Internal function call. */;
647 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
648 {
649 /* This is a call to a (compound) assignment operator that used
650 the operator syntax; gimplify the RHS first. */
651 gcc_assert (call_expr_nargs (*expr_p) == 2);
652 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
653 enum gimplify_status t
654 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
655 TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
656 if (t == GS_ERROR)
657 ret = GS_ERROR;
658 }
659 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
660 {
661 /* Leave the last argument for gimplify_call_expr, to avoid problems
662 with __builtin_va_arg_pack(). */
663 int nargs = call_expr_nargs (*expr_p) - 1;
664 int last_side_effects_arg = -1;
665 for (int i = nargs; i > 0; --i)
666 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
667 {
668 last_side_effects_arg = i;
669 break;
670 }
671 for (int i = 0; i < nargs; ++i)
672 {
673 enum gimplify_status t
674 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
675 i < last_side_effects_arg);
676 if (t == GS_ERROR)
677 ret = GS_ERROR;
678 }
679 }
680 else if (flag_strong_eval_order
681 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
682 {
683 /* If flag_strong_eval_order, evaluate the object argument first. */
684 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
685 if (INDIRECT_TYPE_P (fntype))
686 fntype = TREE_TYPE (fntype);
687 if (TREE_CODE (fntype) == METHOD_TYPE)
688 {
689 int nargs = call_expr_nargs (*expr_p);
690 bool side_effects = false;
691 for (int i = 1; i < nargs; ++i)
692 if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
693 {
694 side_effects = true;
695 break;
696 }
697 enum gimplify_status t
698 = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
699 side_effects);
700 if (t == GS_ERROR)
701 ret = GS_ERROR;
702 }
703 }
704 if (ret != GS_ERROR)
705 {
706 tree decl = cp_get_callee_fndecl_nofold (*expr_p);
707 if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
708 switch (DECL_FE_FUNCTION_CODE (decl))
709 {
710 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
711 *expr_p = boolean_false_node;
712 break;
713 case CP_BUILT_IN_SOURCE_LOCATION:
714 *expr_p
715 = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
716 break;
717 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
718 *expr_p
719 = fold_builtin_is_corresponding_member
720 (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
721 &CALL_EXPR_ARG (*expr_p, 0));
722 break;
723 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
724 *expr_p
725 = fold_builtin_is_pointer_inverconvertible_with_class
726 (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
727 &CALL_EXPR_ARG (*expr_p, 0));
728 break;
729 default:
730 break;
731 }
732 }
733 break;
734
735 case TARGET_EXPR:
736 /* A TARGET_EXPR that expresses direct-initialization should have been
737 elided by cp_gimplify_init_expr. */
738 gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
739 ret = GS_UNHANDLED;
740 break;
741
742 case PTRMEM_CST:
743 *expr_p = cplus_expand_constant (*expr_p);
744 if (TREE_CODE (*expr_p) == PTRMEM_CST)
745 ret = GS_ERROR;
746 else
747 ret = GS_OK;
748 break;
749
750 case RETURN_EXPR:
751 if (TREE_OPERAND (*expr_p, 0)
752 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
753 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
754 {
755 expr_p = &TREE_OPERAND (*expr_p, 0);
756 /* Avoid going through the INIT_EXPR case, which can
757 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
758 goto modify_expr_case;
759 }
760 /* Fall through. */
761
762 default:
763 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
764 break;
765 }
766
767 /* Restore saved state. */
768 if (STATEMENT_CODE_P (code))
769 current_stmt_tree ()->stmts_are_full_exprs_p
770 = saved_stmts_are_full_exprs_p;
771
772 return ret;
773 }
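
/* Sketch (editorial): under C++17 (-fstrong-eval-order), in

     b[i++] = foo ();

   the call to foo () is sequenced before the evaluation of b[i++], so
   the MODIFY_EXPR case above preevaluates the RHS into a temporary
   whenever the LHS has side-effects and the RHS is a call or a
   non-constant scalar.  */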
774
775 static inline bool
776 is_invisiref_parm (const_tree t)
777 {
778 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
779 && DECL_BY_REFERENCE (t));
780 }
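
/* Sketch (editorial): for a parameter of class type with a non-trivial
   copy constructor or destructor, e.g.

     struct S { S (const S &); ~S (); };
     void f (S s);

   the ABI passes S by invisible reference: DECL_BY_REFERENCE is set on
   the PARM_DECL, and cp_genericize_r below rewrites each use of `s'
   as a dereference of that reference.  */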
781
782 /* A stable comparison routine for use with splay trees and DECLs. */
783
784 static int
785 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
786 {
787 tree a = (tree) xa;
788 tree b = (tree) xb;
789
790 return DECL_UID (a) - DECL_UID (b);
791 }
792
793 /* OpenMP context during genericization. */
794
795 struct cp_genericize_omp_taskreg
796 {
797 bool is_parallel;
798 bool default_shared;
799 struct cp_genericize_omp_taskreg *outer;
800 splay_tree variables;
801 };
802
803 /* Return true if genericization should try to determine if
804 DECL is firstprivate or shared within task regions. */
805
806 static bool
807 omp_var_to_track (tree decl)
808 {
809 tree type = TREE_TYPE (decl);
810 if (is_invisiref_parm (decl))
811 type = TREE_TYPE (type);
812 else if (TYPE_REF_P (type))
813 type = TREE_TYPE (type);
814 while (TREE_CODE (type) == ARRAY_TYPE)
815 type = TREE_TYPE (type);
816 if (type == error_mark_node || !CLASS_TYPE_P (type))
817 return false;
818 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
819 return false;
820 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
821 return false;
822 return true;
823 }
824
825 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
826
827 static void
828 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
829 {
830 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
831 (splay_tree_key) decl);
832 if (n == NULL)
833 {
834 int flags = OMP_CLAUSE_DEFAULT_SHARED;
835 if (omp_ctx->outer)
836 omp_cxx_notice_variable (omp_ctx->outer, decl);
837 if (!omp_ctx->default_shared)
838 {
839 struct cp_genericize_omp_taskreg *octx;
840
841 for (octx = omp_ctx->outer; octx; octx = octx->outer)
842 {
843 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
844 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
845 {
846 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
847 break;
848 }
849 if (octx->is_parallel)
850 break;
851 }
852 if (octx == NULL
853 && (TREE_CODE (decl) == PARM_DECL
854 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
855 && DECL_CONTEXT (decl) == current_function_decl)))
856 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
857 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
858 {
859 /* DECL is implicitly determined firstprivate in
860 the current task construct. Ensure the copy ctor and
861 dtor are instantiated, because during gimplification
862 it will already be too late. */
863 tree type = TREE_TYPE (decl);
864 if (is_invisiref_parm (decl))
865 type = TREE_TYPE (type);
866 else if (TYPE_REF_P (type))
867 type = TREE_TYPE (type);
868 while (TREE_CODE (type) == ARRAY_TYPE)
869 type = TREE_TYPE (type);
870 get_copy_ctor (type, tf_none);
871 get_dtor (type, tf_none);
872 }
873 }
874 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
875 }
876 }
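
/* Sketch (editorial): in

     S s;                  // S has a copy ctor and dtor
     #pragma omp task
     s.use ();

   `s' is implicitly determined firstprivate in the task, so the copy
   ctor and dtor are instantiated here, during genericization; as the
   comment above notes, waiting until gimplification would be too
   late.  */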
877
878 /* Genericization context. */
879
880 struct cp_genericize_data
881 {
882 hash_set<tree> *p_set;
883 auto_vec<tree> bind_expr_stack;
884 struct cp_genericize_omp_taskreg *omp_ctx;
885 tree try_block;
886 bool no_sanitize_p;
887 bool handle_invisiref_parm_p;
888 };
889
890 /* Perform any pre-gimplification folding of C++ front end trees to
891 GENERIC.
892 Note: The folding of non-OMP cases should eventually move into
893 the middle end. For now most folding is done only on GENERIC
894 in fold-const, so we need to perform this before the transformation
895 to GIMPLE form. */
896
897 static tree
898 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
899 {
900 tree stmt = *stmt_p;
901 enum tree_code code = TREE_CODE (stmt);
902
903 switch (code)
904 {
905 case PTRMEM_CST:
906 if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
907 && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
908 {
909 if (!((hash_set<tree> *) data)->add (stmt))
910 error_at (PTRMEM_CST_LOCATION (stmt),
911 "taking address of an immediate function %qD",
912 PTRMEM_CST_MEMBER (stmt));
913 stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
914 break;
915 }
916 break;
917
918 case ADDR_EXPR:
919 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
920 && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
921 {
922 error_at (EXPR_LOCATION (stmt),
923 "taking address of an immediate function %qD",
924 TREE_OPERAND (stmt, 0));
925 stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
926 break;
927 }
928 break;
929
930 case CALL_EXPR:
931 if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
932 if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
933 && source_location_current_p (fndecl))
934 *stmt_p = stmt = cxx_constant_value (stmt);
935 break;
936
937 default:
938 break;
939 }
940
941 *stmt_p = stmt = cp_fold (*stmt_p);
942
943 if (((hash_set<tree> *) data)->add (stmt))
944 {
945 /* Don't walk subtrees of stmts we've already walked once; otherwise
946 we can have exponential complexity with e.g. lots of nested
947 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will always
948 return the same tree, whose subtrees were already walked the
949 first time cp_fold_r was called on it. */
950 *walk_subtrees = 0;
951 return NULL;
952 }
953
954 code = TREE_CODE (stmt);
955 switch (code)
956 {
957 tree x;
958 int i, n;
959 case OMP_FOR:
960 case OMP_SIMD:
961 case OMP_DISTRIBUTE:
962 case OMP_LOOP:
963 case OMP_TASKLOOP:
964 case OACC_LOOP:
965 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
966 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
967 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
968 x = OMP_FOR_COND (stmt);
969 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
970 {
971 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
972 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
973 }
974 else if (x && TREE_CODE (x) == TREE_VEC)
975 {
976 n = TREE_VEC_LENGTH (x);
977 for (i = 0; i < n; i++)
978 {
979 tree o = TREE_VEC_ELT (x, i);
980 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
981 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
982 }
983 }
984 x = OMP_FOR_INCR (stmt);
985 if (x && TREE_CODE (x) == TREE_VEC)
986 {
987 n = TREE_VEC_LENGTH (x);
988 for (i = 0; i < n; i++)
989 {
990 tree o = TREE_VEC_ELT (x, i);
991 if (o && TREE_CODE (o) == MODIFY_EXPR)
992 o = TREE_OPERAND (o, 1);
993 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
994 || TREE_CODE (o) == POINTER_PLUS_EXPR))
995 {
996 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
997 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
998 }
999 }
1000 }
1001 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1002 *walk_subtrees = 0;
1003 return NULL;
1004
1005 case IF_STMT:
1006 if (IF_STMT_CONSTEVAL_P (stmt))
1007 {
1008 /* Don't walk THEN_CLAUSE (stmt) for consteval if. IF_COND is always
1009 boolean_false_node. */
1010 cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
1011 cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
1012 *walk_subtrees = 0;
1013 return NULL;
1014 }
1015 break;
1016
1017 default:
1018 break;
1019 }
1020
1021 return NULL;
1022 }
1023
1024 /* Fold ALL the trees! FIXME we should be able to remove this, but
1025 apparently that still causes optimization regressions. */
1026
1027 void
1028 cp_fold_function (tree fndecl)
1029 {
1030 hash_set<tree> pset;
1031 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
1032 }
1033
1034 /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1035
1036 static tree genericize_spaceship (tree expr)
1037 {
1038 iloc_sentinel s (cp_expr_location (expr));
1039 tree type = TREE_TYPE (expr);
1040 tree op0 = TREE_OPERAND (expr, 0);
1041 tree op1 = TREE_OPERAND (expr, 1);
1042 return genericize_spaceship (input_location, type, op0, op1);
1043 }
1044
1045 /* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
1046 to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
1047 the middle-end (c++/88256). If EXPR is a DECL, use add_stmt and return
1048 NULL_TREE; otherwise return a COMPOUND_EXPR of the DECL_EXPR and EXPR. */
1049
1050 tree
1051 predeclare_vla (tree expr)
1052 {
1053 tree type = TREE_TYPE (expr);
1054 if (type == error_mark_node)
1055 return expr;
1056 if (is_typedef_decl (expr))
1057 type = DECL_ORIGINAL_TYPE (expr);
1058
1059 /* We need to strip pointers for gimplify_type_sizes. */
1060 tree vla = type;
1061 while (POINTER_TYPE_P (vla))
1062 {
1063 if (TYPE_NAME (vla))
1064 return expr;
1065 vla = TREE_TYPE (vla);
1066 }
1067 if (vla == type || TYPE_NAME (vla)
1068 || !variably_modified_type_p (vla, NULL_TREE))
1069 return expr;
1070
1071 tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
1072 DECL_ARTIFICIAL (decl) = 1;
1073 TYPE_NAME (vla) = decl;
1074 tree dexp = build_stmt (input_location, DECL_EXPR, decl);
1075 if (DECL_P (expr))
1076 {
1077 add_stmt (dexp);
1078 return NULL_TREE;
1079 }
1080 else
1081 {
1082 expr = build2 (COMPOUND_EXPR, type, dexp, expr);
1083 return expr;
1084 }
1085 }
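
/* Sketch (editorial): given the GNU VLA extension in C++,

     void f (int n, void *p)
     {
       int (*ap)[n] = (int (*)[n]) p;   // cast to pointer-to-VLA
     }

   the anonymous VLA type reached through the pointer gets a DECL_EXPR
   prepended here so that gimplify_type_sizes sees it (c++/88256).  */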
1086
1087 /* Perform any pre-gimplification lowering of C++ front end trees to
1088 GENERIC. */
1089
1090 static tree
1091 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1092 {
1093 tree stmt = *stmt_p;
1094 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1095 hash_set<tree> *p_set = wtd->p_set;
1096
1097 /* If in an OpenMP context, note var uses. */
1098 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1099 && (VAR_P (stmt)
1100 || TREE_CODE (stmt) == PARM_DECL
1101 || TREE_CODE (stmt) == RESULT_DECL)
1102 && omp_var_to_track (stmt))
1103 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1104
1105 /* Don't dereference parms in a thunk, pass the references through. */
1106 if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1107 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1108 {
1109 *walk_subtrees = 0;
1110 return NULL;
1111 }
1112
1113 /* Dereference invisible reference parms. */
1114 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1115 {
1116 *stmt_p = convert_from_reference (stmt);
1117 p_set->add (*stmt_p);
1118 *walk_subtrees = 0;
1119 return NULL;
1120 }
1121
1122 /* Map block scope extern declarations to visible declarations with the
1123 same name and type in outer scopes if any. */
1124 if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1125 if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1126 {
1127 if (alias != error_mark_node)
1128 {
1129 *stmt_p = alias;
1130 TREE_USED (alias) |= TREE_USED (stmt);
1131 }
1132 *walk_subtrees = 0;
1133 return NULL;
1134 }
1135
1136 if (TREE_CODE (stmt) == INTEGER_CST
1137 && TYPE_REF_P (TREE_TYPE (stmt))
1138 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1139 && !wtd->no_sanitize_p)
1140 {
1141 ubsan_maybe_instrument_reference (stmt_p);
1142 if (*stmt_p != stmt)
1143 {
1144 *walk_subtrees = 0;
1145 return NULL_TREE;
1146 }
1147 }
1148
1149 /* Other than invisiref parms, don't walk the same tree twice. */
1150 if (p_set->contains (stmt))
1151 {
1152 *walk_subtrees = 0;
1153 return NULL_TREE;
1154 }
1155
1156 switch (TREE_CODE (stmt))
1157 {
1158 case ADDR_EXPR:
1159 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1160 {
1161 /* If in an OpenMP context, note var uses. */
1162 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1163 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1164 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1165 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1166 *walk_subtrees = 0;
1167 }
1168 break;
1169
1170 case RETURN_EXPR:
1171 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1172 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1173 *walk_subtrees = 0;
1174 break;
1175
1176 case OMP_CLAUSE:
1177 switch (OMP_CLAUSE_CODE (stmt))
1178 {
1179 case OMP_CLAUSE_LASTPRIVATE:
1180 /* Don't dereference an invisiref in OpenMP clauses. */
1181 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1182 {
1183 *walk_subtrees = 0;
1184 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1185 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1186 cp_genericize_r, data, NULL);
1187 }
1188 break;
1189 case OMP_CLAUSE_PRIVATE:
1190 /* Don't dereference an invisiref in OpenMP clauses. */
1191 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1192 *walk_subtrees = 0;
1193 else if (wtd->omp_ctx != NULL)
1194 {
1195 /* Private clause doesn't cause any references to the
1196 var in outer contexts, avoid calling
1197 omp_cxx_notice_variable for it. */
1198 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1199 wtd->omp_ctx = NULL;
1200 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1201 data, NULL);
1202 wtd->omp_ctx = old;
1203 *walk_subtrees = 0;
1204 }
1205 break;
1206 case OMP_CLAUSE_SHARED:
1207 case OMP_CLAUSE_FIRSTPRIVATE:
1208 case OMP_CLAUSE_COPYIN:
1209 case OMP_CLAUSE_COPYPRIVATE:
1210 case OMP_CLAUSE_INCLUSIVE:
1211 case OMP_CLAUSE_EXCLUSIVE:
1212 /* Don't dereference an invisiref in OpenMP clauses. */
1213 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1214 *walk_subtrees = 0;
1215 break;
1216 case OMP_CLAUSE_REDUCTION:
1217 case OMP_CLAUSE_IN_REDUCTION:
1218 case OMP_CLAUSE_TASK_REDUCTION:
1219 /* Don't dereference an invisiref in reduction clause's
1220 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1221 still needs to be genericized. */
1222 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1223 {
1224 *walk_subtrees = 0;
1225 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1226 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1227 cp_genericize_r, data, NULL);
1228 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1229 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1230 cp_genericize_r, data, NULL);
1231 }
1232 break;
1233 default:
1234 break;
1235 }
1236 break;
1237
1238 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1239 to lower this construct before scanning it, so we need to lower these
1240 before doing anything else. */
1241 case CLEANUP_STMT:
1242 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1243 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1244 : TRY_FINALLY_EXPR,
1245 void_type_node,
1246 CLEANUP_BODY (stmt),
1247 CLEANUP_EXPR (stmt));
1248 break;
1249
1250 case IF_STMT:
1251 genericize_if_stmt (stmt_p);
1252 /* *stmt_p has changed, tail recurse to handle it again. */
1253 return cp_genericize_r (stmt_p, walk_subtrees, data);
1254
1255 /* COND_EXPR might have incompatible types in branches if one or both
1256 arms are bitfields. Fix it up now. */
1257 case COND_EXPR:
1258 {
1259 tree type_left
1260 = (TREE_OPERAND (stmt, 1)
1261 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1262 : NULL_TREE);
1263 tree type_right
1264 = (TREE_OPERAND (stmt, 2)
1265 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1266 : NULL_TREE);
1267 if (type_left
1268 && !useless_type_conversion_p (TREE_TYPE (stmt),
1269 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1270 {
1271 TREE_OPERAND (stmt, 1)
1272 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1273 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1274 type_left));
1275 }
1276 if (type_right
1277 && !useless_type_conversion_p (TREE_TYPE (stmt),
1278 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1279 {
1280 TREE_OPERAND (stmt, 2)
1281 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1282 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1283 type_right));
1284 }
1285 }
1286 break;
1287
1288 case BIND_EXPR:
1289 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1290 {
1291 tree decl;
1292 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1293 if (VAR_P (decl)
1294 && !DECL_EXTERNAL (decl)
1295 && omp_var_to_track (decl))
1296 {
1297 splay_tree_node n
1298 = splay_tree_lookup (wtd->omp_ctx->variables,
1299 (splay_tree_key) decl);
1300 if (n == NULL)
1301 splay_tree_insert (wtd->omp_ctx->variables,
1302 (splay_tree_key) decl,
1303 TREE_STATIC (decl)
1304 ? OMP_CLAUSE_DEFAULT_SHARED
1305 : OMP_CLAUSE_DEFAULT_PRIVATE);
1306 }
1307 }
1308 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1309 {
1310 /* The point here is to not sanitize static initializers. */
1311 bool no_sanitize_p = wtd->no_sanitize_p;
1312 wtd->no_sanitize_p = true;
1313 for (tree decl = BIND_EXPR_VARS (stmt);
1314 decl;
1315 decl = DECL_CHAIN (decl))
1316 if (VAR_P (decl)
1317 && TREE_STATIC (decl)
1318 && DECL_INITIAL (decl))
1319 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1320 wtd->no_sanitize_p = no_sanitize_p;
1321 }
1322 wtd->bind_expr_stack.safe_push (stmt);
1323 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1324 cp_genericize_r, data, NULL);
1325 wtd->bind_expr_stack.pop ();
1326 break;
1327
1328 case USING_STMT:
1329 {
1330 tree block = NULL_TREE;
1331
1332 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1333 BLOCK, and append an IMPORTED_DECL to its
1334 BLOCK_VARS chained list. */
1335 if (wtd->bind_expr_stack.exists ())
1336 {
1337 int i;
1338 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1339 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1340 break;
1341 }
1342 if (block)
1343 {
1344 tree decl = TREE_OPERAND (stmt, 0);
1345 gcc_assert (decl);
1346
1347 if (undeduced_auto_decl (decl))
1348 /* Omit from the GENERIC, the back-end can't handle it. */;
1349 else
1350 {
1351 tree using_directive = make_node (IMPORTED_DECL);
1352 TREE_TYPE (using_directive) = void_type_node;
1353 DECL_CONTEXT (using_directive) = current_function_decl;
1354
1355 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1356 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1357 BLOCK_VARS (block) = using_directive;
1358 }
1359 }
1360 /* The USING_STMT won't appear in GENERIC. */
1361 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1362 *walk_subtrees = 0;
1363 }
1364 break;
1365
1366 case DECL_EXPR:
1367 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1368 {
1369 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1370 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1371 *walk_subtrees = 0;
1372 }
1373 else
1374 {
1375 tree d = DECL_EXPR_DECL (stmt);
1376 if (VAR_P (d))
1377 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1378 }
1379 break;
1380
1381 case OMP_PARALLEL:
1382 case OMP_TASK:
1383 case OMP_TASKLOOP:
1384 {
1385 struct cp_genericize_omp_taskreg omp_ctx;
1386 tree c, decl;
1387 splay_tree_node n;
1388
1389 *walk_subtrees = 0;
1390 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1391 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1392 omp_ctx.default_shared = omp_ctx.is_parallel;
1393 omp_ctx.outer = wtd->omp_ctx;
1394 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1395 wtd->omp_ctx = &omp_ctx;
1396 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1397 switch (OMP_CLAUSE_CODE (c))
1398 {
1399 case OMP_CLAUSE_SHARED:
1400 case OMP_CLAUSE_PRIVATE:
1401 case OMP_CLAUSE_FIRSTPRIVATE:
1402 case OMP_CLAUSE_LASTPRIVATE:
1403 decl = OMP_CLAUSE_DECL (c);
1404 if (decl == error_mark_node || !omp_var_to_track (decl))
1405 break;
1406 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1407 if (n != NULL)
1408 break;
1409 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1410 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1411 ? OMP_CLAUSE_DEFAULT_SHARED
1412 : OMP_CLAUSE_DEFAULT_PRIVATE);
1413 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1414 omp_cxx_notice_variable (omp_ctx.outer, decl);
1415 break;
1416 case OMP_CLAUSE_DEFAULT:
1417 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1418 omp_ctx.default_shared = true;
1419 default:
1420 break;
1421 }
1422 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1423 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1424 cp_genericize_r, cp_walk_subtrees);
1425 else
1426 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1427 wtd->omp_ctx = omp_ctx.outer;
1428 splay_tree_delete (omp_ctx.variables);
1429 }
1430 break;
1431
1432 case OMP_TARGET:
1433 cfun->has_omp_target = true;
1434 break;
1435
1436 case TRY_BLOCK:
1437 {
1438 *walk_subtrees = 0;
1439 tree try_block = wtd->try_block;
1440 wtd->try_block = stmt;
1441 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1442 wtd->try_block = try_block;
1443 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1444 }
1445 break;
1446
1447 case MUST_NOT_THROW_EXPR:
1448 /* MUST_NOT_THROW_COND might be something else with TM. */
1449 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1450 {
1451 *walk_subtrees = 0;
1452 tree try_block = wtd->try_block;
1453 wtd->try_block = stmt;
1454 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1455 wtd->try_block = try_block;
1456 }
1457 break;
1458
1459 case THROW_EXPR:
1460 {
1461 location_t loc = location_of (stmt);
1462 if (warning_suppressed_p (stmt /* What warning? */))
1463 /* Never mind. */;
1464 else if (wtd->try_block)
1465 {
1466 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1467 {
1468 auto_diagnostic_group d;
1469 if (warning_at (loc, OPT_Wterminate,
1470 "%<throw%> will always call %<terminate%>")
1471 && cxx_dialect >= cxx11
1472 && DECL_DESTRUCTOR_P (current_function_decl))
1473 inform (loc, "in C++11 destructors default to %<noexcept%>");
1474 }
1475 }
1476 else
1477 {
1478 if (warn_cxx11_compat && cxx_dialect < cxx11
1479 && DECL_DESTRUCTOR_P (current_function_decl)
1480 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1481 == NULL_TREE)
1482 && (get_defaulted_eh_spec (current_function_decl)
1483 == empty_except_spec))
1484 warning_at (loc, OPT_Wc__11_compat,
1485 "in C++11 this %<throw%> will call %<terminate%> "
1486 "because destructors default to %<noexcept%>");
1487 }
1488 }
1489 break;
1490
1491 case CONVERT_EXPR:
1492 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1493 break;
1494
1495 case SPACESHIP_EXPR:
1496 *stmt_p = genericize_spaceship (*stmt_p);
1497 break;
1498
1499 case PTRMEM_CST:
1500 /* By the time we get here we're handing off to the back end, so we don't
1501 need or want to preserve PTRMEM_CST anymore. */
1502 *stmt_p = cplus_expand_constant (stmt);
1503 *walk_subtrees = 0;
1504 break;
1505
1506 case MEM_REF:
1507 /* For MEM_REF, make sure not to sanitize the second operand even
1508 if it has reference type. It is just an offset with a type
1509 holding other information. There is no other processing we
1510 need to do for INTEGER_CSTs, so just ignore the second argument
1511 unconditionally. */
1512 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1513 *walk_subtrees = 0;
1514 break;
1515
1516 case NOP_EXPR:
1517 *stmt_p = predeclare_vla (*stmt_p);
1518 if (!wtd->no_sanitize_p
1519 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1520 && TYPE_REF_P (TREE_TYPE (stmt)))
1521 ubsan_maybe_instrument_reference (stmt_p);
1522 break;
1523
1524 case CALL_EXPR:
1525 /* Evaluate function concept checks instead of treating them as
1526 normal functions. */
1527 if (concept_check_p (stmt))
1528 {
1529 *stmt_p = evaluate_concept_check (stmt);
1530 *walk_subtrees = 0;
1531 break;
1532 }
1533
1534 if (!wtd->no_sanitize_p
1535 && sanitize_flags_p ((SANITIZE_NULL
1536 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1537 {
1538 tree fn = CALL_EXPR_FN (stmt);
1539 if (fn != NULL_TREE
1540 && !error_operand_p (fn)
1541 && INDIRECT_TYPE_P (TREE_TYPE (fn))
1542 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1543 {
1544 bool is_ctor
1545 = TREE_CODE (fn) == ADDR_EXPR
1546 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1547 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1548 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1549 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1550 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1551 cp_ubsan_maybe_instrument_member_call (stmt);
1552 }
1553 else if (fn == NULL_TREE
1554 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1555 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1556 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1557 *walk_subtrees = 0;
1558 }
1559 /* Fall through. */
1560 case AGGR_INIT_EXPR:
1561 /* For calls to a multi-versioned function, overload resolution
1562 returns the function with the highest target priority, that is,
1563 the version that will be checked for dispatching first. If this
1564 version is inlinable, a direct call to this version can be made;
1565 otherwise the call should go through the dispatcher. */
1566 {
1567 tree fn = cp_get_callee_fndecl_nofold (stmt);
1568 if (fn && DECL_FUNCTION_VERSIONED (fn)
1569 && (current_function_decl == NULL
1570 || !targetm.target_option.can_inline_p (current_function_decl,
1571 fn)))
1572 if (tree dis = get_function_version_dispatcher (fn))
1573 {
1574 mark_versions_used (dis);
1575 dis = build_address (dis);
1576 if (TREE_CODE (stmt) == CALL_EXPR)
1577 CALL_EXPR_FN (stmt) = dis;
1578 else
1579 AGGR_INIT_EXPR_FN (stmt) = dis;
1580 }
1581 }
1582 break;
1583
1584 case TARGET_EXPR:
1585 if (TARGET_EXPR_INITIAL (stmt)
1586 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1587 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1588 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1589 break;
1590
1591 case TEMPLATE_ID_EXPR:
1592 gcc_assert (concept_check_p (stmt));
1593 /* Emit the value of the concept check. */
1594 *stmt_p = evaluate_concept_check (stmt);
1595 *walk_subtrees = 0;
1596 break;
1597
1598 case OMP_DISTRIBUTE:
1599 /* Need to explicitly instantiate copy ctors on class iterators of
1600 composite distribute parallel for. */
1601 if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1602 {
1603 tree *data[4] = { NULL, NULL, NULL, NULL };
1604 tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1605 find_combined_omp_for, data, NULL);
1606 if (inner != NULL_TREE
1607 && TREE_CODE (inner) == OMP_FOR)
1608 {
1609 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1610 if (OMP_FOR_ORIG_DECLS (inner)
1611 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1612 i)) == TREE_LIST
1613 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1614 i)))
1615 {
1616 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1617 /* Class iterators aren't allowed on OMP_SIMD, so the only
1618 case we need to solve is distribute parallel for. */
1619 gcc_assert (TREE_CODE (inner) == OMP_FOR
1620 && data[1]);
1621 tree orig_decl = TREE_PURPOSE (orig);
1622 tree c, cl = NULL_TREE;
1623 for (c = OMP_FOR_CLAUSES (inner);
1624 c; c = OMP_CLAUSE_CHAIN (c))
1625 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1626 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1627 && OMP_CLAUSE_DECL (c) == orig_decl)
1628 {
1629 cl = c;
1630 break;
1631 }
1632 if (cl == NULL_TREE)
1633 {
1634 for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1635 c; c = OMP_CLAUSE_CHAIN (c))
1636 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1637 && OMP_CLAUSE_DECL (c) == orig_decl)
1638 {
1639 cl = c;
1640 break;
1641 }
1642 }
1643 if (cl)
1644 {
1645 orig_decl = require_complete_type (orig_decl);
1646 tree inner_type = TREE_TYPE (orig_decl);
1647 if (orig_decl == error_mark_node)
1648 continue;
1649 if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1650 inner_type = TREE_TYPE (inner_type);
1651
1652 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1653 inner_type = TREE_TYPE (inner_type);
1654 get_copy_ctor (inner_type, tf_warning_or_error);
1655 }
1656 }
1657 }
1658 }
1659 /* FALLTHRU */
1660
1661 case FOR_STMT:
1662 case WHILE_STMT:
1663 case DO_STMT:
1664 case SWITCH_STMT:
1665 case CONTINUE_STMT:
1666 case BREAK_STMT:
1667 case OMP_FOR:
1668 case OMP_SIMD:
1669 case OMP_LOOP:
1670 case OACC_LOOP:
1671 case STATEMENT_LIST:
1672 /* These cases are handled by shared code. */
1673 c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1674 cp_genericize_r, cp_walk_subtrees);
1675 break;
1676
1677 case BIT_CAST_EXPR:
1678 *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
1679 TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1680 break;
1681
1682 default:
1683 if (IS_TYPE_OR_DECL_P (stmt))
1684 *walk_subtrees = 0;
1685 break;
1686 }
1687
1688 p_set->add (*stmt_p);
1689
1690 return NULL;
1691 }
1692
1693 /* Lower C++ front end trees to GENERIC in T_P. */
1694
1695 static void
1696 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1697 {
1698 struct cp_genericize_data wtd;
1699
1700 wtd.p_set = new hash_set<tree>;
1701 wtd.bind_expr_stack.create (0);
1702 wtd.omp_ctx = NULL;
1703 wtd.try_block = NULL_TREE;
1704 wtd.no_sanitize_p = false;
1705 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1706 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1707 delete wtd.p_set;
1708 if (sanitize_flags_p (SANITIZE_VPTR))
1709 cp_ubsan_instrument_member_accesses (t_p);
1710 }
1711
1712 /* If a non-void function doesn't obviously end with a return, add
1713 ubsan instrumentation code to verify it at runtime. If
1714 -fsanitize=return is not enabled, instrument __builtin_unreachable
1715 instead. */
1716
1717 static void
1718 cp_maybe_instrument_return (tree fndecl)
1719 {
1720 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1721 || DECL_CONSTRUCTOR_P (fndecl)
1722 || DECL_DESTRUCTOR_P (fndecl)
1723 || !targetm.warn_func_return (fndecl))
1724 return;
1725
1726 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1727 /* Don't add __builtin_unreachable () if not optimizing; it will not
1728 enable any optimizations in that case and will just break code that
1729 exercises the undefined behavior.
1730 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
1731 UBSan covers this with ubsan_instrument_return above, where
1732 sufficient information is provided, while the __builtin_unreachable ()
1733 below, if return sanitization is disabled, would just result in a
1734 hard to understand runtime error without a location. */
1734 && (!optimize
1735 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1736 return;
1737
1738 tree t = DECL_SAVED_TREE (fndecl);
1739 while (t)
1740 {
1741 switch (TREE_CODE (t))
1742 {
1743 case BIND_EXPR:
1744 t = BIND_EXPR_BODY (t);
1745 continue;
1746 case TRY_FINALLY_EXPR:
1747 case CLEANUP_POINT_EXPR:
1748 t = TREE_OPERAND (t, 0);
1749 continue;
1750 case STATEMENT_LIST:
1751 {
1752 tree_stmt_iterator i = tsi_last (t);
1753 while (!tsi_end_p (i))
1754 {
1755 tree p = tsi_stmt (i);
1756 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1757 break;
1758 tsi_prev (&i);
1759 }
1760 if (!tsi_end_p (i))
1761 {
1762 t = tsi_stmt (i);
1763 continue;
1764 }
1765 }
1766 break;
1767 case RETURN_EXPR:
1768 return;
1769 default:
1770 break;
1771 }
1772 break;
1773 }
1774 if (t == NULL_TREE)
1775 return;
1776 tree *p = &DECL_SAVED_TREE (fndecl);
1777 if (TREE_CODE (*p) == BIND_EXPR)
1778 p = &BIND_EXPR_BODY (*p);
1779
1780 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1781 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1782 t = ubsan_instrument_return (loc);
1783 else
1784 {
1785 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1786 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1787 }
1788
1789 append_to_statement_list (t, p);
1790 }
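
/* Sketch (editorial): for

     int f (int x) { if (x) return 1; }   // can fall off the end

   nothing at the end of the body is a RETURN_EXPR, so with
   -fsanitize=return the check built by ubsan_instrument_return is
   appended to the body; otherwise, when optimizing, a call to
   __builtin_unreachable () is appended instead.  */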
1791
1792 void
1793 cp_genericize (tree fndecl)
1794 {
1795 tree t;
1796
1797 /* Fix up the types of parms passed by invisible reference. */
1798 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1799 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1800 {
1801 /* If a function's arguments are copied to create a thunk,
1802 then DECL_BY_REFERENCE will be set -- but the type of the
1803 argument will be a pointer type, so we will never get
1804 here. */
1805 gcc_assert (!DECL_BY_REFERENCE (t));
1806 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1807 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1808 DECL_BY_REFERENCE (t) = 1;
1809 TREE_ADDRESSABLE (t) = 0;
1810 relayout_decl (t);
1811 }
1812
1813 /* Do the same for the return value. */
1814 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1815 {
1816 t = DECL_RESULT (fndecl);
1817 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1818 DECL_BY_REFERENCE (t) = 1;
1819 TREE_ADDRESSABLE (t) = 0;
1820 relayout_decl (t);
1821 if (DECL_NAME (t))
1822 {
1823 /* Adjust DECL_VALUE_EXPR of the original var. */
1824 tree outer = outer_curly_brace_block (current_function_decl);
1825 tree var;
1826
1827 if (outer)
1828 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1829 if (VAR_P (var)
1830 && DECL_NAME (t) == DECL_NAME (var)
1831 && DECL_HAS_VALUE_EXPR_P (var)
1832 && DECL_VALUE_EXPR (var) == t)
1833 {
1834 tree val = convert_from_reference (t);
1835 SET_DECL_VALUE_EXPR (var, val);
1836 break;
1837 }
1838 }
1839 }
1840
1841 /* If we're a clone, the body is already GIMPLE. */
1842 if (DECL_CLONED_FUNCTION_P (fndecl))
1843 return;
1844
1845 /* Allow cp_genericize calls to be nested. */
1846 bc_state_t save_state;
1847 save_bc_state (&save_state);
1848
1849 /* We do want to see every occurrence of the parms, so we can't just use
1850 walk_tree's hash functionality. */
1851 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1852
1853 cp_maybe_instrument_return (fndecl);
1854
1855 /* Do everything else. */
1856 c_genericize (fndecl);
1857 restore_bc_state (&save_state);
1858 }
1859 \f
1860 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1861 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1862 actually takes only one argument. */
1863
1864 static tree
1865 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1866 {
1867 tree defparm, parm, t;
1868 int i = 0;
1869 int nargs;
1870 tree *argarray;
1871
1872 if (fn == NULL)
1873 return NULL;
1874
1875 nargs = list_length (DECL_ARGUMENTS (fn));
1876 argarray = XALLOCAVEC (tree, nargs);
1877
1878 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1879 if (arg2)
1880 defparm = TREE_CHAIN (defparm);
1881
1882 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1883 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1884 {
1885 tree inner_type = TREE_TYPE (arg1);
1886 tree start1, end1, p1;
1887 tree start2 = NULL, p2 = NULL;
1888 tree ret = NULL, lab;
1889
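/* For arrays we emit, roughly (a sketch; FN may also receive default
   arguments, and the ARG2 lines are present only when ARG2 is given):

     p1 = &arg1[0]...[0];
     end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];
   lab:
     fn (p1, p2);
     p1 += sizeof (element);
     p2 += sizeof (element);
     if (p1 != end1) goto lab;  */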
1890 start1 = arg1;
1891 start2 = arg2;
1892 do
1893 {
1894 inner_type = TREE_TYPE (inner_type);
1895 start1 = build4 (ARRAY_REF, inner_type, start1,
1896 size_zero_node, NULL, NULL);
1897 if (arg2)
1898 start2 = build4 (ARRAY_REF, inner_type, start2,
1899 size_zero_node, NULL, NULL);
1900 }
1901 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1902 start1 = build_fold_addr_expr_loc (input_location, start1);
1903 if (arg2)
1904 start2 = build_fold_addr_expr_loc (input_location, start2);
1905
1906 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1907 end1 = fold_build_pointer_plus (start1, end1);
1908
1909 p1 = create_tmp_var (TREE_TYPE (start1));
1910 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1911 append_to_statement_list (t, &ret);
1912
1913 if (arg2)
1914 {
1915 p2 = create_tmp_var (TREE_TYPE (start2));
1916 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1917 append_to_statement_list (t, &ret);
1918 }
1919
1920 lab = create_artificial_label (input_location);
1921 t = build1 (LABEL_EXPR, void_type_node, lab);
1922 append_to_statement_list (t, &ret);
1923
1924 argarray[i++] = p1;
1925 if (arg2)
1926 argarray[i++] = p2;
1927 /* Handle default arguments. */
1928 for (parm = defparm; parm && parm != void_list_node;
1929 parm = TREE_CHAIN (parm), i++)
1930 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1931 TREE_PURPOSE (parm), fn,
1932 i - is_method, tf_warning_or_error);
1933 t = build_call_a (fn, i, argarray);
1934 t = fold_convert (void_type_node, t);
1935 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1936 append_to_statement_list (t, &ret);
1937
1938 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1939 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1940 append_to_statement_list (t, &ret);
1941
1942 if (arg2)
1943 {
1944 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1945 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1946 append_to_statement_list (t, &ret);
1947 }
1948
1949 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1950 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1951 append_to_statement_list (t, &ret);
1952
1953 return ret;
1954 }
1955 else
1956 {
1957 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1958 if (arg2)
1959 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1960 /* Handle default arguments. */
1961 for (parm = defparm; parm && parm != void_list_node;
1962 parm = TREE_CHAIN (parm), i++)
1963 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1964 TREE_PURPOSE (parm), fn,
1965 i - is_method, tf_warning_or_error);
1966 t = build_call_a (fn, i, argarray);
1967 t = fold_convert (void_type_node, t);
1968 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1969 }
1970 }
1971
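/* The CP_OMP_CLAUSE_INFO consulted below, when set, is a three-element
   TREE_VEC filled in by cxx_omp_create_clause_info: element 0 holds the
   (default or copy) constructor, element 1 the destructor and element 2
   the copy assignment operator. */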
1972 /* Return code to initialize DECL with its default constructor, or
1973 NULL if there's nothing to do. */
1974
1975 tree
1976 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1977 {
1978 tree info = CP_OMP_CLAUSE_INFO (clause);
1979 tree ret = NULL;
1980
1981 if (info)
1982 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1983
1984 return ret;
1985 }
1986
1987 /* Return code to initialize DST with a copy constructor from SRC. */
1988
1989 tree
1990 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1991 {
1992 tree info = CP_OMP_CLAUSE_INFO (clause);
1993 tree ret = NULL;
1994
1995 if (info)
1996 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1997 if (ret == NULL)
1998 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1999
2000 return ret;
2001 }
2002
2003 /* Similarly, except use an assignment operator instead. */
2004
2005 tree
2006 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2007 {
2008 tree info = CP_OMP_CLAUSE_INFO (clause);
2009 tree ret = NULL;
2010
2011 if (info)
2012 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2013 if (ret == NULL)
2014 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2015
2016 return ret;
2017 }
2018
2019 /* Return code to destroy DECL. */
2020
2021 tree
2022 cxx_omp_clause_dtor (tree clause, tree decl)
2023 {
2024 tree info = CP_OMP_CLAUSE_INFO (clause);
2025 tree ret = NULL;
2026
2027 if (info)
2028 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2029
2030 return ret;
2031 }
2032
2033 /* True if OpenMP should privatize what this DECL points to rather
2034 than the DECL itself. */
2035
2036 bool
2037 cxx_omp_privatize_by_reference (const_tree decl)
2038 {
2039 return (TYPE_REF_P (TREE_TYPE (decl))
2040 || is_invisiref_parm (decl));
2041 }
2042
2043 /* Return true if DECL is a const-qualified variable having no mutable member. */
2044 bool
2045 cxx_omp_const_qual_no_mutable (tree decl)
2046 {
2047 tree type = TREE_TYPE (decl);
2048 if (TYPE_REF_P (type))
2049 {
2050 if (!is_invisiref_parm (decl))
2051 return false;
2052 type = TREE_TYPE (type);
2053
2054 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2055 {
2056 /* NVR (the named return value optimization) doesn't preserve
2057 const qualification of the variable's type. */
2058 tree outer = outer_curly_brace_block (current_function_decl);
2059 tree var;
2060
2061 if (outer)
2062 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2063 if (VAR_P (var)
2064 && DECL_NAME (decl) == DECL_NAME (var)
2065 && (TYPE_MAIN_VARIANT (type)
2066 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2067 {
2068 if (TYPE_READONLY (TREE_TYPE (var)))
2069 type = TREE_TYPE (var);
2070 break;
2071 }
2072 }
2073 }
2074
2075 if (type == error_mark_node)
2076 return false;
2077
2078 /* Variables with const-qualified type having no mutable member
2079 are predetermined shared. */
2080 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2081 return true;
2082
2083 return false;
2084 }
2085
2086 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2087 of DECL is predetermined. */
2088
2089 enum omp_clause_default_kind
2090 cxx_omp_predetermined_sharing_1 (tree decl)
2091 {
2092 /* Static data members are predetermined shared. */
2093 if (TREE_STATIC (decl))
2094 {
2095 tree ctx = CP_DECL_CONTEXT (decl);
2096 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2097 return OMP_CLAUSE_DEFAULT_SHARED;
2098
2099 if (c_omp_predefined_variable (decl))
2100 return OMP_CLAUSE_DEFAULT_SHARED;
2101 }
2102
2103 /* this may not be specified in data-sharing clauses; still, we need
2104 to predetermine it firstprivate. */
2105 if (decl == current_class_ptr)
2106 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2107
2108 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2109 }
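
/* For instance, in (an illustrative sketch)

     struct S { static int s; };
     #pragma omp parallel default(none)
     S::s++;

   the reference to S::s needs no explicit data-sharing clause even
   under default(none), because static data members are predetermined
   shared. */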
2110
2111 /* Likewise, but also include the artificial vars. We don't want to
2112 disallow the artificial vars from being mentioned in explicit clauses,
2113 as we use artificial vars e.g. for loop constructs with random
2114 access iterators other than pointers, but during gimplification
2115 we do want to treat them as predetermined. */
2116
2117 enum omp_clause_default_kind
2118 cxx_omp_predetermined_sharing (tree decl)
2119 {
2120 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2121 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2122 return ret;
2123
2124 /* Predetermine artificial variables holding integral values; those
2125 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2126 gimplification. */
2127 if (VAR_P (decl)
2128 && DECL_ARTIFICIAL (decl)
2129 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2130 && !(DECL_LANG_SPECIFIC (decl)
2131 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2132 return OMP_CLAUSE_DEFAULT_SHARED;
2133
2134 /* Similarly for typeinfo symbols. */
2135 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2136 return OMP_CLAUSE_DEFAULT_SHARED;
2137
2138 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2139 }
2140
2141 enum omp_clause_defaultmap_kind
2142 cxx_omp_predetermined_mapping (tree decl)
2143 {
2144 /* Predetermine artificial variables holding integral values; those
2145 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2146 gimplification. */
2147 if (VAR_P (decl)
2148 && DECL_ARTIFICIAL (decl)
2149 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2150 && !(DECL_LANG_SPECIFIC (decl)
2151 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2152 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2153
2154 if (c_omp_predefined_variable (decl))
2155 return OMP_CLAUSE_DEFAULTMAP_TO;
2156
2157 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2158 }
2159
2160 /* Finalize an implicitly determined clause. */
2161
2162 void
2163 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2164 {
2165 tree decl, inner_type;
2166 bool make_shared = false;
2167
2168 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2169 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2170 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2171 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2172 return;
2173
2174 decl = OMP_CLAUSE_DECL (c);
2175 decl = require_complete_type (decl);
2176 inner_type = TREE_TYPE (decl);
2177 if (decl == error_mark_node)
2178 make_shared = true;
2179 else if (TYPE_REF_P (TREE_TYPE (decl)))
2180 inner_type = TREE_TYPE (inner_type);
2181
2182 /* We're interested in the base element, not arrays. */
2183 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2184 inner_type = TREE_TYPE (inner_type);
2185
2186 /* Check for special function availability by building a call to one.
2187 Save the results, because later we won't be in the right context
2188 for making these queries. */
2189 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2190 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2191 if (!make_shared
2192 && CLASS_TYPE_P (inner_type)
2193 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2194 true))
2195 make_shared = true;
2196
2197 if (make_shared)
2198 {
2199 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2200 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2201 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2202 }
2203 }
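
/* E.g. for firstprivate (a) where `a' is of class type, the copy
   constructor and destructor are looked up here, while we are still in
   the right context to do so, and saved in CP_OMP_CLAUSE_INFO; if that
   fails, the clause is downgraded to shared. */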
2204
2205 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2206 disregarded in an OpenMP construct, because it is going to be
2207 remapped during OpenMP lowering. SHARED is true if DECL
2208 is going to be shared, false if it is going to be privatized. */
2209
2210 bool
2211 cxx_omp_disregard_value_expr (tree decl, bool shared)
2212 {
2213 if (shared)
2214 return false;
2215 if (VAR_P (decl)
2216 && DECL_HAS_VALUE_EXPR_P (decl)
2217 && DECL_ARTIFICIAL (decl)
2218 && DECL_LANG_SPECIFIC (decl)
2219 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2220 return true;
2221 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2222 return true;
2223 return false;
2224 }
2225
2226 /* Fold expression X which is used as an rvalue if RVAL is true. */
2227
2228 tree
2229 cp_fold_maybe_rvalue (tree x, bool rval)
2230 {
2231 while (true)
2232 {
2233 x = cp_fold (x);
2234 if (rval)
2235 x = mark_rvalue_use (x);
2236 if (rval && DECL_P (x)
2237 && !TYPE_REF_P (TREE_TYPE (x)))
2238 {
2239 tree v = decl_constant_value (x);
2240 if (v != x && v != error_mark_node)
2241 {
2242 x = v;
2243 continue;
2244 }
2245 }
2246 break;
2247 }
2248 return x;
2249 }
2250
2251 /* Fold expression X which is used as an rvalue. */
2252
2253 tree
2254 cp_fold_rvalue (tree x)
2255 {
2256 return cp_fold_maybe_rvalue (x, true);
2257 }
2258
2259 /* Perform folding on expression X. */
2260
2261 tree
2262 cp_fully_fold (tree x)
2263 {
2264 if (processing_template_decl)
2265 return x;
2266 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2267 have to call both. */
2268 if (cxx_dialect >= cxx11)
2269 {
2270 x = maybe_constant_value (x);
2271 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2272 a TARGET_EXPR; undo that here. */
2273 if (TREE_CODE (x) == TARGET_EXPR)
2274 x = TARGET_EXPR_INITIAL (x);
2275 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2276 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2277 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2278 x = TREE_OPERAND (x, 0);
2279 }
2280 return cp_fold_rvalue (x);
2281 }
2282
2283 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2284 in some cases. */
2285
2286 tree
2287 cp_fully_fold_init (tree x)
2288 {
2289 if (processing_template_decl)
2290 return x;
2291 x = cp_fully_fold (x);
2292 hash_set<tree> pset;
2293 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2294 return x;
2295 }
2296
2297 /* c-common interface to cp_fold. If IN_INIT, this is in a static
2298 initializer and certain changes should be made to the folding done
2299 (FIXME: currently they are not). We never touch maybe_const, as it
2300 is only used for the C front end's C_MAYBE_CONST_EXPR. */
2301
2302 tree
2303 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2304 {
2305 return cp_fold_maybe_rvalue (x, !lval);
2306 }
2307
2308 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2309
2310 /* Dispose of the whole FOLD_CACHE. */
2311
2312 void
2313 clear_fold_cache (void)
2314 {
2315 if (fold_cache != NULL)
2316 fold_cache->empty ();
2317 }
2318
2319 /* This function tries to fold an expression X.
2320 To avoid combinatorial explosion, folding results are kept in fold_cache.
2321 If X is invalid, we don't fold at all.
2322 For performance reasons we don't cache expressions representing a
2323 declaration or constant.
2324 The function returns X or its folded variant. */
2325
2326 static tree
2327 cp_fold (tree x)
2328 {
2329 tree op0, op1, op2, op3;
2330 tree org_x = x, r = NULL_TREE;
2331 enum tree_code code;
2332 location_t loc;
2333 bool rval_ops = true;
2334
2335 if (!x || x == error_mark_node)
2336 return x;
2337
2338 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2339 return x;
2340
2341 /* Don't bother to cache DECLs or constants. */
2342 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2343 return x;
2344
2345 if (fold_cache == NULL)
2346 fold_cache = hash_map<tree, tree>::create_ggc (101);
2347
2348 if (tree *cached = fold_cache->get (x))
2349 return *cached;
2350
2351 uid_sensitive_constexpr_evaluation_checker c;
2352
2353 code = TREE_CODE (x);
2354 switch (code)
2355 {
2356 case CLEANUP_POINT_EXPR:
2357 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2358 effects. */
2359 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2360 if (!TREE_SIDE_EFFECTS (r))
2361 x = r;
2362 break;
2363
2364 case SIZEOF_EXPR:
2365 x = fold_sizeof_expr (x);
2366 break;
2367
2368 case VIEW_CONVERT_EXPR:
2369 rval_ops = false;
2370 /* FALLTHRU */
2371 case CONVERT_EXPR:
2372 case NOP_EXPR:
2373 case NON_LVALUE_EXPR:
2374
2375 if (VOID_TYPE_P (TREE_TYPE (x)))
2376 {
2377 /* This is just to make sure we don't end up with casts to
2378 void from error_mark_node. If we just return x, then
2379 cp_fold_r might fold the operand into error_mark_node and
2380 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2381 during gimplification doesn't like such casts.
2382 Don't create a new tree even if op0 != TREE_OPERAND (x, 0); the
2383 folded operand will be in the caches, and cp_fold_r will modify
2384 it in place if need be. */
2385 op0 = cp_fold (TREE_OPERAND (x, 0));
2386 if (op0 == error_mark_node)
2387 x = error_mark_node;
2388 break;
2389 }
2390
2391 loc = EXPR_LOCATION (x);
2392 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2393
2394 if (code == CONVERT_EXPR
2395 && SCALAR_TYPE_P (TREE_TYPE (x))
2396 && op0 != void_node)
2397 /* During parsing we used convert_to_*_nofold; re-convert now using the
2398 folding variants, since fold() doesn't do those transformations. */
2399 x = fold (convert (TREE_TYPE (x), op0));
2400 else if (op0 != TREE_OPERAND (x, 0))
2401 {
2402 if (op0 == error_mark_node)
2403 x = error_mark_node;
2404 else
2405 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2406 }
2407 else
2408 x = fold (x);
2409
2410 /* Conversion of an out-of-range value has implementation-defined
2411 behavior; the language considers it different from arithmetic
2412 overflow, which is undefined. */
2413 if (TREE_CODE (op0) == INTEGER_CST
2414 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2415 TREE_OVERFLOW (x) = false;
2416
2417 break;
2418
2419 case INDIRECT_REF:
2420 /* We don't need the decltype(auto) obfuscation anymore. */
2421 if (REF_PARENTHESIZED_P (x))
2422 {
2423 tree p = maybe_undo_parenthesized_ref (x);
2424 if (p != x)
2425 return cp_fold (p);
2426 }
2427 goto unary;
2428
2429 case ADDR_EXPR:
2430 loc = EXPR_LOCATION (x);
2431 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2432
2433 /* Cope with user tricks that amount to offsetof. */
2434 if (op0 != error_mark_node
2435 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2436 {
2437 tree val = get_base_address (op0);
2438 if (val
2439 && INDIRECT_REF_P (val)
2440 && COMPLETE_TYPE_P (TREE_TYPE (val))
2441 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2442 {
2443 val = TREE_OPERAND (val, 0);
2444 STRIP_NOPS (val);
2445 val = maybe_constant_value (val);
2446 if (TREE_CODE (val) == INTEGER_CST)
2447 return fold_offsetof (op0, TREE_TYPE (x));
2448 }
2449 }
2450 goto finish_unary;
2451
2452 case REALPART_EXPR:
2453 case IMAGPART_EXPR:
2454 rval_ops = false;
2455 /* FALLTHRU */
2456 case CONJ_EXPR:
2457 case FIX_TRUNC_EXPR:
2458 case FLOAT_EXPR:
2459 case NEGATE_EXPR:
2460 case ABS_EXPR:
2461 case ABSU_EXPR:
2462 case BIT_NOT_EXPR:
2463 case TRUTH_NOT_EXPR:
2464 case FIXED_CONVERT_EXPR:
2465 unary:
2466
2467 loc = EXPR_LOCATION (x);
2468 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2469
2470 finish_unary:
2471 if (op0 != TREE_OPERAND (x, 0))
2472 {
2473 if (op0 == error_mark_node)
2474 x = error_mark_node;
2475 else
2476 {
2477 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2478 if (code == INDIRECT_REF
2479 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2480 {
2481 TREE_READONLY (x) = TREE_READONLY (org_x);
2482 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2483 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2484 }
2485 }
2486 }
2487 else
2488 x = fold (x);
2489
2490 gcc_assert (TREE_CODE (x) != COND_EXPR
2491 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2492 break;
2493
2494 case UNARY_PLUS_EXPR:
2495 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2496 if (op0 == error_mark_node)
2497 x = error_mark_node;
2498 else
2499 x = fold_convert (TREE_TYPE (x), op0);
2500 break;
2501
2502 case POSTDECREMENT_EXPR:
2503 case POSTINCREMENT_EXPR:
2504 case INIT_EXPR:
2505 case PREDECREMENT_EXPR:
2506 case PREINCREMENT_EXPR:
2507 case COMPOUND_EXPR:
2508 case MODIFY_EXPR:
2509 rval_ops = false;
2510 /* FALLTHRU */
2511 case POINTER_PLUS_EXPR:
2512 case PLUS_EXPR:
2513 case POINTER_DIFF_EXPR:
2514 case MINUS_EXPR:
2515 case MULT_EXPR:
2516 case TRUNC_DIV_EXPR:
2517 case CEIL_DIV_EXPR:
2518 case FLOOR_DIV_EXPR:
2519 case ROUND_DIV_EXPR:
2520 case TRUNC_MOD_EXPR:
2521 case CEIL_MOD_EXPR:
2522 case ROUND_MOD_EXPR:
2523 case RDIV_EXPR:
2524 case EXACT_DIV_EXPR:
2525 case MIN_EXPR:
2526 case MAX_EXPR:
2527 case LSHIFT_EXPR:
2528 case RSHIFT_EXPR:
2529 case LROTATE_EXPR:
2530 case RROTATE_EXPR:
2531 case BIT_AND_EXPR:
2532 case BIT_IOR_EXPR:
2533 case BIT_XOR_EXPR:
2534 case TRUTH_AND_EXPR:
2535 case TRUTH_ANDIF_EXPR:
2536 case TRUTH_OR_EXPR:
2537 case TRUTH_ORIF_EXPR:
2538 case TRUTH_XOR_EXPR:
2539 case LT_EXPR: case LE_EXPR:
2540 case GT_EXPR: case GE_EXPR:
2541 case EQ_EXPR: case NE_EXPR:
2542 case UNORDERED_EXPR: case ORDERED_EXPR:
2543 case UNLT_EXPR: case UNLE_EXPR:
2544 case UNGT_EXPR: case UNGE_EXPR:
2545 case UNEQ_EXPR: case LTGT_EXPR:
2546 case RANGE_EXPR: case COMPLEX_EXPR:
2547
2548 loc = EXPR_LOCATION (x);
2549 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2550 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2551
2552 /* decltype(nullptr) has only one value, so optimize away all comparisons
2553 with that type right away; keeping them in the IL causes trouble for
2554 various optimizations. */
2555 if (COMPARISON_CLASS_P (org_x)
2556 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2557 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2558 {
2559 switch (code)
2560 {
2561 case EQ_EXPR:
2562 x = constant_boolean_node (true, TREE_TYPE (x));
2563 break;
2564 case NE_EXPR:
2565 x = constant_boolean_node (false, TREE_TYPE (x));
2566 break;
2567 default:
2568 gcc_unreachable ();
2569 }
2570 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2571 op0, op1);
2572 }
2573
2574 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2575 {
2576 if (op0 == error_mark_node || op1 == error_mark_node)
2577 x = error_mark_node;
2578 else
2579 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2580 }
2581 else
2582 x = fold (x);
2583
2584 /* This is only needed for -Wnonnull-compare and only if warnings
2585 are suppressed on ORG_X, but to avoid that option affecting code
2586 generation, we always do it. */
2587 if (COMPARISON_CLASS_P (org_x))
2588 {
2589 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2590 ;
2591 else if (COMPARISON_CLASS_P (x))
2592 {
2593 if (warn_nonnull_compare
2594 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2595 suppress_warning (x, OPT_Wnonnull_compare);
2596 }
2597 /* Otherwise give up on optimizing these; let the GIMPLE folders
2598 optimize them later on. */
2599 else if (op0 != TREE_OPERAND (org_x, 0)
2600 || op1 != TREE_OPERAND (org_x, 1))
2601 {
2602 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2603 if (warn_nonnull_compare
2604 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2605 suppress_warning (x, OPT_Wnonnull_compare);
2606 }
2607 else
2608 x = org_x;
2609 }
2610
2611 break;
2612
2613 case VEC_COND_EXPR:
2614 case COND_EXPR:
2615 loc = EXPR_LOCATION (x);
2616 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2617 op1 = cp_fold (TREE_OPERAND (x, 1));
2618 op2 = cp_fold (TREE_OPERAND (x, 2));
2619
2620 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2621 {
2622 warning_sentinel s (warn_int_in_bool_context);
2623 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2624 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2625 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2626 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2627 }
2628 else if (VOID_TYPE_P (TREE_TYPE (x)))
2629 {
2630 if (TREE_CODE (op0) == INTEGER_CST)
2631 {
2632 /* If the condition is constant, fold can fold away the COND_EXPR.
2633 Some statement-level uses of COND_EXPR have a NULL branch; avoid
2634 a crash in fold by substituting empty statements for them. */
2635 if (!op1)
2636 op1 = build_empty_stmt (loc);
2637 if (!op2)
2638 op2 = build_empty_stmt (loc);
2639 }
2640 else
2641 {
2642 /* Otherwise, don't bother folding a void COND_EXPR whose condition
2643 isn't constant, since it can't produce a constant value. */
2644 if (op0 != TREE_OPERAND (x, 0)
2645 || op1 != TREE_OPERAND (x, 1)
2646 || op2 != TREE_OPERAND (x, 2))
2647 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2648 break;
2649 }
2650 }
2651
2652 if (op0 != TREE_OPERAND (x, 0)
2653 || op1 != TREE_OPERAND (x, 1)
2654 || op2 != TREE_OPERAND (x, 2))
2655 {
2656 if (op0 == error_mark_node
2657 || op1 == error_mark_node
2658 || op2 == error_mark_node)
2659 x = error_mark_node;
2660 else
2661 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2662 }
2663 else
2664 x = fold (x);
2665
2666 /* A COND_EXPR might have incompatible types in branches if one or both
2667 arms are bitfields. If folding exposed such a branch, fix it up. */
2668 if (TREE_CODE (x) != code
2669 && x != error_mark_node
2670 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2671 x = fold_convert (TREE_TYPE (org_x), x);
2672
2673 break;
2674
2675 case CALL_EXPR:
2676 {
2677 int sv = optimize, nw = sv;
2678 tree callee = get_callee_fndecl (x);
2679
2680 /* Some built-in function calls will be evaluated at compile-time in
2681 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2682 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2683 if (callee && fndecl_built_in_p (callee) && !optimize
2684 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2685 && current_function_decl
2686 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2687 nw = 1;
2688
2689 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2690 {
2691 switch (DECL_FE_FUNCTION_CODE (callee))
2692 {
2693 /* Defer folding __builtin_is_constant_evaluated. */
2694 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2695 break;
2696 case CP_BUILT_IN_SOURCE_LOCATION:
2697 x = fold_builtin_source_location (EXPR_LOCATION (x));
2698 break;
2699 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2700 x = fold_builtin_is_corresponding_member
2701 (EXPR_LOCATION (x), call_expr_nargs (x),
2702 &CALL_EXPR_ARG (x, 0));
2703 break;
2704 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2705 x = fold_builtin_is_pointer_inverconvertible_with_class
2706 (EXPR_LOCATION (x), call_expr_nargs (x),
2707 &CALL_EXPR_ARG (x, 0));
2708 break;
2709 default:
2710 break;
2711 }
2712 break;
2713 }
2714
2723 bool changed = false;
2724 int m = call_expr_nargs (x);
2725 for (int i = 0; i < m; i++)
2726 {
2727 r = cp_fold (CALL_EXPR_ARG (x, i));
2728 if (r != CALL_EXPR_ARG (x, i))
2729 {
2730 if (r == error_mark_node)
2731 {
2732 x = error_mark_node;
2733 break;
2734 }
2735 if (!changed)
2736 x = copy_node (x);
2737 CALL_EXPR_ARG (x, i) = r;
2738 changed = true;
2739 }
2740 }
2741 if (x == error_mark_node)
2742 break;
2743
2744 optimize = nw;
2745 r = fold (x);
2746 optimize = sv;
2747
2748 if (TREE_CODE (r) != CALL_EXPR)
2749 {
2750 x = cp_fold (r);
2751 break;
2752 }
2753
2754 optimize = nw;
2755
2756 /* Invoke maybe_constant_value for functions declared
2757 constexpr and not called with AGGR_INIT_EXPRs.
2758 TODO:
2759 Do constexpr expansion of expressions where the call itself is not
2760 constant, but the call followed by an INDIRECT_REF is. */
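/* For example (an illustrative sketch), given

     constexpr int sq (int i) { return i * i; }

   a call sq (4) folds to 16 here; if CALLEE is a constructor, the
   folded result R is rewrapped below as an INIT_EXPR storing R into
   the object the first argument points to. */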
2761 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2762 && !flag_no_inline)
2763 r = maybe_constant_value (x);
2764 optimize = sv;
2765
2766 if (TREE_CODE (r) != CALL_EXPR)
2767 {
2768 if (DECL_CONSTRUCTOR_P (callee))
2769 {
2770 loc = EXPR_LOCATION (x);
2771 tree s = build_fold_indirect_ref_loc (loc,
2772 CALL_EXPR_ARG (x, 0));
2773 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2774 }
2775 x = r;
2776 break;
2777 }
2778
2779 break;
2780 }
2781
2782 case CONSTRUCTOR:
2783 {
2784 unsigned i;
2785 constructor_elt *p;
2786 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2787 vec<constructor_elt, va_gc> *nelts = NULL;
2788 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2789 {
2790 tree op = cp_fold (p->value);
2791 if (op != p->value)
2792 {
2793 if (op == error_mark_node)
2794 {
2795 x = error_mark_node;
2796 vec_free (nelts);
2797 break;
2798 }
2799 if (nelts == NULL)
2800 nelts = elts->copy ();
2801 (*nelts)[i].value = op;
2802 }
2803 }
2804 if (nelts)
2805 {
2806 x = build_constructor (TREE_TYPE (x), nelts);
2807 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2808 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2809 }
2810 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2811 x = fold (x);
2812 break;
2813 }
2814 case TREE_VEC:
2815 {
2816 bool changed = false;
2817 int n = TREE_VEC_LENGTH (x);
2818
2819 for (int i = 0; i < n; i++)
2820 {
2821 tree op = cp_fold (TREE_VEC_ELT (x, i));
2822 if (op != TREE_VEC_ELT (x, i))
2823 {
2824 if (!changed)
2825 x = copy_node (x);
2826 TREE_VEC_ELT (x, i) = op;
2827 changed = true;
2828 }
2829 }
2830 }
2831
2832 break;
2833
2834 case ARRAY_REF:
2835 case ARRAY_RANGE_REF:
2836
2837 loc = EXPR_LOCATION (x);
2838 op0 = cp_fold (TREE_OPERAND (x, 0));
2839 op1 = cp_fold (TREE_OPERAND (x, 1));
2840 op2 = cp_fold (TREE_OPERAND (x, 2));
2841 op3 = cp_fold (TREE_OPERAND (x, 3));
2842
2843 if (op0 != TREE_OPERAND (x, 0)
2844 || op1 != TREE_OPERAND (x, 1)
2845 || op2 != TREE_OPERAND (x, 2)
2846 || op3 != TREE_OPERAND (x, 3))
2847 {
2848 if (op0 == error_mark_node
2849 || op1 == error_mark_node
2850 || op2 == error_mark_node
2851 || op3 == error_mark_node)
2852 x = error_mark_node;
2853 else
2854 {
2855 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2856 TREE_READONLY (x) = TREE_READONLY (org_x);
2857 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2858 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2859 }
2860 }
2861
2862 x = fold (x);
2863 break;
2864
2865 case SAVE_EXPR:
2866 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2867 folding, evaluates to an invariant. In that case there is no need
2868 to wrap the folded tree in a SAVE_EXPR. */
2869 r = cp_fold (TREE_OPERAND (x, 0));
2870 if (tree_invariant_p (r))
2871 x = r;
2872 break;
2873
2874 case REQUIRES_EXPR:
2875 x = evaluate_requires_expr (x);
2876 break;
2877
2878 default:
2879 return org_x;
2880 }
2881
2882 if (EXPR_P (x) && TREE_CODE (x) == code)
2883 {
2884 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2885 copy_warning (x, org_x);
2886 }
2887
2888 if (!c.evaluation_restricted_p ())
2889 {
2890 fold_cache->put (org_x, x);
2891 /* Make sure we don't try to fold an already folded result again. */
2892 if (x != org_x)
2893 fold_cache->put (x, x);
2894 }
2895
2896 return x;
2897 }
2898
2899 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
2900
2901 tree
2902 lookup_hotness_attribute (tree list)
2903 {
2904 for (; list; list = TREE_CHAIN (list))
2905 {
2906 tree name = get_attribute_name (list);
2907 if (is_attribute_p ("hot", name)
2908 || is_attribute_p ("cold", name)
2909 || is_attribute_p ("likely", name)
2910 || is_attribute_p ("unlikely", name))
2911 break;
2912 }
2913 return list;
2914 }
2915
2916 /* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST. */
2917
2918 static tree
2919 remove_hotness_attribute (tree list)
2920 {
2921 list = remove_attribute ("hot", list);
2922 list = remove_attribute ("cold", list);
2923 list = remove_attribute ("likely", list);
2924 list = remove_attribute ("unlikely", list);
2925 return list;
2926 }
2927
2928 /* If [[likely]] or [[unlikely]] appears on this statement, turn it into a
2929 PREDICT_EXPR. */
2930
2931 tree
2932 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2933 {
2934 if (std_attrs == error_mark_node)
2935 return std_attrs;
2936 if (tree attr = lookup_hotness_attribute (std_attrs))
2937 {
2938 tree name = get_attribute_name (attr);
2939 bool hot = (is_attribute_p ("hot", name)
2940 || is_attribute_p ("likely", name));
2941 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2942 hot ? TAKEN : NOT_TAKEN);
2943 SET_EXPR_LOCATION (pred, attrs_loc);
2944 add_stmt (pred);
2945 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2946 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2947 get_attribute_name (other), name);
2948 std_attrs = remove_hotness_attribute (std_attrs);
2949 }
2950 return std_attrs;
2951 }
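
/* For example (an illustrative sketch):

     if (x)
       [[likely]] f ();

   emits a PREDICT_EXPR with PRED_HOT_LABEL/TAKEN at the point of the
   attribute, so later passes treat the guarded statement as the hot
   path; [[unlikely]] yields PRED_COLD_LABEL/NOT_TAKEN instead. */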
2952
2953 /* Helper of fold_builtin_source_location, return the
2954 std::source_location::__impl type after performing verification
2955 on it. LOC is used for reporting any errors. */
2956
2957 static tree
2958 get_source_location_impl_type (location_t loc)
2959 {
2960 tree name = get_identifier ("source_location");
2961 tree decl = lookup_qualified_name (std_node, name);
2962 if (TREE_CODE (decl) != TYPE_DECL)
2963 {
2964 auto_diagnostic_group d;
2965 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2966 qualified_name_lookup_error (std_node, name, decl, loc);
2967 else
2968 error_at (loc, "%qD is not a type", decl);
2969 return error_mark_node;
2970 }
2971 name = get_identifier ("__impl");
2972 tree type = TREE_TYPE (decl);
2973 decl = lookup_qualified_name (type, name);
2974 if (TREE_CODE (decl) != TYPE_DECL)
2975 {
2976 auto_diagnostic_group d;
2977 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2978 qualified_name_lookup_error (type, name, decl, loc);
2979 else
2980 error_at (loc, "%qD is not a type", decl);
2981 return error_mark_node;
2982 }
2983 type = TREE_TYPE (decl);
2984 if (TREE_CODE (type) != RECORD_TYPE)
2985 {
2986 error_at (loc, "%qD is not a class type", decl);
2987 return error_mark_node;
2988 }
2989
2990 int cnt = 0;
2991 for (tree field = TYPE_FIELDS (type);
2992 (field = next_initializable_field (field)) != NULL_TREE;
2993 field = DECL_CHAIN (field))
2994 {
2995 if (DECL_NAME (field) != NULL_TREE)
2996 {
2997 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
2998 if (strcmp (n, "_M_file_name") == 0
2999 || strcmp (n, "_M_function_name") == 0)
3000 {
3001 if (TREE_TYPE (field) != const_string_type_node)
3002 {
3003 error_at (loc, "%qD does not have %<const char *%> type",
3004 field);
3005 return error_mark_node;
3006 }
3007 cnt++;
3008 continue;
3009 }
3010 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3011 {
3012 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3013 {
3014 error_at (loc, "%qD does not have integral type", field);
3015 return error_mark_node;
3016 }
3017 cnt++;
3018 continue;
3019 }
3020 }
3021 cnt = 0;
3022 break;
3023 }
3024 if (cnt != 4)
3025 {
3026 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3027 "non-static data members %<_M_file_name%>, "
3028 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3029 return error_mark_node;
3030 }
3031 return build_qualified_type (type, TYPE_QUAL_CONST);
3032 }
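
/* The shape this expects matches, roughly, libstdc++'s definition:

     namespace std {
       struct source_location {
         struct __impl {
           const char *_M_file_name;
           const char *_M_function_name;
           unsigned _M_line;
           unsigned _M_column;
         };
       };
     }

   (the integral members may have any integral type); anything else is
   rejected above. */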
3033
3034 /* Entry type for the source_location_table hash table below. */
3035 struct GTY((for_user)) source_location_table_entry {
3036 location_t loc;
3037 unsigned uid;
3038 tree var;
3039 };
3040
3041 /* Traits class for the source_location_table hash table below. */
3042
3043 struct source_location_table_entry_hash
3044 : ggc_remove <source_location_table_entry>
3045 {
3046 typedef source_location_table_entry value_type;
3047 typedef source_location_table_entry compare_type;
3048
3049 static hashval_t
3050 hash (const source_location_table_entry &ref)
3051 {
3052 inchash::hash hstate (0);
3053 hstate.add_int (ref.loc);
3054 hstate.add_int (ref.uid);
3055 return hstate.end ();
3056 }
3057
3058 static bool
3059 equal (const source_location_table_entry &ref1,
3060 const source_location_table_entry &ref2)
3061 {
3062 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3063 }
3064
3065 static void
3066 mark_deleted (source_location_table_entry &ref)
3067 {
3068 ref.loc = UNKNOWN_LOCATION;
3069 ref.uid = -1U;
3070 ref.var = NULL_TREE;
3071 }
3072
3073 static const bool empty_zero_p = true;
3074
3075 static void
3076 mark_empty (source_location_table_entry &ref)
3077 {
3078 ref.loc = UNKNOWN_LOCATION;
3079 ref.uid = 0;
3080 ref.var = NULL_TREE;
3081 }
3082
3083 static bool
3084 is_deleted (const source_location_table_entry &ref)
3085 {
3086 return (ref.loc == UNKNOWN_LOCATION
3087 && ref.uid == -1U
3088 && ref.var == NULL_TREE);
3089 }
3090
3091 static bool
3092 is_empty (const source_location_table_entry &ref)
3093 {
3094 return (ref.loc == UNKNOWN_LOCATION
3095 && ref.uid == 0
3096 && ref.var == NULL_TREE);
3097 }
3098
3099 static void
3100 pch_nx (source_location_table_entry &p)
3101 {
3102 extern void gt_pch_nx (source_location_table_entry &);
3103 gt_pch_nx (p);
3104 }
3105
3106 static void
3107 pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3108 {
3109 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3110 void *);
3111 gt_pch_nx (&p, op, cookie);
3112 }
3113 };
3114
3115 static GTY(()) hash_table <source_location_table_entry_hash>
3116 *source_location_table;
3117 static GTY(()) unsigned int source_location_id;
3118
3119 /* Fold __builtin_source_location () call. LOC is the location
3120 of the call. */
3121
3122 tree
3123 fold_builtin_source_location (location_t loc)
3124 {
3125 if (source_location_impl == NULL_TREE)
3126 {
3127 auto_diagnostic_group d;
3128 source_location_impl = get_source_location_impl_type (loc);
3129 if (source_location_impl == error_mark_node)
3130 inform (loc, "evaluating %qs", "__builtin_source_location");
3131 }
3132 if (source_location_impl == error_mark_node)
3133 return build_zero_cst (const_ptr_type_node);
3134 if (source_location_table == NULL)
3135 source_location_table
3136 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3137 const line_map_ordinary *map;
3138 source_location_table_entry entry;
3139 entry.loc
3140 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3141 &map);
3142 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3143 entry.var = error_mark_node;
3144 source_location_table_entry *entryp
3145 = source_location_table->find_slot (entry, INSERT);
3146 tree var;
3147 if (entryp->var)
3148 var = entryp->var;
3149 else
3150 {
3151 char tmp_name[32];
3152 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3153 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3154 source_location_impl);
3155 TREE_STATIC (var) = 1;
3156 TREE_PUBLIC (var) = 0;
3157 DECL_ARTIFICIAL (var) = 1;
3158 DECL_IGNORED_P (var) = 1;
3159 DECL_EXTERNAL (var) = 0;
3160 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3161 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3162 layout_decl (var, 0);
3163
3164 vec<constructor_elt, va_gc> *v = NULL;
3165 vec_alloc (v, 4);
3166 for (tree field = TYPE_FIELDS (source_location_impl);
3167 (field = next_initializable_field (field)) != NULL_TREE;
3168 field = DECL_CHAIN (field))
3169 {
3170 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3171 tree val = NULL_TREE;
3172 if (strcmp (n, "_M_file_name") == 0)
3173 {
3174 if (const char *fname = LOCATION_FILE (loc))
3175 {
3176 fname = remap_macro_filename (fname);
3177 val = build_string_literal (strlen (fname) + 1, fname);
3178 }
3179 else
3180 val = build_string_literal (1, "");
3181 }
3182 else if (strcmp (n, "_M_function_name") == 0)
3183 {
3184 const char *name = "";
3185
3186 if (current_function_decl)
3187 name = cxx_printable_name (current_function_decl, 2);
3188
3189 val = build_string_literal (strlen (name) + 1, name);
3190 }
3191 else if (strcmp (n, "_M_line") == 0)
3192 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3193 else if (strcmp (n, "_M_column") == 0)
3194 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3195 else
3196 gcc_unreachable ();
3197 CONSTRUCTOR_APPEND_ELT (v, field, val);
3198 }
3199
3200 tree ctor = build_constructor (source_location_impl, v);
3201 TREE_CONSTANT (ctor) = 1;
3202 TREE_STATIC (ctor) = 1;
3203 DECL_INITIAL (var) = ctor;
3204 varpool_node::finalize_decl (var);
3205 *entryp = entry;
3206 entryp->var = var;
3207 }
3208
3209 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3210 }
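
/* Consequently, a use such as

     const void *p = __builtin_source_location ();

   at, say, line 10, column 25 of file.C inside `void f ()' folds to the
   address of a static constexpr __impl object initialized to
   { "file.C", "void f()", 10, 25 }, and all uses at the same location
   in the same function share one object. */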
3211
3212 #include "gt-cp-cp-gimplify.h"