/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.

   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

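  /* If the condition is a constant, drop the COND_EXPR and keep only the
     live branch, provided the discarded branch has no side effects.  */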
  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */
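
/* With COND_IS_FIRST, the generated GENERIC is roughly

     LOOP_EXPR
       if (COND) ; else goto break_label;
       BODY
       continue_label:
       INCR
     break_label:

   where "break" and "continue" in BODY have been lowered to gotos, and
   for do-while loops the exit test follows the body instead.  */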

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
    protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
        {
          tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
          SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
          append_to_statement_list (d, &stmt_list);
        }
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
                       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* If we might need to clean up a partially constructed object, break down
     the CONSTRUCTOR with split_nonconstant_init.  */
  if (TREE_CODE (from) == CONSTRUCTOR
      && TREE_SIDE_EFFECTS (from)
      && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (to)))
    {
      gimplify_expr (&to, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
      replace_placeholders (from, to);
      from = split_nonconstant_init (to, from);
      cp_genericize_tree (&from, false);
      *expr_p = from;
      return;
    }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

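  /* Wrap the gimplified body in a GIMPLE_TRY whose catch handler is an
     EH_MUST_NOT_THROW region that calls std::terminate (terminate_fn).  */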
  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
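        /* A VEC_INIT_EXPR initializes an array; expand it via
           build_vec_init into the equivalent initialization code, then
           fold and genericize the result.  */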
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        hash_set<tree> pset;
        cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
        cp_genericize_tree (expr_p, false);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p, pre_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
              mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
              LHS, so for scalar assignment we need to preevaluate if the
              RHS could be affected by LHS side-effects even if it has no
              side-effects of its own.  We don't need this for classes because
              class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
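      /* With flag_strong_eval_order == 2 (C++17 evaluation order), when the
         callee is a function pointer with no known fndecl, force it to be
         evaluated before the arguments, so side effects in the arguments
         can't change it.  */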
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl
              && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
                                    BUILT_IN_FRONTEND))
            *expr_p = boolean_false_node;
          else if (decl
                   && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
                                         BUILT_IN_FRONTEND))
            *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

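/* Return true if T is a PARM_DECL or RESULT_DECL that is passed by
   invisible reference, i.e. DECL_BY_REFERENCE is set on it.  */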
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs of the two int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-OMP cases is something to move into
   the middle end.  For now, since most folding is implemented only on
   GENERIC in fold-const, we need to perform this before the
   transformation to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
         always the same tree, which the first time cp_fold_r has been
         called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

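      /* Fold only the operands of the loop-control comparisons and
         increments; the controlling expressions themselves are left
         alone so they keep the shape the gimplifier expects.  */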
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR
                        || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (type, op0, op1);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          TREE_USED (h->to) |= TREE_USED (stmt);
          *walk_subtrees = 0;
          return NULL;
        }
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
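          /* The parm already holds the object's address (it is passed by
             invisible reference), so taking its address is just a
             conversion of that value to the pointer type.  */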
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chain.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC; the back end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (TREE_NO_WARNING (stmt))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "%<throw%> will always call %<terminate%>")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to %<noexcept%>");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this %<throw%> will call %<terminate%> "
                          "because destructors default to %<noexcept%>");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
         normal functions.  */
      if (concept_check_p (stmt))
        {
          *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
          *walk_subtrees = 0;
          break;
        }

      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
      *walk_subtrees = 0;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function that should end with a return statement doesn't
   obviously end with one, add ubsan instrumentation code to verify it at
   runtime.  If -fsanitize=return is not enabled, instrument
   __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
         improve any optimizations in that case, just break UB code.
         Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
         UBSan covers this with ubsan_instrument_return above where sufficient
         information is provided, while the __builtin_unreachable () below
         if return sanitization is disabled will just result in hard to
         understand runtime error without location.  */
      && (!optimize
          || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
        case CLEANUP_POINT_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
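            /* Skip trailing DEBUG_BEGIN_STMT markers to find the last
               real statement.  */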
1792 tree_stmt_iterator i = tsi_last (t);
1793 while (!tsi_end_p (i))
1794 {
1795 tree p = tsi_stmt (i);
1796 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1797 break;
1798 tsi_prev (&i);
1799 }
1800 if (!tsi_end_p (i))
1801 {
1802 t = tsi_stmt (i);
1803 continue;
1804 }
1805 }
1806 break;
1807 case RETURN_EXPR:
1808 return;
1809 default:
1810 break;
1811 }
1812 break;
1813 }
1814 if (t == NULL_TREE)
1815 return;
1816 tree *p = &DECL_SAVED_TREE (fndecl);
1817 if (TREE_CODE (*p) == BIND_EXPR)
1818 p = &BIND_EXPR_BODY (*p);
1819
1820 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1821 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1822 t = ubsan_instrument_return (loc);
1823 else
1824 {
1825 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1826 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1827 }
1828
1829 append_to_statement_list (t, p);
1830 }
1831
1832 void
1833 cp_genericize (tree fndecl)
1834 {
1835 tree t;
1836
1837 /* Fix up the types of parms passed by invisible reference. */
1838 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1839 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1840 {
1841 /* If a function's arguments are copied to create a thunk,
1842 then DECL_BY_REFERENCE will be set -- but the type of the
1843 argument will be a pointer type, so we will never get
1844 here. */
1845 gcc_assert (!DECL_BY_REFERENCE (t));
1846 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1847 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1848 DECL_BY_REFERENCE (t) = 1;
1849 TREE_ADDRESSABLE (t) = 0;
1850 relayout_decl (t);
1851 }
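/* Illustrative example: given

     struct A { A (const A &); int i; };
     void f (A a);

   `a' is passed by invisible reference, so the loop above rewrites the
   PARM_DECL's type to the reference type recorded in DECL_ARG_TYPE and
   sets DECL_BY_REFERENCE on it.  */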
1852
1853 /* Do the same for the return value. */
1854 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1855 {
1856 t = DECL_RESULT (fndecl);
1857 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1858 DECL_BY_REFERENCE (t) = 1;
1859 TREE_ADDRESSABLE (t) = 0;
1860 relayout_decl (t);
1861 if (DECL_NAME (t))
1862 {
1863 /* Adjust DECL_VALUE_EXPR of the original var. */
1864 tree outer = outer_curly_brace_block (current_function_decl);
1865 tree var;
1866
1867 if (outer)
1868 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1869 if (VAR_P (var)
1870 && DECL_NAME (t) == DECL_NAME (var)
1871 && DECL_HAS_VALUE_EXPR_P (var)
1872 && DECL_VALUE_EXPR (var) == t)
1873 {
1874 tree val = convert_from_reference (t);
1875 SET_DECL_VALUE_EXPR (var, val);
1876 break;
1877 }
1878 }
1879 }
1880
1881 /* If we're a clone, the body is already GIMPLE. */
1882 if (DECL_CLONED_FUNCTION_P (fndecl))
1883 return;
1884
1885 /* Allow cp_genericize calls to be nested. */
1886 tree save_bc_label[2];
1887 save_bc_label[bc_break] = bc_label[bc_break];
1888 save_bc_label[bc_continue] = bc_label[bc_continue];
1889 bc_label[bc_break] = NULL_TREE;
1890 bc_label[bc_continue] = NULL_TREE;
1891
1892 /* We do want to see every occurrence of the parms, so we can't just use
1893 walk_tree's hash functionality. */
1894 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1895
1896 cp_maybe_instrument_return (fndecl);
1897
1898 /* Do everything else. */
1899 c_genericize (fndecl);
1900
1901 gcc_assert (bc_label[bc_break] == NULL);
1902 gcc_assert (bc_label[bc_continue] == NULL);
1903 bc_label[bc_break] = save_bc_label[bc_break];
1904 bc_label[bc_continue] = save_bc_label[bc_continue];
1905 }
1906 \f
1907 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1908 NULL if there is in fact nothing to do. ARG2 may be NULL if FN
1909 only takes one argument. */
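/* For array operands the code built below is roughly (an illustrative
   sketch):

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     p2 = &arg2[0]...[0];              // only if ARG2 is given
   lab:
     fn (p1, p2, default args...);
     p1 += sizeof (element);  p2 += sizeof (element);
     if (p1 != end1) goto lab;  */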
1910
1911 static tree
1912 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1913 {
1914 tree defparm, parm, t;
1915 int i = 0;
1916 int nargs;
1917 tree *argarray;
1918
1919 if (fn == NULL)
1920 return NULL;
1921
1922 nargs = list_length (DECL_ARGUMENTS (fn));
1923 argarray = XALLOCAVEC (tree, nargs);
1924
1925 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1926 if (arg2)
1927 defparm = TREE_CHAIN (defparm);
1928
1929 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1930 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1931 {
1932 tree inner_type = TREE_TYPE (arg1);
1933 tree start1, end1, p1;
1934 tree start2 = NULL, p2 = NULL;
1935 tree ret = NULL, lab;
1936
1937 start1 = arg1;
1938 start2 = arg2;
1939 do
1940 {
1941 inner_type = TREE_TYPE (inner_type);
1942 start1 = build4 (ARRAY_REF, inner_type, start1,
1943 size_zero_node, NULL, NULL);
1944 if (arg2)
1945 start2 = build4 (ARRAY_REF, inner_type, start2,
1946 size_zero_node, NULL, NULL);
1947 }
1948 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1949 start1 = build_fold_addr_expr_loc (input_location, start1);
1950 if (arg2)
1951 start2 = build_fold_addr_expr_loc (input_location, start2);
1952
1953 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1954 end1 = fold_build_pointer_plus (start1, end1);
1955
1956 p1 = create_tmp_var (TREE_TYPE (start1));
1957 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1958 append_to_statement_list (t, &ret);
1959
1960 if (arg2)
1961 {
1962 p2 = create_tmp_var (TREE_TYPE (start2));
1963 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1964 append_to_statement_list (t, &ret);
1965 }
1966
1967 lab = create_artificial_label (input_location);
1968 t = build1 (LABEL_EXPR, void_type_node, lab);
1969 append_to_statement_list (t, &ret);
1970
1971 argarray[i++] = p1;
1972 if (arg2)
1973 argarray[i++] = p2;
1974 /* Handle default arguments. */
1975 for (parm = defparm; parm && parm != void_list_node;
1976 parm = TREE_CHAIN (parm), i++)
1977 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1978 TREE_PURPOSE (parm), fn,
1979 i - is_method, tf_warning_or_error);
1980 t = build_call_a (fn, i, argarray);
1981 t = fold_convert (void_type_node, t);
1982 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1983 append_to_statement_list (t, &ret);
1984
1985 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1986 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1987 append_to_statement_list (t, &ret);
1988
1989 if (arg2)
1990 {
1991 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1992 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1993 append_to_statement_list (t, &ret);
1994 }
1995
1996 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1997 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1998 append_to_statement_list (t, &ret);
1999
2000 return ret;
2001 }
2002 else
2003 {
2004 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2005 if (arg2)
2006 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2007 /* Handle default arguments. */
2008 for (parm = defparm; parm && parm != void_list_node;
2009 parm = TREE_CHAIN (parm), i++)
2010 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2011 TREE_PURPOSE (parm), fn,
2012 i - is_method, tf_warning_or_error);
2013 t = build_call_a (fn, i, argarray);
2014 t = fold_convert (void_type_node, t);
2015 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2016 }
2017 }
2018
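/* In the clause-info accessors below, CP_OMP_CLAUSE_INFO is (when
   present) a TREE_VEC whose elements 0, 1 and 2 hold the (copy)
   constructor, destructor and assignment operator, respectively.  */
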
2019 /* Return code to initialize DECL with its default constructor, or
2020 NULL if there's nothing to do. */
2021
2022 tree
2023 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2024 {
2025 tree info = CP_OMP_CLAUSE_INFO (clause);
2026 tree ret = NULL;
2027
2028 if (info)
2029 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2030
2031 return ret;
2032 }
2033
2034 /* Return code to initialize DST with a copy constructor from SRC. */
2035
2036 tree
2037 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2038 {
2039 tree info = CP_OMP_CLAUSE_INFO (clause);
2040 tree ret = NULL;
2041
2042 if (info)
2043 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2044 if (ret == NULL)
2045 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2046
2047 return ret;
2048 }
2049
2050 /* Similarly, except use an assignment operator instead. */
2051
2052 tree
2053 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2054 {
2055 tree info = CP_OMP_CLAUSE_INFO (clause);
2056 tree ret = NULL;
2057
2058 if (info)
2059 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2060 if (ret == NULL)
2061 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2062
2063 return ret;
2064 }
2065
2066 /* Return code to destroy DECL. */
2067
2068 tree
2069 cxx_omp_clause_dtor (tree clause, tree decl)
2070 {
2071 tree info = CP_OMP_CLAUSE_INFO (clause);
2072 tree ret = NULL;
2073
2074 if (info)
2075 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2076
2077 return ret;
2078 }
2079
2080 /* True if OpenMP should privatize what this DECL points to rather
2081 than the DECL itself. */
2082
2083 bool
2084 cxx_omp_privatize_by_reference (const_tree decl)
2085 {
2086 return (TYPE_REF_P (TREE_TYPE (decl))
2087 || is_invisiref_parm (decl));
2088 }
2089
2090 /* Return true if DECL is a const-qualified variable with no mutable members. */
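/* E.g. a `const S s' qualifies when `struct S { int i; };' but not when
   `struct S { mutable int i; };' (illustrative).  */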
2091 bool
2092 cxx_omp_const_qual_no_mutable (tree decl)
2093 {
2094 tree type = TREE_TYPE (decl);
2095 if (TYPE_REF_P (type))
2096 {
2097 if (!is_invisiref_parm (decl))
2098 return false;
2099 type = TREE_TYPE (type);
2100
2101 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2102 {
2103 /* NRV (the named return value optimization) doesn't preserve
2104 const qualification of the variable's type. */
2105 tree outer = outer_curly_brace_block (current_function_decl);
2106 tree var;
2107
2108 if (outer)
2109 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2110 if (VAR_P (var)
2111 && DECL_NAME (decl) == DECL_NAME (var)
2112 && (TYPE_MAIN_VARIANT (type)
2113 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2114 {
2115 if (TYPE_READONLY (TREE_TYPE (var)))
2116 type = TREE_TYPE (var);
2117 break;
2118 }
2119 }
2120 }
2121
2122 if (type == error_mark_node)
2123 return false;
2124
2125 /* Variables with const-qualified type having no mutable member
2126 are predetermined shared. */
2127 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2128 return true;
2129
2130 return false;
2131 }
2132
2133 /* True if the OpenMP sharing attribute of DECL is predetermined. */
2134
2135 enum omp_clause_default_kind
2136 cxx_omp_predetermined_sharing_1 (tree decl)
2137 {
2138 /* Static data members are predetermined shared. */
2139 if (TREE_STATIC (decl))
2140 {
2141 tree ctx = CP_DECL_CONTEXT (decl);
2142 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2143 return OMP_CLAUSE_DEFAULT_SHARED;
2144
2145 if (c_omp_predefined_variable (decl))
2146 return OMP_CLAUSE_DEFAULT_SHARED;
2147 }
2148
2149 /* `this' may not be specified in data-sharing clauses, but we still
2150 need to predetermine it as firstprivate. */
2151 if (decl == current_class_ptr)
2152 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2153
2154 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2155 }
2156
2157 /* Likewise, but also include the artificial vars. We don't want to
2158 disallow the artificial vars being mentioned in explicit clauses,
2159 as we use artificial vars e.g. for loop constructs with random
2160 access iterators other than pointers, but during gimplification
2161 we want to treat them as predetermined. */
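/* An example of such an artificial variable (illustrative) is the
   compiler-generated iteration count temporary for

     #pragma omp for
     for (It i = v.begin (); i != v.end (); ++i) ...

   where It is a random access iterator class.  */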
2162
2163 enum omp_clause_default_kind
2164 cxx_omp_predetermined_sharing (tree decl)
2165 {
2166 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2167 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2168 return ret;
2169
2170 /* Predetermine artificial variables holding integral values; those
2171 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2172 gimplification. */
2173 if (VAR_P (decl)
2174 && DECL_ARTIFICIAL (decl)
2175 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2176 && !(DECL_LANG_SPECIFIC (decl)
2177 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2178 return OMP_CLAUSE_DEFAULT_SHARED;
2179
2180 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2181 }
2182
2183 /* Finalize an implicitly determined clause. */
2184
2185 void
2186 cxx_omp_finish_clause (tree c, gimple_seq *)
2187 {
2188 tree decl, inner_type;
2189 bool make_shared = false;
2190
2191 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2192 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2193 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2194 return;
2195
2196 decl = OMP_CLAUSE_DECL (c);
2197 decl = require_complete_type (decl);
2198 inner_type = TREE_TYPE (decl);
2199 if (decl == error_mark_node)
2200 make_shared = true;
2201 else if (TYPE_REF_P (TREE_TYPE (decl)))
2202 inner_type = TREE_TYPE (inner_type);
2203
2204 /* We're interested in the base element, not arrays. */
2205 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2206 inner_type = TREE_TYPE (inner_type);
2207
2208 /* Check for special function availability by building a call to one.
2209 Save the results, because later we won't be in the right context
2210 for making these queries. */
2211 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2212 if (!make_shared
2213 && CLASS_TYPE_P (inner_type)
2214 && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
2215 true))
2216 make_shared = true;
2217
2218 if (make_shared)
2219 {
2220 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2221 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2222 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2223 }
2224 }
2225
2226 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2227 disregarded in an OpenMP construct, because it is going to be
2228 remapped during OpenMP lowering. SHARED is true if DECL
2229 is going to be shared, false if it is going to be privatized. */
2230
2231 bool
2232 cxx_omp_disregard_value_expr (tree decl, bool shared)
2233 {
2234 return !shared
2235 && VAR_P (decl)
2236 && DECL_HAS_VALUE_EXPR_P (decl)
2237 && DECL_ARTIFICIAL (decl)
2238 && DECL_LANG_SPECIFIC (decl)
2239 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2240 }
2241
2242 /* Fold expression X which is used as an rvalue if RVAL is true. */
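/* E.g. given `const int n = 4;', an rvalue use of `n' is folded to 4
   below via decl_constant_value (illustrative).  */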
2243
2244 tree
2245 cp_fold_maybe_rvalue (tree x, bool rval)
2246 {
2247 while (true)
2248 {
2249 x = cp_fold (x);
2250 if (rval)
2251 x = mark_rvalue_use (x);
2252 if (rval && DECL_P (x)
2253 && !TYPE_REF_P (TREE_TYPE (x)))
2254 {
2255 tree v = decl_constant_value (x);
2256 if (v != x && v != error_mark_node)
2257 {
2258 x = v;
2259 continue;
2260 }
2261 }
2262 break;
2263 }
2264 return x;
2265 }
2266
2267 /* Fold expression X which is used as an rvalue. */
2268
2269 tree
2270 cp_fold_rvalue (tree x)
2271 {
2272 return cp_fold_maybe_rvalue (x, true);
2273 }
2274
2275 /* Perform folding on expression X. */
2276
2277 tree
2278 cp_fully_fold (tree x)
2279 {
2280 if (processing_template_decl)
2281 return x;
2282 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2283 have to call both. */
2284 if (cxx_dialect >= cxx11)
2285 {
2286 x = maybe_constant_value (x);
2287 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2288 a TARGET_EXPR; undo that here. */
2289 if (TREE_CODE (x) == TARGET_EXPR)
2290 x = TARGET_EXPR_INITIAL (x);
2291 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2292 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2293 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2294 x = TREE_OPERAND (x, 0);
2295 }
2296 return cp_fold_rvalue (x);
2297 }
2298
2299 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2300 in some cases. */
2301
2302 tree
2303 cp_fully_fold_init (tree x)
2304 {
2305 if (processing_template_decl)
2306 return x;
2307 x = cp_fully_fold (x);
2308 hash_set<tree> pset;
2309 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2310 return x;
2311 }
2312
2313 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2314 and certain changes are made to the folding done. Or should be (FIXME). We
2315 never touch maybe_const, as it is only used for the C front-end
2316 C_MAYBE_CONST_EXPR. */
2317
2318 tree
2319 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2320 {
2321 return cp_fold_maybe_rvalue (x, !lval);
2322 }
2323
2324 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2325
2326 /* Discard all entries in FOLD_CACHE. */
2327
2328 void
2329 clear_fold_cache (void)
2330 {
2331 if (fold_cache != NULL)
2332 fold_cache->empty ();
2333 }
2334
2335 /* This function tries to fold an expression X.
2336 To avoid combinatorial explosion, folding results are kept in
2337 fold_cache. If X is invalid, we don't fold at all.
2338 For performance reasons we don't cache expressions representing a
2339 declaration or constant.
2340 Returns X or its folded variant. */
2341
2342 static tree
2343 cp_fold (tree x)
2344 {
2345 tree op0, op1, op2, op3;
2346 tree org_x = x, r = NULL_TREE;
2347 enum tree_code code;
2348 location_t loc;
2349 bool rval_ops = true;
2350
2351 if (!x || x == error_mark_node)
2352 return x;
2353
2354 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2355 return x;
2356
2357 /* Don't bother to cache DECLs or constants. */
2358 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2359 return x;
2360
2361 if (fold_cache == NULL)
2362 fold_cache = hash_map<tree, tree>::create_ggc (101);
2363
2364 if (tree *cached = fold_cache->get (x))
2365 return *cached;
2366
2367 code = TREE_CODE (x);
2368 switch (code)
2369 {
2370 case CLEANUP_POINT_EXPR:
2371 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2372 effects. */
2373 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2374 if (!TREE_SIDE_EFFECTS (r))
2375 x = r;
2376 break;
2377
2378 case SIZEOF_EXPR:
2379 x = fold_sizeof_expr (x);
2380 break;
2381
2382 case VIEW_CONVERT_EXPR:
2383 rval_ops = false;
2384 /* FALLTHRU */
2385 case CONVERT_EXPR:
2386 case NOP_EXPR:
2387 case NON_LVALUE_EXPR:
2388
2389 if (VOID_TYPE_P (TREE_TYPE (x)))
2390 {
2391 /* This is just to make sure we don't end up with casts to
2392 void from error_mark_node. If we just returned x, then
2393 cp_fold_r might fold the operand into error_mark_node and
2394 leave the conversion in the IR; STRIP_USELESS_TYPE_CONVERSION
2395 during gimplification doesn't like such casts.
2396 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2397 folded operand should already be in the caches, and if it is
2398 cp_fold_r will modify it in place. */
2399 op0 = cp_fold (TREE_OPERAND (x, 0));
2400 if (op0 == error_mark_node)
2401 x = error_mark_node;
2402 break;
2403 }
2404
2405 loc = EXPR_LOCATION (x);
2406 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2407
2408 if (code == CONVERT_EXPR
2409 && SCALAR_TYPE_P (TREE_TYPE (x))
2410 && op0 != void_node)
2411 /* During parsing we used convert_to_*_nofold; re-convert now using the
2412 folding variants, since fold() doesn't do those transformations. */
2413 x = fold (convert (TREE_TYPE (x), op0));
2414 else if (op0 != TREE_OPERAND (x, 0))
2415 {
2416 if (op0 == error_mark_node)
2417 x = error_mark_node;
2418 else
2419 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2420 }
2421 else
2422 x = fold (x);
2423
2424 /* Conversion of an out-of-range value has implementation-defined
2425 behavior; the language considers it different from arithmetic
2426 overflow, which is undefined. */
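/* E.g. (signed char) 300 is implementation-defined rather than
   undefined, so any overflow flag set while folding the conversion
   of a constant is cleared here (illustrative).  */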
2427 if (TREE_CODE (op0) == INTEGER_CST
2428 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2429 TREE_OVERFLOW (x) = false;
2430
2431 break;
2432
2433 case INDIRECT_REF:
2434 /* We don't need the decltype(auto) obfuscation anymore. */
2435 if (REF_PARENTHESIZED_P (x))
2436 {
2437 tree p = maybe_undo_parenthesized_ref (x);
2438 return cp_fold (p);
2439 }
2440 goto unary;
2441
2442 case ADDR_EXPR:
2443 loc = EXPR_LOCATION (x);
2444 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2445
2446 /* Cope with user tricks that amount to offsetof. */
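/* E.g. &((struct S *) 0)->m folds to a constant byte offset, much as
   offsetof (S, m) would (illustrative).  */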
2447 if (op0 != error_mark_node
2448 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2449 {
2450 tree val = get_base_address (op0);
2451 if (val
2452 && INDIRECT_REF_P (val)
2453 && COMPLETE_TYPE_P (TREE_TYPE (val))
2454 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2455 {
2456 val = TREE_OPERAND (val, 0);
2457 STRIP_NOPS (val);
2458 val = maybe_constant_value (val);
2459 if (TREE_CODE (val) == INTEGER_CST)
2460 return fold_offsetof (op0, TREE_TYPE (x));
2461 }
2462 }
2463 goto finish_unary;
2464
2465 case REALPART_EXPR:
2466 case IMAGPART_EXPR:
2467 rval_ops = false;
2468 /* FALLTHRU */
2469 case CONJ_EXPR:
2470 case FIX_TRUNC_EXPR:
2471 case FLOAT_EXPR:
2472 case NEGATE_EXPR:
2473 case ABS_EXPR:
2474 case ABSU_EXPR:
2475 case BIT_NOT_EXPR:
2476 case TRUTH_NOT_EXPR:
2477 case FIXED_CONVERT_EXPR:
2478 unary:
2479
2480 loc = EXPR_LOCATION (x);
2481 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2482
2483 finish_unary:
2484 if (op0 != TREE_OPERAND (x, 0))
2485 {
2486 if (op0 == error_mark_node)
2487 x = error_mark_node;
2488 else
2489 {
2490 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2491 if (code == INDIRECT_REF
2492 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2493 {
2494 TREE_READONLY (x) = TREE_READONLY (org_x);
2495 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2496 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2497 }
2498 }
2499 }
2500 else
2501 x = fold (x);
2502
2503 gcc_assert (TREE_CODE (x) != COND_EXPR
2504 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2505 break;
2506
2507 case UNARY_PLUS_EXPR:
2508 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2509 if (op0 == error_mark_node)
2510 x = error_mark_node;
2511 else
2512 x = fold_convert (TREE_TYPE (x), op0);
2513 break;
2514
2515 case POSTDECREMENT_EXPR:
2516 case POSTINCREMENT_EXPR:
2517 case INIT_EXPR:
2518 case PREDECREMENT_EXPR:
2519 case PREINCREMENT_EXPR:
2520 case COMPOUND_EXPR:
2521 case MODIFY_EXPR:
2522 rval_ops = false;
2523 /* FALLTHRU */
2524 case POINTER_PLUS_EXPR:
2525 case PLUS_EXPR:
2526 case POINTER_DIFF_EXPR:
2527 case MINUS_EXPR:
2528 case MULT_EXPR:
2529 case TRUNC_DIV_EXPR:
2530 case CEIL_DIV_EXPR:
2531 case FLOOR_DIV_EXPR:
2532 case ROUND_DIV_EXPR:
2533 case TRUNC_MOD_EXPR:
2534 case CEIL_MOD_EXPR:
2535 case ROUND_MOD_EXPR:
2536 case RDIV_EXPR:
2537 case EXACT_DIV_EXPR:
2538 case MIN_EXPR:
2539 case MAX_EXPR:
2540 case LSHIFT_EXPR:
2541 case RSHIFT_EXPR:
2542 case LROTATE_EXPR:
2543 case RROTATE_EXPR:
2544 case BIT_AND_EXPR:
2545 case BIT_IOR_EXPR:
2546 case BIT_XOR_EXPR:
2547 case TRUTH_AND_EXPR:
2548 case TRUTH_ANDIF_EXPR:
2549 case TRUTH_OR_EXPR:
2550 case TRUTH_ORIF_EXPR:
2551 case TRUTH_XOR_EXPR:
2552 case LT_EXPR: case LE_EXPR:
2553 case GT_EXPR: case GE_EXPR:
2554 case EQ_EXPR: case NE_EXPR:
2555 case UNORDERED_EXPR: case ORDERED_EXPR:
2556 case UNLT_EXPR: case UNLE_EXPR:
2557 case UNGT_EXPR: case UNGE_EXPR:
2558 case UNEQ_EXPR: case LTGT_EXPR:
2559 case RANGE_EXPR: case COMPLEX_EXPR:
2560
2561 loc = EXPR_LOCATION (x);
2562 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2563 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2564
2565 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2566 {
2567 if (op0 == error_mark_node || op1 == error_mark_node)
2568 x = error_mark_node;
2569 else
2570 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2571 }
2572 else
2573 x = fold (x);
2574
2575 /* This is only needed for -Wnonnull-compare and only if
2576 TREE_NO_WARNING (org_x) is set, but to keep that option from
2577 affecting code generation, we do it unconditionally. */
2578 if (COMPARISON_CLASS_P (org_x))
2579 {
2580 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2581 ;
2582 else if (COMPARISON_CLASS_P (x))
2583 {
2584 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2585 TREE_NO_WARNING (x) = 1;
2586 }
2587 /* Otherwise give up on optimizing these; let the GIMPLE
2588 folders optimize them later on. */
2589 else if (op0 != TREE_OPERAND (org_x, 0)
2590 || op1 != TREE_OPERAND (org_x, 1))
2591 {
2592 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2593 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2594 TREE_NO_WARNING (x) = 1;
2595 }
2596 else
2597 x = org_x;
2598 }
2599 if (code == MODIFY_EXPR && TREE_CODE (x) == MODIFY_EXPR)
2600 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2601
2602 break;
2603
2604 case VEC_COND_EXPR:
2605 case COND_EXPR:
2606 loc = EXPR_LOCATION (x);
2607 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2608 op1 = cp_fold (TREE_OPERAND (x, 1));
2609 op2 = cp_fold (TREE_OPERAND (x, 2));
2610
2611 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2612 {
2613 warning_sentinel s (warn_int_in_bool_context);
2614 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2615 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2616 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2617 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2618 }
2619 else if (VOID_TYPE_P (TREE_TYPE (x)))
2620 {
2621 if (TREE_CODE (op0) == INTEGER_CST)
2622 {
2623 /* If the condition is constant, fold can fold away
2624 the COND_EXPR. Some statement-level uses of COND_EXPR
2625 have a NULL branch; avoid crashing while folding those. */
2626 if (!op1)
2627 op1 = build_empty_stmt (loc);
2628 if (!op2)
2629 op2 = build_empty_stmt (loc);
2630 }
2631 else
2632 {
2633 /* Otherwise, don't bother folding a void condition, since
2634 it can't produce a constant value. */
2635 if (op0 != TREE_OPERAND (x, 0)
2636 || op1 != TREE_OPERAND (x, 1)
2637 || op2 != TREE_OPERAND (x, 2))
2638 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2639 break;
2640 }
2641 }
2642
2643 if (op0 != TREE_OPERAND (x, 0)
2644 || op1 != TREE_OPERAND (x, 1)
2645 || op2 != TREE_OPERAND (x, 2))
2646 {
2647 if (op0 == error_mark_node
2648 || op1 == error_mark_node
2649 || op2 == error_mark_node)
2650 x = error_mark_node;
2651 else
2652 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2653 }
2654 else
2655 x = fold (x);
2656
2657 /* A COND_EXPR might have incompatible types in branches if one or both
2658 arms are bitfields. If folding exposed such a branch, fix it up. */
2659 if (TREE_CODE (x) != code
2660 && x != error_mark_node
2661 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2662 x = fold_convert (TREE_TYPE (org_x), x);
2663
2664 break;
2665
2666 case CALL_EXPR:
2667 {
2668 int i, m, sv = optimize, nw = sv, changed = 0;
2669 tree callee = get_callee_fndecl (x);
2670
2671 /* Some built-in function calls will be evaluated at compile-time in
2672 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2673 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
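/* Illustrative case: at -O0, in

     constexpr bool f (int i) { return __builtin_constant_p (i); }

   the call must stay undecided here rather than fold to 0.  */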
2674 if (callee && fndecl_built_in_p (callee) && !optimize
2675 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2676 && current_function_decl
2677 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2678 nw = 1;
2679
2680 /* Defer folding __builtin_is_constant_evaluated. */
2681 if (callee
2682 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2683 BUILT_IN_FRONTEND))
2684 break;
2685
2686 if (callee
2687 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2688 BUILT_IN_FRONTEND))
2689 {
2690 x = fold_builtin_source_location (EXPR_LOCATION (x));
2691 break;
2692 }
2693
2694 x = copy_node (x);
2695
2696 m = call_expr_nargs (x);
2697 for (i = 0; i < m; i++)
2698 {
2699 r = cp_fold (CALL_EXPR_ARG (x, i));
2700 if (r != CALL_EXPR_ARG (x, i))
2701 {
2702 if (r == error_mark_node)
2703 {
2704 x = error_mark_node;
2705 break;
2706 }
2707 changed = 1;
2708 }
2709 CALL_EXPR_ARG (x, i) = r;
2710 }
2711 if (x == error_mark_node)
2712 break;
2713
2714 optimize = nw;
2715 r = fold (x);
2716 optimize = sv;
2717
2718 if (TREE_CODE (r) != CALL_EXPR)
2719 {
2720 x = cp_fold (r);
2721 break;
2722 }
2723
2724 optimize = nw;
2725
2726 /* Invoke maybe_constant_value for functions declared
2727 constexpr and not called with AGGR_INIT_EXPRs.
2728 TODO:
2729 Do constexpr expansion of expressions where the call itself is not
2730 constant, but the call followed by an INDIRECT_REF is. */
2731 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2732 && !flag_no_inline)
2733 r = maybe_constant_value (x);
2734 optimize = sv;
2735
2736 if (TREE_CODE (r) != CALL_EXPR)
2737 {
2738 if (DECL_CONSTRUCTOR_P (callee))
2739 {
2740 loc = EXPR_LOCATION (x);
2741 tree s = build_fold_indirect_ref_loc (loc,
2742 CALL_EXPR_ARG (x, 0));
2743 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2744 }
2745 x = r;
2746 break;
2747 }
2748
2749 if (!changed)
2750 x = org_x;
2751 break;
2752 }
2753
2754 case CONSTRUCTOR:
2755 {
2756 unsigned i;
2757 constructor_elt *p;
2758 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2759 vec<constructor_elt, va_gc> *nelts = NULL;
2760 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2761 {
2762 tree op = cp_fold (p->value);
2763 if (op != p->value)
2764 {
2765 if (op == error_mark_node)
2766 {
2767 x = error_mark_node;
2768 vec_free (nelts);
2769 break;
2770 }
2771 if (nelts == NULL)
2772 nelts = elts->copy ();
2773 (*nelts)[i].value = op;
2774 }
2775 }
2776 if (nelts)
2777 {
2778 x = build_constructor (TREE_TYPE (x), nelts);
2779 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2780 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2781 }
2782 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2783 x = fold (x);
2784 break;
2785 }
2786 case TREE_VEC:
2787 {
2788 bool changed = false;
2789 releasing_vec vec;
2790 int i, n = TREE_VEC_LENGTH (x);
2791 vec_safe_reserve (vec, n);
2792
2793 for (i = 0; i < n; i++)
2794 {
2795 tree op = cp_fold (TREE_VEC_ELT (x, i));
2796 vec->quick_push (op);
2797 if (op != TREE_VEC_ELT (x, i))
2798 changed = true;
2799 }
2800
2801 if (changed)
2802 {
2803 r = copy_node (x);
2804 for (i = 0; i < n; i++)
2805 TREE_VEC_ELT (r, i) = (*vec)[i];
2806 x = r;
2807 }
2808 }
2809
2810 break;
2811
2812 case ARRAY_REF:
2813 case ARRAY_RANGE_REF:
2814
2815 loc = EXPR_LOCATION (x);
2816 op0 = cp_fold (TREE_OPERAND (x, 0));
2817 op1 = cp_fold (TREE_OPERAND (x, 1));
2818 op2 = cp_fold (TREE_OPERAND (x, 2));
2819 op3 = cp_fold (TREE_OPERAND (x, 3));
2820
2821 if (op0 != TREE_OPERAND (x, 0)
2822 || op1 != TREE_OPERAND (x, 1)
2823 || op2 != TREE_OPERAND (x, 2)
2824 || op3 != TREE_OPERAND (x, 3))
2825 {
2826 if (op0 == error_mark_node
2827 || op1 == error_mark_node
2828 || op2 == error_mark_node
2829 || op3 == error_mark_node)
2830 x = error_mark_node;
2831 else
2832 {
2833 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2834 TREE_READONLY (x) = TREE_READONLY (org_x);
2835 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2836 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2837 }
2838 }
2839
2840 x = fold (x);
2841 break;
2842
2843 case SAVE_EXPR:
2844 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2845 folding, evaluates to an invariant. In that case there is no need
2846 to wrap the folded tree in a SAVE_EXPR. */
2847 r = cp_fold (TREE_OPERAND (x, 0));
2848 if (tree_invariant_p (r))
2849 x = r;
2850 break;
2851
2852 default:
2853 return org_x;
2854 }
2855
2856 fold_cache->put (org_x, x);
2857 /* Make sure we don't try to fold an already folded result again. */
2858 if (x != org_x)
2859 fold_cache->put (x, x);
2860
2861 return x;
2862 }
2863
2864 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
2865
2866 tree
2867 lookup_hotness_attribute (tree list)
2868 {
2869 for (; list; list = TREE_CHAIN (list))
2870 {
2871 tree name = get_attribute_name (list);
2872 if (is_attribute_p ("hot", name)
2873 || is_attribute_p ("cold", name)
2874 || is_attribute_p ("likely", name)
2875 || is_attribute_p ("unlikely", name))
2876 break;
2877 }
2878 return list;
2879 }
2880
2881 /* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST. */
2882
2883 static tree
2884 remove_hotness_attribute (tree list)
2885 {
2886 list = remove_attribute ("hot", list);
2887 list = remove_attribute ("cold", list);
2888 list = remove_attribute ("likely", list);
2889 list = remove_attribute ("unlikely", list);
2890 return list;
2891 }
2892
2893 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2894 PREDICT_EXPR. */
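/* E.g. for

     if (x) [[likely]] f ();

   the [[likely]] is replaced by PREDICT_EXPR <PRED_HOT_LABEL, TAKEN>
   emitted ahead of the statement (illustrative).  */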
2895
2896 tree
2897 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2898 {
2899 if (std_attrs == error_mark_node)
2900 return std_attrs;
2901 if (tree attr = lookup_hotness_attribute (std_attrs))
2902 {
2903 tree name = get_attribute_name (attr);
2904 bool hot = (is_attribute_p ("hot", name)
2905 || is_attribute_p ("likely", name));
2906 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2907 hot ? TAKEN : NOT_TAKEN);
2908 SET_EXPR_LOCATION (pred, attrs_loc);
2909 add_stmt (pred);
2910 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2911 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2912 get_attribute_name (other), name);
2913 std_attrs = remove_hotness_attribute (std_attrs);
2914 }
2915 return std_attrs;
2916 }
2917
2918 /* Helper of fold_builtin_source_location: return the
2919 std::source_location::__impl type after performing verification
2920 on it. LOC is used for reporting any errors. */
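/* The layout verified below is roughly (an illustrative sketch; the
   line and column members may be any integral type):

     struct source_location::__impl {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line, _M_column;
     };  */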
2921
2922 static tree
2923 get_source_location_impl_type (location_t loc)
2924 {
2925 tree name = get_identifier ("source_location");
2926 tree decl = lookup_qualified_name (std_node, name);
2927 if (TREE_CODE (decl) != TYPE_DECL)
2928 {
2929 auto_diagnostic_group d;
2930 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2931 qualified_name_lookup_error (std_node, name, decl, loc);
2932 else
2933 error_at (loc, "%qD is not a type", decl);
2934 return error_mark_node;
2935 }
2936 name = get_identifier ("__impl");
2937 tree type = TREE_TYPE (decl);
2938 decl = lookup_qualified_name (type, name);
2939 if (TREE_CODE (decl) != TYPE_DECL)
2940 {
2941 auto_diagnostic_group d;
2942 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
2943 qualified_name_lookup_error (type, name, decl, loc);
2944 else
2945 error_at (loc, "%qD is not a type", decl);
2946 return error_mark_node;
2947 }
2948 type = TREE_TYPE (decl);
2949 if (TREE_CODE (type) != RECORD_TYPE)
2950 {
2951 error_at (loc, "%qD is not a class type", decl);
2952 return error_mark_node;
2953 }
2954
2955 int cnt = 0;
2956 for (tree field = TYPE_FIELDS (type);
2957 (field = next_initializable_field (field)) != NULL_TREE;
2958 field = DECL_CHAIN (field))
2959 {
2960 if (DECL_NAME (field) != NULL_TREE)
2961 {
2962 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
2963 if (strcmp (n, "_M_file_name") == 0
2964 || strcmp (n, "_M_function_name") == 0)
2965 {
2966 if (TREE_TYPE (field) != const_string_type_node)
2967 {
2968 error_at (loc, "%qD does not have %<const char *%> type",
2969 field);
2970 return error_mark_node;
2971 }
2972 cnt++;
2973 continue;
2974 }
2975 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
2976 {
2977 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
2978 {
2979 error_at (loc, "%qD does not have integral type", field);
2980 return error_mark_node;
2981 }
2982 cnt++;
2983 continue;
2984 }
2985 }
2986 cnt = 0;
2987 break;
2988 }
2989 if (cnt != 4)
2990 {
2991 error_at (loc, "%<std::source_location::__impl%> does not contain only "
2992 "non-static data members %<_M_file_name%>, "
2993 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
2994 return error_mark_node;
2995 }
2996 return build_qualified_type (type, TYPE_QUAL_CONST);
2997 }
2998
2999 /* Type for source_location_table hash_set. */
3000 struct GTY((for_user)) source_location_table_entry {
3001 location_t loc;
3002 unsigned uid;
3003 tree var;
3004 };
3005
3006 /* Hash traits class for the source_location_table below. */
3007
3008 struct source_location_table_entry_hash
3009 : ggc_remove <source_location_table_entry>
3010 {
3011 typedef source_location_table_entry value_type;
3012 typedef source_location_table_entry compare_type;
3013
3014 static hashval_t
3015 hash (const source_location_table_entry &ref)
3016 {
3017 inchash::hash hstate (0);
3018 hstate.add_int (ref.loc);
3019 hstate.add_int (ref.uid);
3020 return hstate.end ();
3021 }
3022
3023 static bool
3024 equal (const source_location_table_entry &ref1,
3025 const source_location_table_entry &ref2)
3026 {
3027 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3028 }
3029
3030 static void
3031 mark_deleted (source_location_table_entry &ref)
3032 {
3033 ref.loc = UNKNOWN_LOCATION;
3034 ref.uid = -1U;
3035 ref.var = NULL_TREE;
3036 }
3037
3038 static void
3039 mark_empty (source_location_table_entry &ref)
3040 {
3041 ref.loc = UNKNOWN_LOCATION;
3042 ref.uid = 0;
3043 ref.var = NULL_TREE;
3044 }
3045
3046 static bool
3047 is_deleted (const source_location_table_entry &ref)
3048 {
3049 return (ref.loc == UNKNOWN_LOCATION
3050 && ref.uid == -1U
3051 && ref.var == NULL_TREE);
3052 }
3053
3054 static bool
3055 is_empty (const source_location_table_entry &ref)
3056 {
3057 return (ref.loc == UNKNOWN_LOCATION
3058 && ref.uid == 0
3059 && ref.var == NULL_TREE);
3060 }
3061 };
3062
3063 static GTY(()) hash_table <source_location_table_entry_hash>
3064 *source_location_table;
3065 static GTY(()) unsigned int source_location_id;
3066
3067 /* Fold __builtin_source_location () call. LOC is the location
3068 of the call. */
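/* E.g. a call at line 42, column 7 of bar.C inside `int foo ()' yields
   the address of a static const __impl object initialized roughly to
   { "bar.C", "int foo()", 42, 7 } (illustrative).  */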
3069
3070 tree
3071 fold_builtin_source_location (location_t loc)
3072 {
3073 if (source_location_impl == NULL_TREE)
3074 {
3075 auto_diagnostic_group d;
3076 source_location_impl = get_source_location_impl_type (loc);
3077 if (source_location_impl == error_mark_node)
3078 inform (loc, "evaluating %qs", "__builtin_source_location");
3079 }
3080 if (source_location_impl == error_mark_node)
3081 return build_zero_cst (const_ptr_type_node);
3082 if (source_location_table == NULL)
3083 source_location_table
3084 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3085 const line_map_ordinary *map;
3086 source_location_table_entry entry;
3087 entry.loc
3088 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3089 &map);
3090 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3091 entry.var = error_mark_node;
3092 source_location_table_entry *entryp
3093 = source_location_table->find_slot (entry, INSERT);
3094 tree var;
3095 if (entryp->var)
3096 var = entryp->var;
3097 else
3098 {
3099 char tmp_name[32];
3100 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3101 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3102 source_location_impl);
3103 TREE_STATIC (var) = 1;
3104 TREE_PUBLIC (var) = 0;
3105 DECL_ARTIFICIAL (var) = 1;
3106 DECL_IGNORED_P (var) = 1;
3107 DECL_EXTERNAL (var) = 0;
3108 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3109 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3110 layout_decl (var, 0);
3111
3112 vec<constructor_elt, va_gc> *v = NULL;
3113 vec_alloc (v, 4);
3114 for (tree field = TYPE_FIELDS (source_location_impl);
3115 (field = next_initializable_field (field)) != NULL_TREE;
3116 field = DECL_CHAIN (field))
3117 {
3118 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3119 tree val = NULL_TREE;
3120 if (strcmp (n, "_M_file_name") == 0)
3121 {
3122 if (const char *fname = LOCATION_FILE (loc))
3123 {
3124 fname = remap_macro_filename (fname);
3125 val = build_string_literal (strlen (fname) + 1, fname);
3126 }
3127 else
3128 val = build_string_literal (1, "");
3129 }
3130 else if (strcmp (n, "_M_function_name") == 0)
3131 {
3132 const char *name = "";
3133
3134 if (current_function_decl)
3135 name = cxx_printable_name (current_function_decl, 0);
3136
3137 val = build_string_literal (strlen (name) + 1, name);
3138 }
3139 else if (strcmp (n, "_M_line") == 0)
3140 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3141 else if (strcmp (n, "_M_column") == 0)
3142 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3143 else
3144 gcc_unreachable ();
3145 CONSTRUCTOR_APPEND_ELT (v, field, val);
3146 }
3147
3148 tree ctor = build_constructor (source_location_impl, v);
3149 TREE_CONSTANT (ctor) = 1;
3150 TREE_STATIC (ctor) = 1;
3151 DECL_INITIAL (var) = ctor;
3152 varpool_node::finalize_decl (var);
3153 *entryp = entry;
3154 entryp->var = var;
3155 }
3156
3157 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3158 }
3159
3160 #include "gt-cp-cp-gimplify.h"