1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
2
3 Copyright (C) 2002-2019 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "asan.h"
37 #include "gcc-rich-location.h"
38
39 /* Forward declarations. */
40
41 static tree cp_genericize_r (tree *, int *, void *);
42 static tree cp_fold_r (tree *, int *, void *);
43 static void cp_genericize_tree (tree*, bool);
44 static tree cp_fold (tree);
45
46 /* Local declarations. */
47
48 enum bc_t { bc_break = 0, bc_continue = 1 };
49
50 /* Stack of labels which are targets for "break" or "continue",
51 linked through TREE_CHAIN. */
52 static tree bc_label[2];
53
54 /* Begin a scope which can be exited by a break or continue statement. BC
55 indicates which.
56
57 Just creates a label with location LOCATION and pushes it into the current
58 context. */
59
60 static tree
61 begin_bc_block (enum bc_t bc, location_t location)
62 {
63 tree label = create_artificial_label (location);
64 DECL_CHAIN (label) = bc_label[bc];
65 bc_label[bc] = label;
66 if (bc == bc_break)
67 LABEL_DECL_BREAK (label) = true;
68 else
69 LABEL_DECL_CONTINUE (label) = true;
70 return label;
71 }
72
73 /* Finish a scope which can be exited by a break or continue statement.
74 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
75 an expression for the contents of the scope.
76
77 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
78 BLOCK. Otherwise, just forget the label. */
79
80 static void
81 finish_bc_block (tree *block, enum bc_t bc, tree label)
82 {
83 gcc_assert (label == bc_label[bc]);
84
85 if (TREE_USED (label))
86 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
87 block);
88
89 bc_label[bc] = DECL_CHAIN (label);
90 DECL_CHAIN (label) = NULL_TREE;
91 }
92
93 /* Get the label to jump to for a break or continue statement
94 in the current block scope. BC indicates which. */
95
96 static tree
97 get_bc_label (enum bc_t bc)
98 {
99 tree label = bc_label[bc];
100
101 /* Mark the label used for finish_bc_block. */
102 TREE_USED (label) = 1;
103 return label;
104 }
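
/* A minimal sketch (assumed usage, not a verbatim excerpt from any one
caller) of how the three helpers above pair up when a statement walker
lowers a breakable construct:

tree blab = begin_bc_block (bc_break, locus);
... walk the body: each BREAK_STMT becomes a GOTO_EXPR whose target
is get_bc_label (bc_break), which marks the label TREE_USED ...
finish_bc_block (&stmt_list, bc_break, blab);

finish_bc_block then emits the LABEL_EXPR only if a break was seen. */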
105
106 /* Genericize a TRY_BLOCK. */
107
108 static void
109 genericize_try_block (tree *stmt_p)
110 {
111 tree body = TRY_STMTS (*stmt_p);
112 tree cleanup = TRY_HANDLERS (*stmt_p);
113
114 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
115 }
116
117 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
118
119 static void
120 genericize_catch_block (tree *stmt_p)
121 {
122 tree type = HANDLER_TYPE (*stmt_p);
123 tree body = HANDLER_BODY (*stmt_p);
124
125 /* FIXME should the caught type go in TREE_TYPE? */
126 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
127 }
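
/* For illustration, a handler such as

try { f (); } catch (E &e) { g (); }

is lowered by the two functions above to a tree of roughly the shape

TRY_CATCH_EXPR <f ();, CATCH_EXPR <E &, g ();>>

(a sketch of the tree structure only, eliding the handler's parameter
setup and cleanups). */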
128
129 /* A terser interface for building a representation of an exception
130 specification. */
131
132 static tree
133 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
134 {
135 tree t;
136
137 /* FIXME should the allowed types go in TREE_TYPE? */
138 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
139 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
140
141 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
142 append_to_statement_list (body, &TREE_OPERAND (t, 0));
143
144 return t;
145 }
146
147 /* Genericize an EH_SPEC_BLOCK by converting it to a
148 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
149
150 static void
151 genericize_eh_spec_block (tree *stmt_p)
152 {
153 tree body = EH_SPEC_STMTS (*stmt_p);
154 tree allowed = EH_SPEC_RAISES (*stmt_p);
155 tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
156
157 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
158 TREE_NO_WARNING (*stmt_p) = true;
159 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
160 }
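
/* As a shape sketch, a pre-C++17 dynamic exception specification

void f () throw (A) { body }

yields approximately

TRY_CATCH_EXPR <body, EH_FILTER_EXPR <{A}, failure>>

where the failure path is the call built above via call_unexpected_fn
on the current exception pointer. */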
161
162 /* Return the first non-compound statement in STMT. */
163
164 tree
165 first_stmt (tree stmt)
166 {
167 switch (TREE_CODE (stmt))
168 {
169 case STATEMENT_LIST:
170 if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
171 return first_stmt (p->stmt);
172 return void_node;
173
174 case BIND_EXPR:
175 return first_stmt (BIND_EXPR_BODY (stmt));
176
177 default:
178 return stmt;
179 }
180 }
181
182 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
183
184 static void
185 genericize_if_stmt (tree *stmt_p)
186 {
187 tree stmt, cond, then_, else_;
188 location_t locus = EXPR_LOCATION (*stmt_p);
189
190 stmt = *stmt_p;
191 cond = IF_COND (stmt);
192 then_ = THEN_CLAUSE (stmt);
193 else_ = ELSE_CLAUSE (stmt);
194
195 if (then_ && else_)
196 {
197 tree ft = first_stmt (then_);
198 tree fe = first_stmt (else_);
199 br_predictor pr;
200 if (TREE_CODE (ft) == PREDICT_EXPR
201 && TREE_CODE (fe) == PREDICT_EXPR
202 && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
203 && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
204 {
205 gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
206 richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
207 warning_at (&richloc, OPT_Wattributes,
208 "both branches of %<if%> statement marked as %qs",
209 pr == PRED_HOT_LABEL ? "likely" : "unlikely");
210 }
211 }
212
213 if (!then_)
214 then_ = build_empty_stmt (locus);
215 if (!else_)
216 else_ = build_empty_stmt (locus);
217
218 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
219 stmt = then_;
220 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
221 stmt = else_;
222 else
223 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
224 if (!EXPR_HAS_LOCATION (stmt))
225 protected_set_expr_location (stmt, locus);
226 *stmt_p = stmt;
227 }
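
/* Two examples of the above (illustrative only): if both arms begin
with a PREDICT_EXPR for PRED_HOT_LABEL -- e.g. via hot/cold label
attributes, or [[likely]]/[[unlikely]] in dialects that support
them -- the hints are contradictory and -Wattributes fires. And
`if (1) f (); else g ();' folds directly to `f ();', since the dead
arm has no side effects. */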
228
229 /* Build a generic representation of one of the C loop forms. COND is the
230 loop condition or NULL_TREE. BODY is the (possibly compound) statement
231 controlled by the loop. INCR is the increment expression of a for-loop,
232 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
233 evaluated before the loop body as in while and for loops, or after the
234 loop body as in do-while loops. */
235
236 static void
237 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
238 tree incr, bool cond_is_first, int *walk_subtrees,
239 void *data)
240 {
241 tree blab, clab;
242 tree exit = NULL;
243 tree stmt_list = NULL;
244
245 protected_set_expr_location (incr, start_locus);
246
247 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
248 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
249
250 blab = begin_bc_block (bc_break, start_locus);
251 clab = begin_bc_block (bc_continue, start_locus);
252
253 cp_walk_tree (&body, cp_genericize_r, data, NULL);
254 *walk_subtrees = 0;
255
256 if (cond && TREE_CODE (cond) != INTEGER_CST)
257 {
258 /* If COND is constant, don't bother building an exit. If it's false,
259 we won't build a loop. If it's true, any exits are in the body. */
260 location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
261 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
262 get_bc_label (bc_break));
263 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
264 build_empty_stmt (cloc), exit);
265 }
266
267 if (exit && cond_is_first)
268 append_to_statement_list (exit, &stmt_list);
269 append_to_statement_list (body, &stmt_list);
270 finish_bc_block (&stmt_list, bc_continue, clab);
271 append_to_statement_list (incr, &stmt_list);
272 if (exit && !cond_is_first)
273 append_to_statement_list (exit, &stmt_list);
274
275 if (!stmt_list)
276 stmt_list = build_empty_stmt (start_locus);
277
278 tree loop;
279 if (cond && integer_zerop (cond))
280 {
281 if (cond_is_first)
282 loop = fold_build3_loc (start_locus, COND_EXPR,
283 void_type_node, cond, stmt_list,
284 build_empty_stmt (start_locus));
285 else
286 loop = stmt_list;
287 }
288 else
289 {
290 location_t loc = start_locus;
291 if (!cond || integer_nonzerop (cond))
292 loc = EXPR_LOCATION (expr_first (body));
293 if (loc == UNKNOWN_LOCATION)
294 loc = start_locus;
295 loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
296 }
297
298 stmt_list = NULL;
299 append_to_statement_list (loop, &stmt_list);
300 finish_bc_block (&stmt_list, bc_break, blab);
301 if (!stmt_list)
302 stmt_list = build_empty_stmt (start_locus);
303
304 *stmt_p = stmt_list;
305 }
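
/* Roughly, for a while or for loop (COND_IS_FIRST) the result built
above corresponds to the pseudo-GENERIC

LOOP_EXPR <
if (cond) ; else goto break_label;
body;
continue_label:;
incr; >        // back edge implied by LOOP_EXPR
break_label:;

while a do-while places the exit test after the body instead.
(Illustrative sketch, not literal output.) */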
306
307 /* Genericize a FOR_STMT node *STMT_P. */
308
309 static void
310 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
311 {
312 tree stmt = *stmt_p;
313 tree expr = NULL;
314 tree loop;
315 tree init = FOR_INIT_STMT (stmt);
316
317 if (init)
318 {
319 cp_walk_tree (&init, cp_genericize_r, data, NULL);
320 append_to_statement_list (init, &expr);
321 }
322
323 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
324 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
325 append_to_statement_list (loop, &expr);
326 if (expr == NULL_TREE)
327 expr = loop;
328 *stmt_p = expr;
329 }
330
331 /* Genericize a WHILE_STMT node *STMT_P. */
332
333 static void
334 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
335 {
336 tree stmt = *stmt_p;
337 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
338 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
339 }
340
341 /* Genericize a DO_STMT node *STMT_P. */
342
343 static void
344 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
345 {
346 tree stmt = *stmt_p;
347 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
348 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
349 }
350
351 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
352
353 static void
354 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
355 {
356 tree stmt = *stmt_p;
357 tree break_block, body, cond, type;
358 location_t stmt_locus = EXPR_LOCATION (stmt);
359
360 body = SWITCH_STMT_BODY (stmt);
361 if (!body)
362 body = build_empty_stmt (stmt_locus);
363 cond = SWITCH_STMT_COND (stmt);
364 type = SWITCH_STMT_TYPE (stmt);
365
366 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
367
368 break_block = begin_bc_block (bc_break, stmt_locus);
369
370 cp_walk_tree (&body, cp_genericize_r, data, NULL);
371 cp_walk_tree (&type, cp_genericize_r, data, NULL);
372 *walk_subtrees = 0;
373
374 if (TREE_USED (break_block))
375 SWITCH_BREAK_LABEL_P (break_block) = 1;
376 finish_bc_block (&body, bc_break, break_block);
377 *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
378 SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
379 gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
380 || !TREE_USED (break_block));
381 }
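
/* Sketch: a SWITCH_STMT for

switch (x) { case 0: ...; break; }

becomes SWITCH_EXPR <type, x, body>, where each `break' in BODY has
already been rewritten into a GOTO_EXPR to the break label, and
finish_bc_block appended that label at the end of BODY. (Shape
sketch only.) */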
382
383 /* Genericize a CONTINUE_STMT node *STMT_P. */
384
385 static void
386 genericize_continue_stmt (tree *stmt_p)
387 {
388 tree stmt_list = NULL;
389 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
390 tree label = get_bc_label (bc_continue);
391 location_t location = EXPR_LOCATION (*stmt_p);
392 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
393 append_to_statement_list_force (pred, &stmt_list);
394 append_to_statement_list (jump, &stmt_list);
395 *stmt_p = stmt_list;
396 }
397
398 /* Genericize a BREAK_STMT node *STMT_P. */
399
400 static void
401 genericize_break_stmt (tree *stmt_p)
402 {
403 tree label = get_bc_label (bc_break);
404 location_t location = EXPR_LOCATION (*stmt_p);
405 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
406 }
407
408 /* Genericize an OMP_FOR node *STMT_P. */
409
410 static void
411 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
412 {
413 tree stmt = *stmt_p;
414 location_t locus = EXPR_LOCATION (stmt);
415 tree clab = begin_bc_block (bc_continue, locus);
416
417 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
418 if (TREE_CODE (stmt) != OMP_TASKLOOP)
419 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
420 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
421 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
422 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
423 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
424 *walk_subtrees = 0;
425
426 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
427 }
428
429 /* Hook into the middle of gimplifying an OMP_FOR node. */
430
431 static enum gimplify_status
432 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
433 {
434 tree for_stmt = *expr_p;
435 gimple_seq seq = NULL;
436
437 /* Protect ourselves from recursion. */
438 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
439 return GS_UNHANDLED;
440 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
441
442 gimplify_and_add (for_stmt, &seq);
443 gimple_seq_add_seq (pre_p, seq);
444
445 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
446
447 return GS_ALL_DONE;
448 }
449
450 /* Gimplify an EXPR_STMT node. */
451
452 static void
453 gimplify_expr_stmt (tree *stmt_p)
454 {
455 tree stmt = EXPR_STMT_EXPR (*stmt_p);
456
457 if (stmt == error_mark_node)
458 stmt = NULL;
459
460 /* Gimplification of a statement expression will nullify the
461 statement if all its side effects are moved to *PRE_P and *POST_P.
462
463 In this case we will not want to emit the gimplified statement.
464 However, we may still want to emit a warning, so we do that before
465 gimplification. */
466 if (stmt && warn_unused_value)
467 {
468 if (!TREE_SIDE_EFFECTS (stmt))
469 {
470 if (!IS_EMPTY_STMT (stmt)
471 && !VOID_TYPE_P (TREE_TYPE (stmt))
472 && !TREE_NO_WARNING (stmt))
473 warning (OPT_Wunused_value, "statement with no effect");
474 }
475 else
476 warn_if_unused_value (stmt, input_location);
477 }
478
479 if (stmt == NULL_TREE)
480 stmt = alloc_stmt_list ();
481
482 *stmt_p = stmt;
483 }
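
/* For instance, with -Wunused-value an expression statement such as

x == 1;

has no side effects, so the code above warns "statement with no
effect" (unless earlier processing already set TREE_NO_WARNING on
it). Illustrative example only. */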
484
485 /* Gimplify initialization from an AGGR_INIT_EXPR. */
486
487 static void
488 cp_gimplify_init_expr (tree *expr_p)
489 {
490 tree from = TREE_OPERAND (*expr_p, 1);
491 tree to = TREE_OPERAND (*expr_p, 0);
492 tree t;
493
494 /* What about code that pulls out the temp and uses it elsewhere? I
495 think that such code never uses the TARGET_EXPR as an initializer. If
496 I'm wrong, we'll abort because the temp won't have any RTL. In that
497 case, I guess we'll need to replace references somehow. */
498 if (TREE_CODE (from) == TARGET_EXPR)
499 from = TARGET_EXPR_INITIAL (from);
500
501 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
502 inside the TARGET_EXPR. */
503 for (t = from; t; )
504 {
505 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
506
507 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
508 replace the slot operand with our target.
509
510 Should we add a target parm to gimplify_expr instead? No, as in this
511 case we want to replace the INIT_EXPR. */
512 if (TREE_CODE (sub) == AGGR_INIT_EXPR
513 || TREE_CODE (sub) == VEC_INIT_EXPR)
514 {
515 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
516 AGGR_INIT_EXPR_SLOT (sub) = to;
517 else
518 VEC_INIT_EXPR_SLOT (sub) = to;
519 *expr_p = from;
520
521 /* The initialization is now a side-effect, so the container can
522 become void. */
523 if (from != sub)
524 TREE_TYPE (from) = void_type_node;
525 }
526
527 /* Handle aggregate NSDMI. */
528 replace_placeholders (sub, to);
529
530 if (t == sub)
531 break;
532 else
533 t = TREE_OPERAND (t, 1);
534 }
535
536 }
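
/* Concretely (a sketch), for

T t = T (args);   // INIT_EXPR <t, TARGET_EXPR <slot, AGGR_INIT_EXPR>>

the code above replaces the AGGR_INIT_EXPR's slot operand with `t'
and drops the INIT_EXPR, so the constructor builds directly into the
variable rather than into a temporary that is then copied. */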
537
538 /* Gimplify a MUST_NOT_THROW_EXPR. */
539
540 static enum gimplify_status
541 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
542 {
543 tree stmt = *expr_p;
544 tree temp = voidify_wrapper_expr (stmt, NULL);
545 tree body = TREE_OPERAND (stmt, 0);
546 gimple_seq try_ = NULL;
547 gimple_seq catch_ = NULL;
548 gimple *mnt;
549
550 gimplify_and_add (body, &try_);
551 mnt = gimple_build_eh_must_not_throw (terminate_fn);
552 gimple_seq_add_stmt_without_update (&catch_, mnt);
553 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
554
555 gimple_seq_add_stmt_without_update (pre_p, mnt);
556 if (temp)
557 {
558 *expr_p = temp;
559 return GS_OK;
560 }
561
562 *expr_p = NULL;
563 return GS_ALL_DONE;
564 }
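
/* The sequence built above behaves like

try { body } catch (...) { terminate (); }

i.e. a GIMPLE_TRY whose handler is the special
GIMPLE_EH_MUST_NOT_THROW wrapper around terminate_fn. (Shape sketch
only.) */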
565
566 /* Return TRUE if an operand (OP) of a given TYPE being copied is
567 really just an empty class copy.
568
569 Check that the operand has a simple form so that TARGET_EXPRs and
570 non-empty CONSTRUCTORs get reduced properly, and we leave the
571 return slot optimization alone because it isn't a copy. */
572
573 static bool
574 simple_empty_class_p (tree type, tree op)
575 {
576 return
577 ((TREE_CODE (op) == COMPOUND_EXPR
578 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
579 || TREE_CODE (op) == EMPTY_CLASS_EXPR
580 || is_gimple_lvalue (op)
581 || INDIRECT_REF_P (op)
582 || (TREE_CODE (op) == CONSTRUCTOR
583 && CONSTRUCTOR_NELTS (op) == 0
584 && !TREE_CLOBBER_P (op))
585 || (TREE_CODE (op) == CALL_EXPR
586 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
587 && is_really_empty_class (type, /*ignore_vptr*/true);
588 }
589
590 /* Returns true if evaluating E as an lvalue has side-effects;
591 specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
592 have side-effects until there is a read or write through it. */
593
594 static bool
595 lvalue_has_side_effects (tree e)
596 {
597 if (!TREE_SIDE_EFFECTS (e))
598 return false;
599 while (handled_component_p (e))
600 {
601 if (TREE_CODE (e) == ARRAY_REF
602 && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
603 return true;
604 e = TREE_OPERAND (e, 0);
605 }
606 if (DECL_P (e))
607 /* Just naming a variable has no side-effects. */
608 return false;
609 else if (INDIRECT_REF_P (e))
610 /* Similarly, indirection has no side-effects. */
611 return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
612 else
613 /* For anything else, trust TREE_SIDE_EFFECTS. */
614 return TREE_SIDE_EFFECTS (e);
615 }
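
/* Examples: merely naming a volatile variable `v', or dereferencing a
plain pointer as `*p', has no lvalue side effects by this definition,
whereas `a[i++]' does, because the index expression must be
evaluated. */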
616
617 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
618
619 int
620 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
621 {
622 int saved_stmts_are_full_exprs_p = 0;
623 location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
624 enum tree_code code = TREE_CODE (*expr_p);
625 enum gimplify_status ret;
626
627 if (STATEMENT_CODE_P (code))
628 {
629 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
630 current_stmt_tree ()->stmts_are_full_exprs_p
631 = STMT_IS_FULL_EXPR_P (*expr_p);
632 }
633
634 switch (code)
635 {
636 case AGGR_INIT_EXPR:
637 simplify_aggr_init_expr (expr_p);
638 ret = GS_OK;
639 break;
640
641 case VEC_INIT_EXPR:
642 {
643 location_t loc = input_location;
644 tree init = VEC_INIT_EXPR_INIT (*expr_p);
645 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
646 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
647 input_location = EXPR_LOCATION (*expr_p);
648 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
649 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
650 from_array,
651 tf_warning_or_error);
652 hash_set<tree> pset;
653 cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
654 cp_genericize_tree (expr_p, false);
655 ret = GS_OK;
656 input_location = loc;
657 }
658 break;
659
660 case THROW_EXPR:
661 /* FIXME communicate throw type to back end, probably by moving
662 THROW_EXPR into ../tree.def. */
663 *expr_p = TREE_OPERAND (*expr_p, 0);
664 ret = GS_OK;
665 break;
666
667 case MUST_NOT_THROW_EXPR:
668 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
669 break;
670
671 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
672 LHS of an assignment might also be involved in the RHS, as in bug
673 25979. */
674 case INIT_EXPR:
675 cp_gimplify_init_expr (expr_p);
676 if (TREE_CODE (*expr_p) != INIT_EXPR)
677 return GS_OK;
678 /* Fall through. */
679 case MODIFY_EXPR:
680 modify_expr_case:
681 {
682 /* If the back end isn't clever enough to know that the lhs and rhs
683 types are the same, add an explicit conversion. */
684 tree op0 = TREE_OPERAND (*expr_p, 0);
685 tree op1 = TREE_OPERAND (*expr_p, 1);
686
687 if (!error_operand_p (op0)
688 && !error_operand_p (op1)
689 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
690 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
691 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
692 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
693 TREE_TYPE (op0), op1);
694
695 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
696 {
697 /* Remove any copies of empty classes. Also drop volatile
698 variables on the RHS to avoid infinite recursion from
699 gimplify_expr trying to load the value. */
700 if (TREE_SIDE_EFFECTS (op1))
701 {
702 if (TREE_THIS_VOLATILE (op1)
703 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
704 op1 = build_fold_addr_expr (op1);
705
706 gimplify_and_add (op1, pre_p);
707 }
708 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
709 is_gimple_lvalue, fb_lvalue);
710 *expr_p = TREE_OPERAND (*expr_p, 0);
711 }
712 /* P0145 says that the RHS is sequenced before the LHS.
713 gimplify_modify_expr gimplifies the RHS before the LHS, but that
714 isn't quite strong enough in two cases:
715
716 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
717 mean it's evaluated after the LHS.
718
719 2) the value calculation of the RHS is also sequenced before the
720 LHS, so for scalar assignment we need to preevaluate if the
721 RHS could be affected by LHS side-effects even if it has no
722 side-effects of its own. We don't need this for classes because
723 class assignment takes its RHS by reference. */
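/* For example (illustrative): in `a[i++] = f ();' the LHS has side
effects and the RHS is a CALL_EXPR, so the RHS is forced into a
temporary below to keep it sequenced before the LHS. */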
724 else if (flag_strong_eval_order > 1
725 && TREE_CODE (*expr_p) == MODIFY_EXPR
726 && lvalue_has_side_effects (op0)
727 && (TREE_CODE (op1) == CALL_EXPR
728 || (SCALAR_TYPE_P (TREE_TYPE (op1))
729 && !TREE_CONSTANT (op1))))
730 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
731 }
732 ret = GS_OK;
733 break;
734
735 case EMPTY_CLASS_EXPR:
736 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
737 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
738 ret = GS_OK;
739 break;
740
741 case BASELINK:
742 *expr_p = BASELINK_FUNCTIONS (*expr_p);
743 ret = GS_OK;
744 break;
745
746 case TRY_BLOCK:
747 genericize_try_block (expr_p);
748 ret = GS_OK;
749 break;
750
751 case HANDLER:
752 genericize_catch_block (expr_p);
753 ret = GS_OK;
754 break;
755
756 case EH_SPEC_BLOCK:
757 genericize_eh_spec_block (expr_p);
758 ret = GS_OK;
759 break;
760
761 case USING_STMT:
762 gcc_unreachable ();
763
764 case FOR_STMT:
765 case WHILE_STMT:
766 case DO_STMT:
767 case SWITCH_STMT:
768 case CONTINUE_STMT:
769 case BREAK_STMT:
770 gcc_unreachable ();
771
772 case OMP_FOR:
773 case OMP_SIMD:
774 case OMP_DISTRIBUTE:
775 case OMP_TASKLOOP:
776 ret = cp_gimplify_omp_for (expr_p, pre_p);
777 break;
778
779 case EXPR_STMT:
780 gimplify_expr_stmt (expr_p);
781 ret = GS_OK;
782 break;
783
784 case UNARY_PLUS_EXPR:
785 {
786 tree arg = TREE_OPERAND (*expr_p, 0);
787 tree type = TREE_TYPE (*expr_p);
788 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
789 : arg;
790 ret = GS_OK;
791 }
792 break;
793
794 case CALL_EXPR:
795 ret = GS_OK;
796 if (!CALL_EXPR_FN (*expr_p))
797 /* Internal function call. */;
798 else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
799 {
800 /* This is a call to a (compound) assignment operator that used
801 the operator syntax; gimplify the RHS first. */
802 gcc_assert (call_expr_nargs (*expr_p) == 2);
803 gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
804 enum gimplify_status t
805 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
806 if (t == GS_ERROR)
807 ret = GS_ERROR;
808 }
809 else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
810 {
811 /* Leave the last argument for gimplify_call_expr, to avoid problems
812 with __builtin_va_arg_pack(). */
813 int nargs = call_expr_nargs (*expr_p) - 1;
814 for (int i = 0; i < nargs; ++i)
815 {
816 enum gimplify_status t
817 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
818 if (t == GS_ERROR)
819 ret = GS_ERROR;
820 }
821 }
822 else if (flag_strong_eval_order
823 && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
824 {
825 /* If flag_strong_eval_order, evaluate the object argument first. */
826 tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
827 if (INDIRECT_TYPE_P (fntype))
828 fntype = TREE_TYPE (fntype);
829 if (TREE_CODE (fntype) == METHOD_TYPE)
830 {
831 enum gimplify_status t
832 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
833 if (t == GS_ERROR)
834 ret = GS_ERROR;
835 }
836 }
837 if (ret != GS_ERROR)
838 {
839 tree decl = cp_get_callee_fndecl_nofold (*expr_p);
840 if (decl
841 && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
842 BUILT_IN_FRONTEND))
843 *expr_p = boolean_false_node;
844 }
845 break;
846
847 case RETURN_EXPR:
848 if (TREE_OPERAND (*expr_p, 0)
849 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
850 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
851 {
852 expr_p = &TREE_OPERAND (*expr_p, 0);
853 code = TREE_CODE (*expr_p);
854 /* Avoid going through the INIT_EXPR case, which can
855 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
856 goto modify_expr_case;
857 }
858 /* Fall through. */
859
860 default:
861 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
862 break;
863 }
864
865 /* Restore saved state. */
866 if (STATEMENT_CODE_P (code))
867 current_stmt_tree ()->stmts_are_full_exprs_p
868 = saved_stmts_are_full_exprs_p;
869
870 return ret;
871 }
872
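/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
returned by invisible reference. */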
873 static inline bool
874 is_invisiref_parm (const_tree t)
875 {
876 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
877 && DECL_BY_REFERENCE (t));
878 }
879
880 /* Return true if the UIDs of both int tree maps are equal. */
881
882 bool
883 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
884 {
885 return (a->uid == b->uid);
886 }
887
888 /* Hash a UID in a cxx_int_tree_map. */
889
890 unsigned int
891 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
892 {
893 return item->uid;
894 }
895
896 /* A stable comparison routine for use with splay trees and DECLs. */
897
898 static int
899 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
900 {
901 tree a = (tree) xa;
902 tree b = (tree) xb;
903
904 return DECL_UID (a) - DECL_UID (b);
905 }
906
907 /* OpenMP context during genericization. */
908
909 struct cp_genericize_omp_taskreg
910 {
911 bool is_parallel;
912 bool default_shared;
913 struct cp_genericize_omp_taskreg *outer;
914 splay_tree variables;
915 };
916
917 /* Return true if genericization should try to determine if
918 DECL is firstprivate or shared within task regions. */
919
920 static bool
921 omp_var_to_track (tree decl)
922 {
923 tree type = TREE_TYPE (decl);
924 if (is_invisiref_parm (decl))
925 type = TREE_TYPE (type);
926 else if (TYPE_REF_P (type))
927 type = TREE_TYPE (type);
928 while (TREE_CODE (type) == ARRAY_TYPE)
929 type = TREE_TYPE (type);
930 if (type == error_mark_node || !CLASS_TYPE_P (type))
931 return false;
932 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
933 return false;
934 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
935 return false;
936 return true;
937 }
938
939 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
940
941 static void
942 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
943 {
944 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
945 (splay_tree_key) decl);
946 if (n == NULL)
947 {
948 int flags = OMP_CLAUSE_DEFAULT_SHARED;
949 if (omp_ctx->outer)
950 omp_cxx_notice_variable (omp_ctx->outer, decl);
951 if (!omp_ctx->default_shared)
952 {
953 struct cp_genericize_omp_taskreg *octx;
954
955 for (octx = omp_ctx->outer; octx; octx = octx->outer)
956 {
957 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
958 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
959 {
960 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
961 break;
962 }
963 if (octx->is_parallel)
964 break;
965 }
966 if (octx == NULL
967 && (TREE_CODE (decl) == PARM_DECL
968 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
969 && DECL_CONTEXT (decl) == current_function_decl)))
970 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
971 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
972 {
973 /* DECL is implicitly determined firstprivate in
974 the current task construct. Ensure the copy ctor
975 and dtor are instantiated now, because during
976 gimplification it would already be too late. */
977 tree type = TREE_TYPE (decl);
978 if (is_invisiref_parm (decl))
979 type = TREE_TYPE (type);
980 else if (TYPE_REF_P (type))
981 type = TREE_TYPE (type);
982 while (TREE_CODE (type) == ARRAY_TYPE)
983 type = TREE_TYPE (type);
984 get_copy_ctor (type, tf_none);
985 get_dtor (type, tf_none);
986 }
987 }
988 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
989 }
990 }
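
/* For example (assumed scenario): a class-type local referenced inside
`#pragma omp task' with no data-sharing clause is implicitly
firstprivate, so the code above instantiates its copy constructor and
destructor now; by gimplification time, instantiating them would no
longer be possible. */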
991
992 /* Genericization context. */
993
994 struct cp_genericize_data
995 {
996 hash_set<tree> *p_set;
997 vec<tree> bind_expr_stack;
998 struct cp_genericize_omp_taskreg *omp_ctx;
999 tree try_block;
1000 bool no_sanitize_p;
1001 bool handle_invisiref_parm_p;
1002 };
1003
1004 /* Perform any pre-gimplification folding of C++ front end trees to
1005 GENERIC.
1006 Note: Folding of non-OMP cases is something to move into
1007 the middle end. For now, since most folding is done only on
1008 GENERIC in fold-const, we need to perform this before the
1009 transformation to GIMPLE form. */
1010
1011 static tree
1012 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
1013 {
1014 tree stmt;
1015 enum tree_code code;
1016
1017 *stmt_p = stmt = cp_fold (*stmt_p);
1018
1019 if (((hash_set<tree> *) data)->add (stmt))
1020 {
1021 /* Don't walk subtrees of stmts we've already walked once; otherwise
1022 we can have exponential complexity with e.g. lots of nested
1023 SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will always
1024 return the same tree, whose subtrees were already walked the
1025 first time cp_fold_r was called on it. */
1026 *walk_subtrees = 0;
1027 return NULL;
1028 }
1029
1030 code = TREE_CODE (stmt);
1031 if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
1032 || code == OMP_TASKLOOP || code == OACC_LOOP)
1033 {
1034 tree x;
1035 int i, n;
1036
1037 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1038 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1039 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1040 x = OMP_FOR_COND (stmt);
1041 if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1042 {
1043 cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1044 cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1045 }
1046 else if (x && TREE_CODE (x) == TREE_VEC)
1047 {
1048 n = TREE_VEC_LENGTH (x);
1049 for (i = 0; i < n; i++)
1050 {
1051 tree o = TREE_VEC_ELT (x, i);
1052 if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1053 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1054 }
1055 }
1056 x = OMP_FOR_INCR (stmt);
1057 if (x && TREE_CODE (x) == TREE_VEC)
1058 {
1059 n = TREE_VEC_LENGTH (x);
1060 for (i = 0; i < n; i++)
1061 {
1062 tree o = TREE_VEC_ELT (x, i);
1063 if (o && TREE_CODE (o) == MODIFY_EXPR)
1064 o = TREE_OPERAND (o, 1);
1065 if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1066 || TREE_CODE (o) == POINTER_PLUS_EXPR))
1067 {
1068 cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1069 cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1070 }
1071 }
1072 }
1073 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1074 *walk_subtrees = 0;
1075 }
1076
1077 return NULL;
1078 }
1079
1080 /* Fold ALL the trees! FIXME we should be able to remove this, but
1081 apparently that still causes optimization regressions. */
1082
1083 void
1084 cp_fold_function (tree fndecl)
1085 {
1086 hash_set<tree> pset;
1087 cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
1088 }
1089
1090 /* Perform any pre-gimplification lowering of C++ front end trees to
1091 GENERIC. */
1092
1093 static tree
1094 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1095 {
1096 tree stmt = *stmt_p;
1097 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1098 hash_set<tree> *p_set = wtd->p_set;
1099
1100 /* If in an OpenMP context, note var uses. */
1101 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1102 && (VAR_P (stmt)
1103 || TREE_CODE (stmt) == PARM_DECL
1104 || TREE_CODE (stmt) == RESULT_DECL)
1105 && omp_var_to_track (stmt))
1106 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1107
1108 /* Don't dereference parms in a thunk, pass the references through. */
1109 if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1110 || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1111 {
1112 *walk_subtrees = 0;
1113 return NULL;
1114 }
1115
1116 /* Dereference invisible reference parms. */
1117 if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1118 {
1119 *stmt_p = convert_from_reference (stmt);
1120 p_set->add (*stmt_p);
1121 *walk_subtrees = 0;
1122 return NULL;
1123 }
1124
1125 /* Map block scope extern declarations to visible declarations with the
1126 same name and type in outer scopes if any. */
1127 if (cp_function_chain->extern_decl_map
1128 && VAR_OR_FUNCTION_DECL_P (stmt)
1129 && DECL_EXTERNAL (stmt))
1130 {
1131 struct cxx_int_tree_map *h, in;
1132 in.uid = DECL_UID (stmt);
1133 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1134 if (h)
1135 {
1136 *stmt_p = h->to;
1137 TREE_USED (h->to) |= TREE_USED (stmt);
1138 *walk_subtrees = 0;
1139 return NULL;
1140 }
1141 }
1142
1143 if (TREE_CODE (stmt) == INTEGER_CST
1144 && TYPE_REF_P (TREE_TYPE (stmt))
1145 && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1146 && !wtd->no_sanitize_p)
1147 {
1148 ubsan_maybe_instrument_reference (stmt_p);
1149 if (*stmt_p != stmt)
1150 {
1151 *walk_subtrees = 0;
1152 return NULL_TREE;
1153 }
1154 }
1155
1156 /* Other than invisiref parms, don't walk the same tree twice. */
1157 if (p_set->contains (stmt))
1158 {
1159 *walk_subtrees = 0;
1160 return NULL_TREE;
1161 }
1162
1163 switch (TREE_CODE (stmt))
1164 {
1165 case ADDR_EXPR:
1166 if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1167 {
1168 /* If in an OpenMP context, note var uses. */
1169 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1170 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1171 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1172 *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1173 *walk_subtrees = 0;
1174 }
1175 break;
1176
1177 case RETURN_EXPR:
1178 if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1179 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1180 *walk_subtrees = 0;
1181 break;
1182
1183 case OMP_CLAUSE:
1184 switch (OMP_CLAUSE_CODE (stmt))
1185 {
1186 case OMP_CLAUSE_LASTPRIVATE:
1187 /* Don't dereference an invisiref in OpenMP clauses. */
1188 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1189 {
1190 *walk_subtrees = 0;
1191 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1192 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1193 cp_genericize_r, data, NULL);
1194 }
1195 break;
1196 case OMP_CLAUSE_PRIVATE:
1197 /* Don't dereference an invisiref in OpenMP clauses. */
1198 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1199 *walk_subtrees = 0;
1200 else if (wtd->omp_ctx != NULL)
1201 {
1202 /* A private clause doesn't cause any references to the
1203 var in outer contexts, so avoid calling
1204 omp_cxx_notice_variable for it. */
1205 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1206 wtd->omp_ctx = NULL;
1207 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1208 data, NULL);
1209 wtd->omp_ctx = old;
1210 *walk_subtrees = 0;
1211 }
1212 break;
1213 case OMP_CLAUSE_SHARED:
1214 case OMP_CLAUSE_FIRSTPRIVATE:
1215 case OMP_CLAUSE_COPYIN:
1216 case OMP_CLAUSE_COPYPRIVATE:
1217 /* Don't dereference an invisiref in OpenMP clauses. */
1218 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1219 *walk_subtrees = 0;
1220 break;
1221 case OMP_CLAUSE_REDUCTION:
1222 case OMP_CLAUSE_IN_REDUCTION:
1223 case OMP_CLAUSE_TASK_REDUCTION:
1224 /* Don't dereference an invisiref in a reduction clause's
1225 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1226 still need to be genericized. */
1227 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1228 {
1229 *walk_subtrees = 0;
1230 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1231 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1232 cp_genericize_r, data, NULL);
1233 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1234 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1235 cp_genericize_r, data, NULL);
1236 }
1237 break;
1238 default:
1239 break;
1240 }
1241 break;
1242
1243 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1244 to lower this construct before scanning it, so we need to lower these
1245 before doing anything else. */
1246 case CLEANUP_STMT:
1247 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1248 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1249 : TRY_FINALLY_EXPR,
1250 void_type_node,
1251 CLEANUP_BODY (stmt),
1252 CLEANUP_EXPR (stmt));
1253 break;
1254
1255 case IF_STMT:
1256 genericize_if_stmt (stmt_p);
1257 /* *stmt_p has changed, tail recurse to handle it again. */
1258 return cp_genericize_r (stmt_p, walk_subtrees, data);
1259
1260 /* COND_EXPR might have incompatible types in branches if one or both
1261 arms are bitfields. Fix it up now. */
1262 case COND_EXPR:
1263 {
1264 tree type_left
1265 = (TREE_OPERAND (stmt, 1)
1266 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1267 : NULL_TREE);
1268 tree type_right
1269 = (TREE_OPERAND (stmt, 2)
1270 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1271 : NULL_TREE);
1272 if (type_left
1273 && !useless_type_conversion_p (TREE_TYPE (stmt),
1274 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1275 {
1276 TREE_OPERAND (stmt, 1)
1277 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1278 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1279 type_left));
1280 }
1281 if (type_right
1282 && !useless_type_conversion_p (TREE_TYPE (stmt),
1283 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1284 {
1285 TREE_OPERAND (stmt, 2)
1286 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1287 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1288 type_right));
1289 }
1290 }
1291 break;
1292
1293 case BIND_EXPR:
1294 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1295 {
1296 tree decl;
1297 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1298 if (VAR_P (decl)
1299 && !DECL_EXTERNAL (decl)
1300 && omp_var_to_track (decl))
1301 {
1302 splay_tree_node n
1303 = splay_tree_lookup (wtd->omp_ctx->variables,
1304 (splay_tree_key) decl);
1305 if (n == NULL)
1306 splay_tree_insert (wtd->omp_ctx->variables,
1307 (splay_tree_key) decl,
1308 TREE_STATIC (decl)
1309 ? OMP_CLAUSE_DEFAULT_SHARED
1310 : OMP_CLAUSE_DEFAULT_PRIVATE);
1311 }
1312 }
1313 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1314 {
1315 /* The point here is to not sanitize static initializers. */
1316 bool no_sanitize_p = wtd->no_sanitize_p;
1317 wtd->no_sanitize_p = true;
1318 for (tree decl = BIND_EXPR_VARS (stmt);
1319 decl;
1320 decl = DECL_CHAIN (decl))
1321 if (VAR_P (decl)
1322 && TREE_STATIC (decl)
1323 && DECL_INITIAL (decl))
1324 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1325 wtd->no_sanitize_p = no_sanitize_p;
1326 }
1327 wtd->bind_expr_stack.safe_push (stmt);
1328 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1329 cp_genericize_r, data, NULL);
1330 wtd->bind_expr_stack.pop ();
1331 break;
1332
1333 case USING_STMT:
1334 {
1335 tree block = NULL_TREE;
1336
1337 /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
1338 BLOCK, and append an IMPORTED_DECL to its
1339 BLOCK_VARS chain. */
1340 if (wtd->bind_expr_stack.exists ())
1341 {
1342 int i;
1343 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1344 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1345 break;
1346 }
1347 if (block)
1348 {
1349 tree decl = TREE_OPERAND (stmt, 0);
1350 gcc_assert (decl);
1351
1352 if (undeduced_auto_decl (decl))
1353 /* Omit from the GENERIC, the back-end can't handle it. */;
1354 else
1355 {
1356 tree using_directive = make_node (IMPORTED_DECL);
1357 TREE_TYPE (using_directive) = void_type_node;
1358
1359 IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1360 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1361 BLOCK_VARS (block) = using_directive;
1362 }
1363 }
1364 /* The USING_STMT won't appear in GENERIC. */
1365 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1366 *walk_subtrees = 0;
1367 }
1368 break;
1369
1370 case DECL_EXPR:
1371 if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1372 {
1373 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1374 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1375 *walk_subtrees = 0;
1376 }
1377 else
1378 {
1379 tree d = DECL_EXPR_DECL (stmt);
1380 if (VAR_P (d))
1381 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1382 }
1383 break;
1384
1385 case OMP_PARALLEL:
1386 case OMP_TASK:
1387 case OMP_TASKLOOP:
1388 {
1389 struct cp_genericize_omp_taskreg omp_ctx;
1390 tree c, decl;
1391 splay_tree_node n;
1392
1393 *walk_subtrees = 0;
1394 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1395 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1396 omp_ctx.default_shared = omp_ctx.is_parallel;
1397 omp_ctx.outer = wtd->omp_ctx;
1398 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1399 wtd->omp_ctx = &omp_ctx;
1400 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1401 switch (OMP_CLAUSE_CODE (c))
1402 {
1403 case OMP_CLAUSE_SHARED:
1404 case OMP_CLAUSE_PRIVATE:
1405 case OMP_CLAUSE_FIRSTPRIVATE:
1406 case OMP_CLAUSE_LASTPRIVATE:
1407 decl = OMP_CLAUSE_DECL (c);
1408 if (decl == error_mark_node || !omp_var_to_track (decl))
1409 break;
1410 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1411 if (n != NULL)
1412 break;
1413 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1414 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1415 ? OMP_CLAUSE_DEFAULT_SHARED
1416 : OMP_CLAUSE_DEFAULT_PRIVATE);
1417 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1418 omp_cxx_notice_variable (omp_ctx.outer, decl);
1419 break;
1420 case OMP_CLAUSE_DEFAULT:
1421 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1422 omp_ctx.default_shared = true;
1423 default:
1424 break;
1425 }
1426 if (TREE_CODE (stmt) == OMP_TASKLOOP)
1427 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1428 else
1429 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1430 wtd->omp_ctx = omp_ctx.outer;
1431 splay_tree_delete (omp_ctx.variables);
1432 }
1433 break;
1434
1435 case TRY_BLOCK:
1436 {
1437 *walk_subtrees = 0;
1438 tree try_block = wtd->try_block;
1439 wtd->try_block = stmt;
1440 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1441 wtd->try_block = try_block;
1442 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1443 }
1444 break;
1445
1446 case MUST_NOT_THROW_EXPR:
1447 /* MUST_NOT_THROW_COND might be something else with TM. */
1448 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1449 {
1450 *walk_subtrees = 0;
1451 tree try_block = wtd->try_block;
1452 wtd->try_block = stmt;
1453 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1454 wtd->try_block = try_block;
1455 }
1456 break;
1457
1458 case THROW_EXPR:
1459 {
1460 location_t loc = location_of (stmt);
1461 if (TREE_NO_WARNING (stmt))
1462 /* Never mind. */;
1463 else if (wtd->try_block)
1464 {
1465 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1466 {
1467 auto_diagnostic_group d;
1468 if (warning_at (loc, OPT_Wterminate,
1469 "throw will always call terminate()")
1470 && cxx_dialect >= cxx11
1471 && DECL_DESTRUCTOR_P (current_function_decl))
1472 inform (loc, "in C++11 destructors default to noexcept");
1473 }
1474 }
1475 else
1476 {
1477 if (warn_cxx11_compat && cxx_dialect < cxx11
1478 && DECL_DESTRUCTOR_P (current_function_decl)
1479 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1480 == NULL_TREE)
1481 && (get_defaulted_eh_spec (current_function_decl)
1482 == empty_except_spec))
1483 warning_at (loc, OPT_Wc__11_compat,
1484 "in C++11 this throw will terminate because "
1485 "destructors default to noexcept");
1486 }
1487 }
1488 break;
1489
1490 case CONVERT_EXPR:
1491 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1492 break;
1493
1494 case FOR_STMT:
1495 genericize_for_stmt (stmt_p, walk_subtrees, data);
1496 break;
1497
1498 case WHILE_STMT:
1499 genericize_while_stmt (stmt_p, walk_subtrees, data);
1500 break;
1501
1502 case DO_STMT:
1503 genericize_do_stmt (stmt_p, walk_subtrees, data);
1504 break;
1505
1506 case SWITCH_STMT:
1507 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1508 break;
1509
1510 case CONTINUE_STMT:
1511 genericize_continue_stmt (stmt_p);
1512 break;
1513
1514 case BREAK_STMT:
1515 genericize_break_stmt (stmt_p);
1516 break;
1517
1518 case OMP_FOR:
1519 case OMP_SIMD:
1520 case OMP_DISTRIBUTE:
1521 case OACC_LOOP:
1522 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1523 break;
1524
1525 case PTRMEM_CST:
1526 /* By the time we get here we're handing off to the back end, so we don't
1527 need or want to preserve PTRMEM_CST anymore. */
1528 *stmt_p = cplus_expand_constant (stmt);
1529 *walk_subtrees = 0;
1530 break;
1531
1532 case MEM_REF:
1533 /* For MEM_REF, make sure not to sanitize the second operand even
1534 if it has reference type. It is just an offset with a type
1535 holding other information. There is no other processing we
1536 need to do for INTEGER_CSTs, so just ignore the second argument
1537 unconditionally. */
1538 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1539 *walk_subtrees = 0;
1540 break;
1541
1542 case NOP_EXPR:
1543 if (!wtd->no_sanitize_p
1544 && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1545 && TYPE_REF_P (TREE_TYPE (stmt)))
1546 ubsan_maybe_instrument_reference (stmt_p);
1547 break;
1548
1549 case CALL_EXPR:
1550 if (!wtd->no_sanitize_p
1551 && sanitize_flags_p ((SANITIZE_NULL
1552 | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1553 {
1554 tree fn = CALL_EXPR_FN (stmt);
1555 if (fn != NULL_TREE
1556 && !error_operand_p (fn)
1557 && INDIRECT_TYPE_P (TREE_TYPE (fn))
1558 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1559 {
1560 bool is_ctor
1561 = TREE_CODE (fn) == ADDR_EXPR
1562 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1563 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1564 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1565 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1566 if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1567 cp_ubsan_maybe_instrument_member_call (stmt);
1568 }
1569 else if (fn == NULL_TREE
1570 && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1571 && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1572 && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1573 *walk_subtrees = 0;
1574 }
1575 /* Fall through. */
1576 case AGGR_INIT_EXPR:
1577 /* For calls to a multi-versioned function, overload resolution
1578 returns the function with the highest target priority, that is,
1579 the version that will be checked for dispatching first. If this
1580 version is inlinable, a direct call to it can be made;
1581 otherwise the call should go through the dispatcher. */
1582 {
1583 tree fn = cp_get_callee_fndecl_nofold (stmt);
1584 if (fn && DECL_FUNCTION_VERSIONED (fn)
1585 && (current_function_decl == NULL
1586 || !targetm.target_option.can_inline_p (current_function_decl,
1587 fn)))
1588 if (tree dis = get_function_version_dispatcher (fn))
1589 {
1590 mark_versions_used (dis);
1591 dis = build_address (dis);
1592 if (TREE_CODE (stmt) == CALL_EXPR)
1593 CALL_EXPR_FN (stmt) = dis;
1594 else
1595 AGGR_INIT_EXPR_FN (stmt) = dis;
1596 }
1597 }
1598 break;
1599
1600 case TARGET_EXPR:
1601 if (TARGET_EXPR_INITIAL (stmt)
1602 && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1603 && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1604 TARGET_EXPR_NO_ELIDE (stmt) = 1;
1605 break;
1606
1607 default:
1608 if (IS_TYPE_OR_DECL_P (stmt))
1609 *walk_subtrees = 0;
1610 break;
1611 }
1612
1613 p_set->add (*stmt_p);
1614
1615 return NULL;
1616 }
1617
1618 /* Lower C++ front end trees to GENERIC in T_P. */
1619
1620 static void
1621 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1622 {
1623 struct cp_genericize_data wtd;
1624
1625 wtd.p_set = new hash_set<tree>;
1626 wtd.bind_expr_stack.create (0);
1627 wtd.omp_ctx = NULL;
1628 wtd.try_block = NULL_TREE;
1629 wtd.no_sanitize_p = false;
1630 wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1631 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1632 delete wtd.p_set;
1633 wtd.bind_expr_stack.release ();
1634 if (sanitize_flags_p (SANITIZE_VPTR))
1635 cp_ubsan_instrument_member_accesses (t_p);
1636 }
1637
1638 /* If a non-void function doesn't obviously end with a return,
1639 add ubsan instrumentation code to verify at runtime that it
1640 does return. If -fsanitize=return is not enabled, instrument
1641 __builtin_unreachable instead. */
1642
1643 static void
1644 cp_maybe_instrument_return (tree fndecl)
1645 {
1646 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1647 || DECL_CONSTRUCTOR_P (fndecl)
1648 || DECL_DESTRUCTOR_P (fndecl)
1649 || !targetm.warn_func_return (fndecl))
1650 return;
1651
1652 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1653 /* Don't add __builtin_unreachable () when not optimizing; it will not
1654 enable any optimizations then and would just break code with UB.
1655 Don't add it under -fsanitize=unreachable -fno-sanitize=return
1656 either: UBSan covers this via ubsan_instrument_return, with enough
1657 information to report a location, whereas the bare
1658 __builtin_unreachable () emitted when return sanitization is
1659 disabled gives a hard-to-understand runtime error without one. */
1660 && (!optimize
1661 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1662 return;
1663
1664 tree t = DECL_SAVED_TREE (fndecl);
1665 while (t)
1666 {
1667 switch (TREE_CODE (t))
1668 {
1669 case BIND_EXPR:
1670 t = BIND_EXPR_BODY (t);
1671 continue;
1672 case TRY_FINALLY_EXPR:
1673 case CLEANUP_POINT_EXPR:
1674 t = TREE_OPERAND (t, 0);
1675 continue;
1676 case STATEMENT_LIST:
1677 {
1678 tree_stmt_iterator i = tsi_last (t);
1679 while (!tsi_end_p (i))
1680 {
1681 tree p = tsi_stmt (i);
1682 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1683 break;
1684 tsi_prev (&i);
1685 }
1686 if (!tsi_end_p (i))
1687 {
1688 t = tsi_stmt (i);
1689 continue;
1690 }
1691 }
1692 break;
1693 case RETURN_EXPR:
1694 return;
1695 default:
1696 break;
1697 }
1698 break;
1699 }
1700 if (t == NULL_TREE)
1701 return;
1702 tree *p = &DECL_SAVED_TREE (fndecl);
1703 if (TREE_CODE (*p) == BIND_EXPR)
1704 p = &BIND_EXPR_BODY (*p);
1705
1706 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1707 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1708 t = ubsan_instrument_return (loc);
1709 else
1710 {
1711 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1712 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1713 }
1714
1715 append_to_statement_list (t, p);
1716 }
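
/* For instance (illustration only),

int f (int x) { if (x) return 1; }

does not obviously end in a return, so with -fsanitize=return the
UBSan runtime check is appended to the function body; otherwise, when
optimizing, a __builtin_unreachable () call marks the fall-off path. */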
1717
1718 void
1719 cp_genericize (tree fndecl)
1720 {
1721 tree t;
1722
1723 /* Fix up the types of parms passed by invisible reference. */
1724 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1725 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1726 {
1727 /* If a function's arguments are copied to create a thunk,
1728 then DECL_BY_REFERENCE will be set -- but the type of the
1729 argument will be a pointer type, so we will never get
1730 here. */
1731 gcc_assert (!DECL_BY_REFERENCE (t));
1732 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1733 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1734 DECL_BY_REFERENCE (t) = 1;
1735 TREE_ADDRESSABLE (t) = 0;
1736 relayout_decl (t);
1737 }
1738
1739 /* Do the same for the return value. */
1740 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1741 {
1742 t = DECL_RESULT (fndecl);
1743 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1744 DECL_BY_REFERENCE (t) = 1;
1745 TREE_ADDRESSABLE (t) = 0;
1746 relayout_decl (t);
1747 if (DECL_NAME (t))
1748 {
1749 /* Adjust DECL_VALUE_EXPR of the original var. */
1750 tree outer = outer_curly_brace_block (current_function_decl);
1751 tree var;
1752
1753 if (outer)
1754 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1755 if (VAR_P (var)
1756 && DECL_NAME (t) == DECL_NAME (var)
1757 && DECL_HAS_VALUE_EXPR_P (var)
1758 && DECL_VALUE_EXPR (var) == t)
1759 {
1760 tree val = convert_from_reference (t);
1761 SET_DECL_VALUE_EXPR (var, val);
1762 break;
1763 }
1764 }
1765 }
1766
1767 /* If we're a clone, the body is already GIMPLE. */
1768 if (DECL_CLONED_FUNCTION_P (fndecl))
1769 return;
1770
1771 /* Allow cp_genericize calls to be nested. */
1772 tree save_bc_label[2];
1773 save_bc_label[bc_break] = bc_label[bc_break];
1774 save_bc_label[bc_continue] = bc_label[bc_continue];
1775 bc_label[bc_break] = NULL_TREE;
1776 bc_label[bc_continue] = NULL_TREE;
1777
1778 /* We do want to see every occurrence of the parms, so we can't just use
1779 walk_tree's hash functionality. */
1780 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1781
1782 cp_maybe_instrument_return (fndecl);
1783
1784 /* Do everything else. */
1785 c_genericize (fndecl);
1786
1787 gcc_assert (bc_label[bc_break] == NULL);
1788 gcc_assert (bc_label[bc_continue] == NULL);
1789 bc_label[bc_break] = save_bc_label[bc_break];
1790 bc_label[bc_continue] = save_bc_label[bc_continue];
1791 }
1792 \f
1793 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
1794 NULL if there is in fact nothing to do. ARG2 may be null if FN
1795 actually only takes one argument. */
1796
1797 static tree
1798 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1799 {
1800 tree defparm, parm, t;
1801 int i = 0;
1802 int nargs;
1803 tree *argarray;
1804
1805 if (fn == NULL)
1806 return NULL;
1807
1808 nargs = list_length (DECL_ARGUMENTS (fn));
1809 argarray = XALLOCAVEC (tree, nargs);
1810
1811 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1812 if (arg2)
1813 defparm = TREE_CHAIN (defparm);
1814
1815 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1816 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1817 {
1818 tree inner_type = TREE_TYPE (arg1);
1819 tree start1, end1, p1;
1820 tree start2 = NULL, p2 = NULL;
1821 tree ret = NULL, lab;
1822
1823 start1 = arg1;
1824 start2 = arg2;
1825 do
1826 {
1827 inner_type = TREE_TYPE (inner_type);
1828 start1 = build4 (ARRAY_REF, inner_type, start1,
1829 size_zero_node, NULL, NULL);
1830 if (arg2)
1831 start2 = build4 (ARRAY_REF, inner_type, start2,
1832 size_zero_node, NULL, NULL);
1833 }
1834 while (TREE_CODE (inner_type) == ARRAY_TYPE);
1835 start1 = build_fold_addr_expr_loc (input_location, start1);
1836 if (arg2)
1837 start2 = build_fold_addr_expr_loc (input_location, start2);
1838
1839 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1840 end1 = fold_build_pointer_plus (start1, end1);
1841
1842 p1 = create_tmp_var (TREE_TYPE (start1));
1843 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1844 append_to_statement_list (t, &ret);
1845
1846 if (arg2)
1847 {
1848 p2 = create_tmp_var (TREE_TYPE (start2));
1849 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1850 append_to_statement_list (t, &ret);
1851 }
1852
1853 lab = create_artificial_label (input_location);
1854 t = build1 (LABEL_EXPR, void_type_node, lab);
1855 append_to_statement_list (t, &ret);
1856
1857 argarray[i++] = p1;
1858 if (arg2)
1859 argarray[i++] = p2;
1860 /* Handle default arguments. */
1861 for (parm = defparm; parm && parm != void_list_node;
1862 parm = TREE_CHAIN (parm), i++)
1863 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1864 TREE_PURPOSE (parm), fn,
1865 i - is_method, tf_warning_or_error);
1866 t = build_call_a (fn, i, argarray);
1867 t = fold_convert (void_type_node, t);
1868 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1869 append_to_statement_list (t, &ret);
1870
1871 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1872 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1873 append_to_statement_list (t, &ret);
1874
1875 if (arg2)
1876 {
1877 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1878 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1879 append_to_statement_list (t, &ret);
1880 }
1881
1882 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1883 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1884 append_to_statement_list (t, &ret);
1885
1886 return ret;
1887 }
1888 else
1889 {
1890 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1891 if (arg2)
1892 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1893 /* Handle default arguments. */
1894 for (parm = defparm; parm && parm != void_list_node;
1895 parm = TREE_CHAIN (parm), i++)
1896 argarray[i] = convert_default_arg (TREE_VALUE (parm),
1897 TREE_PURPOSE (parm), fn,
1898 i - is_method, tf_warning_or_error);
1899 t = build_call_a (fn, i, argarray);
1900 t = fold_convert (void_type_node, t);
1901 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1902 }
1903 }
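
/* A note on the layout these helpers assume: CP_OMP_CLAUSE_INFO is a
   3-element TREE_VEC where element 0 holds the (default or copy)
   constructor, element 1 the destructor, and element 2 the copy
   assignment operator; any element may be NULL. */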
1904
1905 /* Return code to initialize DECL with its default constructor, or
1906 NULL if there's nothing to do. */
1907
1908 tree
1909 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1910 {
1911 tree info = CP_OMP_CLAUSE_INFO (clause);
1912 tree ret = NULL;
1913
1914 if (info)
1915 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1916
1917 return ret;
1918 }
1919
1920 /* Return code to initialize DST with a copy constructor from SRC. */
1921
1922 tree
1923 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1924 {
1925 tree info = CP_OMP_CLAUSE_INFO (clause);
1926 tree ret = NULL;
1927
1928 if (info)
1929 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1930 if (ret == NULL)
1931 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1932
1933 return ret;
1934 }
1935
1936 /* Similarly, except use an assignment operator instead. */
1937
1938 tree
1939 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1940 {
1941 tree info = CP_OMP_CLAUSE_INFO (clause);
1942 tree ret = NULL;
1943
1944 if (info)
1945 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1946 if (ret == NULL)
1947 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1948
1949 return ret;
1950 }
1951
1952 /* Return code to destroy DECL. */
1953
1954 tree
1955 cxx_omp_clause_dtor (tree clause, tree decl)
1956 {
1957 tree info = CP_OMP_CLAUSE_INFO (clause);
1958 tree ret = NULL;
1959
1960 if (info)
1961 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1962
1963 return ret;
1964 }
1965
1966 /* True if OpenMP should privatize what this DECL points to rather
1967 than the DECL itself. */
1968
1969 bool
1970 cxx_omp_privatize_by_reference (const_tree decl)
1971 {
1972 return (TYPE_REF_P (TREE_TYPE (decl))
1973 || is_invisiref_parm (decl));
1974 }
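
/* For instance (illustrative only), given

     int x = 0;  int &r = x;
     #pragma omp parallel private (r)

   it is the int that R refers to which is privatized, not the reference
   itself; likewise for parameters passed by invisible reference. */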
1975
1976 /* Return true if DECL is a const-qualified variable with no mutable members. */
1977 bool
1978 cxx_omp_const_qual_no_mutable (tree decl)
1979 {
1980 tree type = TREE_TYPE (decl);
1981 if (TYPE_REF_P (type))
1982 {
1983 if (!is_invisiref_parm (decl))
1984 return false;
1985 type = TREE_TYPE (type);
1986
1987 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1988 {
1989 /* NVR doesn't preserve const qualification of the
1990 variable's type. */
1991 tree outer = outer_curly_brace_block (current_function_decl);
1992 tree var;
1993
1994 if (outer)
1995 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1996 if (VAR_P (var)
1997 && DECL_NAME (decl) == DECL_NAME (var)
1998 && (TYPE_MAIN_VARIANT (type)
1999 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2000 {
2001 if (TYPE_READONLY (TREE_TYPE (var)))
2002 type = TREE_TYPE (var);
2003 break;
2004 }
2005 }
2006 }
2007
2008 if (type == error_mark_node)
2009 return false;
2010
2011 /* Variables with const-qualified type having no mutable member
2012 are predetermined shared. */
2013 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2014 return true;
2015
2016 return false;
2017 }
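
/* Illustrative examples (not compiled here):

     const int c = 1;                          // true: predetermined shared
     struct S { mutable int m; };  const S s;  // false: has a mutable member */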
2018
2019 /* True if OpenMP sharing attribute of DECL is predetermined. */
2020
2021 enum omp_clause_default_kind
2022 cxx_omp_predetermined_sharing_1 (tree decl)
2023 {
2024 /* Static data members are predetermined shared. */
2025 if (TREE_STATIC (decl))
2026 {
2027 tree ctx = CP_DECL_CONTEXT (decl);
2028 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2029 return OMP_CLAUSE_DEFAULT_SHARED;
2030 }
2031
2032 /* 'this' may not be specified in data-sharing clauses; still, we need
2033 to predetermine it as firstprivate. */
2034 if (decl == current_class_ptr)
2035 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2036
2037 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2038 }
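
/* Illustrative examples (not compiled here): a static data member such
   as 's' in 'struct A { static int s; };' is predetermined shared, and
   inside a member function 'this' is predetermined firstprivate. */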
2039
2040 /* Likewise, but also include the artificial vars. We don't want to
2041 disallow mentioning the artificial vars in explicit clauses, since we
2042 use them e.g. for loop constructs with random access iterators other
2043 than pointers, but during gimplification we want to treat them as
2044 predetermined. */
2045
2046 enum omp_clause_default_kind
2047 cxx_omp_predetermined_sharing (tree decl)
2048 {
2049 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2050 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2051 return ret;
2052
2053 /* Predetermine artificial variables holding integral values; those
2054 are usually the result of gimplify_one_sizepos or SAVE_EXPR
2055 gimplification. */
2056 if (VAR_P (decl)
2057 && DECL_ARTIFICIAL (decl)
2058 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2059 && !(DECL_LANG_SPECIFIC (decl)
2060 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2061 return OMP_CLAUSE_DEFAULT_SHARED;
2062
2063 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2064 }
2065
2066 /* Finalize an implicitly determined clause. */
2067
2068 void
2069 cxx_omp_finish_clause (tree c, gimple_seq *)
2070 {
2071 tree decl, inner_type;
2072 bool make_shared = false;
2073
2074 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2075 return;
2076
2077 decl = OMP_CLAUSE_DECL (c);
2078 decl = require_complete_type (decl);
2079 inner_type = TREE_TYPE (decl);
2080 if (decl == error_mark_node)
2081 make_shared = true;
2082 else if (TYPE_REF_P (TREE_TYPE (decl)))
2083 inner_type = TREE_TYPE (inner_type);
2084
2085 /* We're interested in the base element, not arrays. */
2086 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2087 inner_type = TREE_TYPE (inner_type);
2088
2089 /* Check for special function availability by building a call to one.
2090 Save the results, because later we won't be in the right context
2091 for making these queries. */
2092 if (!make_shared
2093 && CLASS_TYPE_P (inner_type)
2094 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2095 make_shared = true;
2096
2097 if (make_shared)
2098 {
2099 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2100 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2101 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2102 }
2103 }
2104
2105 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2106 disregarded in OpenMP construct, because it is going to be
2107 remapped during OpenMP lowering. SHARED is true if DECL
2108 is going to be shared, false if it is going to be privatized. */
2109
2110 bool
2111 cxx_omp_disregard_value_expr (tree decl, bool shared)
2112 {
2113 return !shared
2114 && VAR_P (decl)
2115 && DECL_HAS_VALUE_EXPR_P (decl)
2116 && DECL_ARTIFICIAL (decl)
2117 && DECL_LANG_SPECIFIC (decl)
2118 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2119 }
2120
2121 /* Fold expression X which is used as an rvalue if RVAL is true. */
2122
2123 tree
2124 cp_fold_maybe_rvalue (tree x, bool rval)
2125 {
2126 while (true)
2127 {
2128 x = cp_fold (x);
2129 if (rval)
2130 x = mark_rvalue_use (x);
2131 if (rval && DECL_P (x)
2132 && !TYPE_REF_P (TREE_TYPE (x)))
2133 {
2134 tree v = decl_constant_value (x);
2135 if (v != x && v != error_mark_node)
2136 {
2137 x = v;
2138 continue;
2139 }
2140 }
2141 break;
2142 }
2143 return x;
2144 }
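
/* For instance (illustrative only), given 'const int n = 42;', folding
   'n' as an rvalue replaces it with the INTEGER_CST 42 via
   decl_constant_value, and the loop then folds that result further if
   possible. */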
2145
2146 /* Fold expression X which is used as an rvalue. */
2147
2148 tree
2149 cp_fold_rvalue (tree x)
2150 {
2151 return cp_fold_maybe_rvalue (x, true);
2152 }
2153
2154 /* Perform folding on expression X. */
2155
2156 tree
2157 cp_fully_fold (tree x)
2158 {
2159 if (processing_template_decl)
2160 return x;
2161 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2162 have to call both. */
2163 if (cxx_dialect >= cxx11)
2164 {
2165 x = maybe_constant_value (x);
2166 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2167 a TARGET_EXPR; undo that here. */
2168 if (TREE_CODE (x) == TARGET_EXPR)
2169 x = TARGET_EXPR_INITIAL (x);
2170 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2171 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2172 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2173 x = TREE_OPERAND (x, 0);
2174 }
2175 return cp_fold_rvalue (x);
2176 }
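
/* For instance (illustrative only), with
   'constexpr int sq (int i) { return i * i; }', in C++11 and later the
   call 'sq (3)' is reduced to the constant 9 by the maybe_constant_value
   call in cp_fully_fold before cp_fold_rvalue runs. */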
2177
2178 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2179 in some cases. */
2180
2181 tree
2182 cp_fully_fold_init (tree x)
2183 {
2184 if (processing_template_decl)
2185 return x;
2186 x = cp_fully_fold (x);
2187 hash_set<tree> pset;
2188 cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2189 return x;
2190 }
2191
2192 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2193 and certain changes are made to the folding done. Or should be (FIXME). We
2194 never touch maybe_const, as it is only used for the C front-end
2195 C_MAYBE_CONST_EXPR. */
2196
2197 tree
2198 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2199 {
2200 return cp_fold_maybe_rvalue (x, !lval);
2201 }
2202
2203 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2204
2205 /* Dispose of the whole FOLD_CACHE. */
2206
2207 void
2208 clear_fold_cache (void)
2209 {
2210 if (fold_cache != NULL)
2211 fold_cache->empty ();
2212 }
2213
2214 /* Try to fold an expression X.
2215 To avoid combinatorial explosion, folding results are kept in fold_cache.
2216 If X is invalid, we don't fold at all.
2217 For performance reasons we don't cache expressions representing a
2218 declaration or constant.
2219 Returns X or its folded variant. */
2220
2221 static tree
2222 cp_fold (tree x)
2223 {
2224 tree op0, op1, op2, op3;
2225 tree org_x = x, r = NULL_TREE;
2226 enum tree_code code;
2227 location_t loc;
2228 bool rval_ops = true;
2229
2230 if (!x || x == error_mark_node)
2231 return x;
2232
2233 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2234 return x;
2235
2236 /* Don't bother to cache DECLs or constants. */
2237 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2238 return x;
2239
2240 if (fold_cache == NULL)
2241 fold_cache = hash_map<tree, tree>::create_ggc (101);
2242
2243 if (tree *cached = fold_cache->get (x))
2244 return *cached;
2245
2246 code = TREE_CODE (x);
2247 switch (code)
2248 {
2249 case CLEANUP_POINT_EXPR:
2250 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2251 effects. */
2252 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2253 if (!TREE_SIDE_EFFECTS (r))
2254 x = r;
2255 break;
2256
2257 case SIZEOF_EXPR:
2258 x = fold_sizeof_expr (x);
2259 break;
2260
2261 case VIEW_CONVERT_EXPR:
2262 rval_ops = false;
2263 /* FALLTHRU */
2264 case CONVERT_EXPR:
2265 case NOP_EXPR:
2266 case NON_LVALUE_EXPR:
2267
2268 if (VOID_TYPE_P (TREE_TYPE (x)))
2269 {
2270 /* This is just to make sure we don't end up with casts to
2271 void from error_mark_node. If we just return x, then
2272 cp_fold_r might fold the operand into error_mark_node and
2273 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2274 during gimplification doesn't like such casts.
2275 Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2276 folded operand will already be in the caches, and if called from
2277 cp_fold_r it will be modified in place. */
2278 op0 = cp_fold (TREE_OPERAND (x, 0));
2279 if (op0 == error_mark_node)
2280 x = error_mark_node;
2281 break;
2282 }
2283
2284 loc = EXPR_LOCATION (x);
2285 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2286
2287 if (code == CONVERT_EXPR
2288 && SCALAR_TYPE_P (TREE_TYPE (x))
2289 && op0 != void_node)
2290 /* During parsing we used convert_to_*_nofold; re-convert now using the
2291 folding variants, since fold() doesn't do those transformations. */
2292 x = fold (convert (TREE_TYPE (x), op0));
2293 else if (op0 != TREE_OPERAND (x, 0))
2294 {
2295 if (op0 == error_mark_node)
2296 x = error_mark_node;
2297 else
2298 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2299 }
2300 else
2301 x = fold (x);
2302
2303 /* Conversion of an out-of-range value has implementation-defined
2304 behavior; the language considers it different from arithmetic
2305 overflow, which is undefined. */
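/* For example (illustrative only), '(signed char) 300' with a constant
   operand is merely implementation-defined, so any TREE_OVERFLOW set by
   such a conversion is cleared below. */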
2306 if (TREE_CODE (op0) == INTEGER_CST
2307 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2308 TREE_OVERFLOW (x) = false;
2309
2310 break;
2311
2312 case INDIRECT_REF:
2313 /* We don't need the decltype(auto) obfuscation anymore. */
2314 if (REF_PARENTHESIZED_P (x))
2315 {
2316 tree p = maybe_undo_parenthesized_ref (x);
2317 return cp_fold (p);
2318 }
2319 goto unary;
2320
2321 case ADDR_EXPR:
2322 loc = EXPR_LOCATION (x);
2323 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2324
2325 /* Cope with user tricks that amount to offsetof. */
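/* For example (illustrative only), '&((struct S *) 0)->f', a hand-rolled
   offsetof, is detected here and folded via fold_offsetof. */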
2326 if (op0 != error_mark_node
2327 && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2328 && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2329 {
2330 tree val = get_base_address (op0);
2331 if (val
2332 && INDIRECT_REF_P (val)
2333 && COMPLETE_TYPE_P (TREE_TYPE (val))
2334 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2335 {
2336 val = TREE_OPERAND (val, 0);
2337 STRIP_NOPS (val);
2338 val = maybe_constant_value (val);
2339 if (TREE_CODE (val) == INTEGER_CST)
2340 return fold_offsetof (op0, TREE_TYPE (x));
2341 }
2342 }
2343 goto finish_unary;
2344
2345 case REALPART_EXPR:
2346 case IMAGPART_EXPR:
2347 rval_ops = false;
2348 /* FALLTHRU */
2349 case CONJ_EXPR:
2350 case FIX_TRUNC_EXPR:
2351 case FLOAT_EXPR:
2352 case NEGATE_EXPR:
2353 case ABS_EXPR:
2354 case ABSU_EXPR:
2355 case BIT_NOT_EXPR:
2356 case TRUTH_NOT_EXPR:
2357 case FIXED_CONVERT_EXPR:
2358 unary:
2359
2360 loc = EXPR_LOCATION (x);
2361 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2362
2363 finish_unary:
2364 if (op0 != TREE_OPERAND (x, 0))
2365 {
2366 if (op0 == error_mark_node)
2367 x = error_mark_node;
2368 else
2369 {
2370 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2371 if (code == INDIRECT_REF
2372 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2373 {
2374 TREE_READONLY (x) = TREE_READONLY (org_x);
2375 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2376 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2377 }
2378 }
2379 }
2380 else
2381 x = fold (x);
2382
2383 gcc_assert (TREE_CODE (x) != COND_EXPR
2384 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2385 break;
2386
2387 case UNARY_PLUS_EXPR:
2388 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2389 if (op0 == error_mark_node)
2390 x = error_mark_node;
2391 else
2392 x = fold_convert (TREE_TYPE (x), op0);
2393 break;
2394
2395 case POSTDECREMENT_EXPR:
2396 case POSTINCREMENT_EXPR:
2397 case INIT_EXPR:
2398 case PREDECREMENT_EXPR:
2399 case PREINCREMENT_EXPR:
2400 case COMPOUND_EXPR:
2401 case MODIFY_EXPR:
2402 rval_ops = false;
2403 /* FALLTHRU */
2404 case POINTER_PLUS_EXPR:
2405 case PLUS_EXPR:
2406 case POINTER_DIFF_EXPR:
2407 case MINUS_EXPR:
2408 case MULT_EXPR:
2409 case TRUNC_DIV_EXPR:
2410 case CEIL_DIV_EXPR:
2411 case FLOOR_DIV_EXPR:
2412 case ROUND_DIV_EXPR:
2413 case TRUNC_MOD_EXPR:
2414 case CEIL_MOD_EXPR:
2415 case ROUND_MOD_EXPR:
2416 case RDIV_EXPR:
2417 case EXACT_DIV_EXPR:
2418 case MIN_EXPR:
2419 case MAX_EXPR:
2420 case LSHIFT_EXPR:
2421 case RSHIFT_EXPR:
2422 case LROTATE_EXPR:
2423 case RROTATE_EXPR:
2424 case BIT_AND_EXPR:
2425 case BIT_IOR_EXPR:
2426 case BIT_XOR_EXPR:
2427 case TRUTH_AND_EXPR:
2428 case TRUTH_ANDIF_EXPR:
2429 case TRUTH_OR_EXPR:
2430 case TRUTH_ORIF_EXPR:
2431 case TRUTH_XOR_EXPR:
2432 case LT_EXPR: case LE_EXPR:
2433 case GT_EXPR: case GE_EXPR:
2434 case EQ_EXPR: case NE_EXPR:
2435 case UNORDERED_EXPR: case ORDERED_EXPR:
2436 case UNLT_EXPR: case UNLE_EXPR:
2437 case UNGT_EXPR: case UNGE_EXPR:
2438 case UNEQ_EXPR: case LTGT_EXPR:
2439 case RANGE_EXPR: case COMPLEX_EXPR:
2440
2441 loc = EXPR_LOCATION (x);
2442 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2443 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2444
2445 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2446 {
2447 if (op0 == error_mark_node || op1 == error_mark_node)
2448 x = error_mark_node;
2449 else
2450 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2451 }
2452 else
2453 x = fold (x);
2454
2455 /* This is only needed for -Wnonnull-compare and only if
2456 TREE_NO_WARNING (org_x), but to avoid having that option affect code
2457 generation, we always do it. */
2458 if (COMPARISON_CLASS_P (org_x))
2459 {
2460 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2461 ;
2462 else if (COMPARISON_CLASS_P (x))
2463 {
2464 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2465 TREE_NO_WARNING (x) = 1;
2466 }
2467 /* Otherwise give up on optimizing these, let GIMPLE folders
2468 optimize those later on. */
2469 else if (op0 != TREE_OPERAND (org_x, 0)
2470 || op1 != TREE_OPERAND (org_x, 1))
2471 {
2472 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2473 if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2474 TREE_NO_WARNING (x) = 1;
2475 }
2476 else
2477 x = org_x;
2478 }
2479 break;
2480
2481 case VEC_COND_EXPR:
2482 case COND_EXPR:
2483 loc = EXPR_LOCATION (x);
2484 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2485 op1 = cp_fold (TREE_OPERAND (x, 1));
2486 op2 = cp_fold (TREE_OPERAND (x, 2));
2487
2488 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2489 {
2490 warning_sentinel s (warn_int_in_bool_context);
2491 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2492 op1 = cp_truthvalue_conversion (op1);
2493 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2494 op2 = cp_truthvalue_conversion (op2);
2495 }
2496 else if (VOID_TYPE_P (TREE_TYPE (x)))
2497 {
2498 if (TREE_CODE (op0) == INTEGER_CST)
2499 {
2500 /* If the condition is constant, fold can fold away the COND_EXPR.
2501 Some statement-level uses of COND_EXPR have one of the branches
2502 NULL; supply empty statements so folding doesn't crash on them. */
2503 if (!op1)
2504 op1 = build_empty_stmt (loc);
2505 if (!op2)
2506 op2 = build_empty_stmt (loc);
2507 }
2508 else
2509 {
2510 /* Otherwise, don't bother folding a void condition, since
2511 it can't produce a constant value. */
2512 if (op0 != TREE_OPERAND (x, 0)
2513 || op1 != TREE_OPERAND (x, 1)
2514 || op2 != TREE_OPERAND (x, 2))
2515 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2516 break;
2517 }
2518 }
2519
2520 if (op0 != TREE_OPERAND (x, 0)
2521 || op1 != TREE_OPERAND (x, 1)
2522 || op2 != TREE_OPERAND (x, 2))
2523 {
2524 if (op0 == error_mark_node
2525 || op1 == error_mark_node
2526 || op2 == error_mark_node)
2527 x = error_mark_node;
2528 else
2529 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2530 }
2531 else
2532 x = fold (x);
2533
2534 /* A COND_EXPR might have incompatible types in branches if one or both
2535 arms are bitfields. If folding exposed such a branch, fix it up. */
2536 if (TREE_CODE (x) != code
2537 && x != error_mark_node
2538 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2539 x = fold_convert (TREE_TYPE (org_x), x);
2540
2541 break;
2542
2543 case CALL_EXPR:
2544 {
2545 int i, m, sv = optimize, nw = sv, changed = 0;
2546 tree callee = get_callee_fndecl (x);
2547
2548 /* Some built-in function calls will be evaluated at compile-time in
2549 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2550 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2551 if (callee && fndecl_built_in_p (callee) && !optimize
2552 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2553 && current_function_decl
2554 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2555 nw = 1;
2556
2557 /* Defer folding __builtin_is_constant_evaluated. */
2558 if (callee
2559 && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2560 BUILT_IN_FRONTEND))
2561 break;
2562
2563 x = copy_node (x);
2564
2565 m = call_expr_nargs (x);
2566 for (i = 0; i < m; i++)
2567 {
2568 r = cp_fold (CALL_EXPR_ARG (x, i));
2569 if (r != CALL_EXPR_ARG (x, i))
2570 {
2571 if (r == error_mark_node)
2572 {
2573 x = error_mark_node;
2574 break;
2575 }
2576 changed = 1;
2577 }
2578 CALL_EXPR_ARG (x, i) = r;
2579 }
2580 if (x == error_mark_node)
2581 break;
2582
2583 optimize = nw;
2584 r = fold (x);
2585 optimize = sv;
2586
2587 if (TREE_CODE (r) != CALL_EXPR)
2588 {
2589 x = cp_fold (r);
2590 break;
2591 }
2592
2593 optimize = nw;
2594
2595 /* Invoke maybe_constant_value for functions declared
2596 constexpr and not called with AGGR_INIT_EXPRs.
2597 TODO:
2598 Do constexpr expansion of expressions where the call itself is not
2599 constant, but the call followed by an INDIRECT_REF is. */
2600 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2601 && !flag_no_inline)
2602 r = maybe_constant_value (x);
2603 optimize = sv;
2604
2605 if (TREE_CODE (r) != CALL_EXPR)
2606 {
2607 if (DECL_CONSTRUCTOR_P (callee))
2608 {
2609 loc = EXPR_LOCATION (x);
2610 tree s = build_fold_indirect_ref_loc (loc,
2611 CALL_EXPR_ARG (x, 0));
2612 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2613 }
2614 x = r;
2615 break;
2616 }
2617
2618 if (!changed)
2619 x = org_x;
2620 break;
2621 }
2622
2623 case CONSTRUCTOR:
2624 {
2625 unsigned i;
2626 constructor_elt *p;
2627 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2628 vec<constructor_elt, va_gc> *nelts = NULL;
2629 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2630 {
2631 tree op = cp_fold (p->value);
2632 if (op != p->value)
2633 {
2634 if (op == error_mark_node)
2635 {
2636 x = error_mark_node;
2637 vec_free (nelts);
2638 break;
2639 }
2640 if (nelts == NULL)
2641 nelts = elts->copy ();
2642 (*nelts)[i].value = op;
2643 }
2644 }
2645 if (nelts)
2646 {
2647 x = build_constructor (TREE_TYPE (x), nelts);
2648 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2649 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2650 }
2651 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2652 x = fold (x);
2653 break;
2654 }
2655 case TREE_VEC:
2656 {
2657 bool changed = false;
2658 vec<tree, va_gc> *vec = make_tree_vector ();
2659 int i, n = TREE_VEC_LENGTH (x);
2660 vec_safe_reserve (vec, n);
2661
2662 for (i = 0; i < n; i++)
2663 {
2664 tree op = cp_fold (TREE_VEC_ELT (x, i));
2665 vec->quick_push (op);
2666 if (op != TREE_VEC_ELT (x, i))
2667 changed = true;
2668 }
2669
2670 if (changed)
2671 {
2672 r = copy_node (x);
2673 for (i = 0; i < n; i++)
2674 TREE_VEC_ELT (r, i) = (*vec)[i];
2675 x = r;
2676 }
2677
2678 release_tree_vector (vec);
2679 }
2680
2681 break;
2682
2683 case ARRAY_REF:
2684 case ARRAY_RANGE_REF:
2685
2686 loc = EXPR_LOCATION (x);
2687 op0 = cp_fold (TREE_OPERAND (x, 0));
2688 op1 = cp_fold (TREE_OPERAND (x, 1));
2689 op2 = cp_fold (TREE_OPERAND (x, 2));
2690 op3 = cp_fold (TREE_OPERAND (x, 3));
2691
2692 if (op0 != TREE_OPERAND (x, 0)
2693 || op1 != TREE_OPERAND (x, 1)
2694 || op2 != TREE_OPERAND (x, 2)
2695 || op3 != TREE_OPERAND (x, 3))
2696 {
2697 if (op0 == error_mark_node
2698 || op1 == error_mark_node
2699 || op2 == error_mark_node
2700 || op3 == error_mark_node)
2701 x = error_mark_node;
2702 else
2703 {
2704 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2705 TREE_READONLY (x) = TREE_READONLY (org_x);
2706 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2707 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2708 }
2709 }
2710
2711 x = fold (x);
2712 break;
2713
2714 case SAVE_EXPR:
2715 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2716 folding, evaluates to an invariant. In that case there is no need to
2717 wrap this folded tree with a SAVE_EXPR. */
2718 r = cp_fold (TREE_OPERAND (x, 0));
2719 if (tree_invariant_p (r))
2720 x = r;
2721 break;
2722
2723 default:
2724 return org_x;
2725 }
2726
2727 fold_cache->put (org_x, x);
2728 /* Make sure we don't try to fold an already folded result again. */
2729 if (x != org_x)
2730 fold_cache->put (x, x);
2731
2732 return x;
2733 }
2734
2735 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
2736
2737 tree
2738 lookup_hotness_attribute (tree list)
2739 {
2740 for (; list; list = TREE_CHAIN (list))
2741 {
2742 tree name = get_attribute_name (list);
2743 if (is_attribute_p ("hot", name)
2744 || is_attribute_p ("cold", name)
2745 || is_attribute_p ("likely", name)
2746 || is_attribute_p ("unlikely", name))
2747 break;
2748 }
2749 return list;
2750 }
2751
2752 /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
2753
2754 static tree
2755 remove_hotness_attribute (tree list)
2756 {
2757 list = remove_attribute ("hot", list);
2758 list = remove_attribute ("cold", list);
2759 list = remove_attribute ("likely", list);
2760 list = remove_attribute ("unlikely", list);
2761 return list;
2762 }
2763
2764 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2765 PREDICT_EXPR. */
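
/* For example (illustrative only),

     if (cond) [[likely]]
       f ();

   emits a PREDICT_EXPR marking the branch as likely taken before the
   statement, and the attribute is then removed from STD_ATTRS. */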
2766
2767 tree
2768 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2769 {
2770 if (std_attrs == error_mark_node)
2771 return std_attrs;
2772 if (tree attr = lookup_hotness_attribute (std_attrs))
2773 {
2774 tree name = get_attribute_name (attr);
2775 bool hot = (is_attribute_p ("hot", name)
2776 || is_attribute_p ("likely", name));
2777 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2778 hot ? TAKEN : NOT_TAKEN);
2779 SET_EXPR_LOCATION (pred, attrs_loc);
2780 add_stmt (pred);
2781 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2782 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2783 get_attribute_name (other), name);
2784 std_attrs = remove_hotness_attribute (std_attrs);
2785 }
2786 return std_attrs;
2787 }
2788
2789 #include "gt-cp-cp-gimplify.h"