]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/cp/cp-gimplify.c
tree-core.h: Include symtab.h.
[thirdparty/gcc.git] / gcc / cp / cp-gimplify.c
1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
2
3 Copyright (C) 2002-2015 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "function.h"
27 #include "predict.h"
28 #include "basic-block.h"
29 #include "tree.h"
30 #include "cp-tree.h"
31 #include "gimple.h"
32 #include "hard-reg-set.h"
33 #include "alias.h"
34 #include "stor-layout.h"
35 #include "c-family/c-common.h"
36 #include "tree-iterator.h"
37 #include "internal-fn.h"
38 #include "gimplify.h"
39 #include "flags.h"
40 #include "splay-tree.h"
41 #include "target.h"
42 #include "c-family/c-ubsan.h"
43 #include "cilk.h"
44 #include "gimplify.h"
45
46 /* Forward declarations. */
47
48 static tree cp_genericize_r (tree *, int *, void *);
49 static void cp_genericize_tree (tree*);
50
51 /* Local declarations. */
52
53 enum bc_t { bc_break = 0, bc_continue = 1 };
54
55 /* Stack of labels which are targets for "break" or "continue",
56 linked through TREE_CHAIN. */
57 static tree bc_label[2];
58
59 /* Begin a scope which can be exited by a break or continue statement. BC
60 indicates which.
61
62 Just creates a label with location LOCATION and pushes it into the current
63 context. */
64
65 static tree
66 begin_bc_block (enum bc_t bc, location_t location)
67 {
68 tree label = create_artificial_label (location);
69 DECL_CHAIN (label) = bc_label[bc];
70 bc_label[bc] = label;
71 if (bc == bc_break)
72 LABEL_DECL_BREAK (label) = true;
73 else
74 LABEL_DECL_CONTINUE (label) = true;
75 return label;
76 }
77
78 /* Finish a scope which can be exited by a break or continue statement.
79 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
80 an expression for the contents of the scope.
81
82 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
83 BLOCK. Otherwise, just forget the label. */
84
85 static void
86 finish_bc_block (tree *block, enum bc_t bc, tree label)
87 {
88 gcc_assert (label == bc_label[bc]);
89
90 if (TREE_USED (label))
91 append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
92 block);
93
94 bc_label[bc] = DECL_CHAIN (label);
95 DECL_CHAIN (label) = NULL_TREE;
96 }
97
98 /* Get the LABEL_EXPR to represent a break or continue statement
99 in the current block scope. BC indicates which. */
100
101 static tree
102 get_bc_label (enum bc_t bc)
103 {
104 tree label = bc_label[bc];
105
106 /* Mark the label used for finish_bc_block. */
107 TREE_USED (label) = 1;
108 return label;
109 }
110
111 /* Genericize a TRY_BLOCK. */
112
113 static void
114 genericize_try_block (tree *stmt_p)
115 {
116 tree body = TRY_STMTS (*stmt_p);
117 tree cleanup = TRY_HANDLERS (*stmt_p);
118
119 *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
120 }
121
122 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
123
124 static void
125 genericize_catch_block (tree *stmt_p)
126 {
127 tree type = HANDLER_TYPE (*stmt_p);
128 tree body = HANDLER_BODY (*stmt_p);
129
130 /* FIXME should the caught type go in TREE_TYPE? */
131 *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
132 }
133
134 /* A terser interface for building a representation of an exception
135 specification. */
136
137 static tree
138 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
139 {
140 tree t;
141
142 /* FIXME should the allowed types go in TREE_TYPE? */
143 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
144 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
145
146 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
147 append_to_statement_list (body, &TREE_OPERAND (t, 0));
148
149 return t;
150 }
151
152 /* Genericize an EH_SPEC_BLOCK by converting it to a
153 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
154
155 static void
156 genericize_eh_spec_block (tree *stmt_p)
157 {
158 tree body = EH_SPEC_STMTS (*stmt_p);
159 tree allowed = EH_SPEC_RAISES (*stmt_p);
160 tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
161
162 *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
163 TREE_NO_WARNING (*stmt_p) = true;
164 TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
165 }
166
167 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
168
169 static void
170 genericize_if_stmt (tree *stmt_p)
171 {
172 tree stmt, cond, then_, else_;
173 location_t locus = EXPR_LOCATION (*stmt_p);
174
175 stmt = *stmt_p;
176 cond = IF_COND (stmt);
177 then_ = THEN_CLAUSE (stmt);
178 else_ = ELSE_CLAUSE (stmt);
179
180 if (!then_)
181 then_ = build_empty_stmt (locus);
182 if (!else_)
183 else_ = build_empty_stmt (locus);
184
185 if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
186 stmt = then_;
187 else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
188 stmt = else_;
189 else
190 stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
191 if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
192 SET_EXPR_LOCATION (stmt, locus);
193 *stmt_p = stmt;
194 }
195
196 /* Build a generic representation of one of the C loop forms. COND is the
197 loop condition or NULL_TREE. BODY is the (possibly compound) statement
198 controlled by the loop. INCR is the increment expression of a for-loop,
199 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
200 evaluated before the loop body as in while and for loops, or after the
201 loop body as in do-while loops. */
202
203 static void
204 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
205 tree incr, bool cond_is_first, int *walk_subtrees,
206 void *data)
207 {
208 tree blab, clab;
209 tree exit = NULL;
210 tree stmt_list = NULL;
211
212 blab = begin_bc_block (bc_break, start_locus);
213 clab = begin_bc_block (bc_continue, start_locus);
214
215 if (incr && EXPR_P (incr))
216 SET_EXPR_LOCATION (incr, start_locus);
217
218 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
219 cp_walk_tree (&body, cp_genericize_r, data, NULL);
220 cp_walk_tree (&incr, cp_genericize_r, data, NULL);
221 *walk_subtrees = 0;
222
223 if (cond && TREE_CODE (cond) != INTEGER_CST)
224 {
225 /* If COND is constant, don't bother building an exit. If it's false,
226 we won't build a loop. If it's true, any exits are in the body. */
227 location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
228 exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
229 get_bc_label (bc_break));
230 exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
231 build_empty_stmt (cloc), exit);
232 }
233
234 if (exit && cond_is_first)
235 append_to_statement_list (exit, &stmt_list);
236 append_to_statement_list (body, &stmt_list);
237 finish_bc_block (&stmt_list, bc_continue, clab);
238 append_to_statement_list (incr, &stmt_list);
239 if (exit && !cond_is_first)
240 append_to_statement_list (exit, &stmt_list);
241
242 if (!stmt_list)
243 stmt_list = build_empty_stmt (start_locus);
244
245 tree loop;
246 if (cond && integer_zerop (cond))
247 {
248 if (cond_is_first)
249 loop = fold_build3_loc (start_locus, COND_EXPR,
250 void_type_node, cond, stmt_list,
251 build_empty_stmt (start_locus));
252 else
253 loop = stmt_list;
254 }
255 else
256 loop = build1_loc (start_locus, LOOP_EXPR, void_type_node, stmt_list);
257
258 stmt_list = NULL;
259 append_to_statement_list (loop, &stmt_list);
260 finish_bc_block (&stmt_list, bc_break, blab);
261 if (!stmt_list)
262 stmt_list = build_empty_stmt (start_locus);
263
264 *stmt_p = stmt_list;
265 }
266
267 /* Genericize a FOR_STMT node *STMT_P. */
268
269 static void
270 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
271 {
272 tree stmt = *stmt_p;
273 tree expr = NULL;
274 tree loop;
275 tree init = FOR_INIT_STMT (stmt);
276
277 if (init)
278 {
279 cp_walk_tree (&init, cp_genericize_r, data, NULL);
280 append_to_statement_list (init, &expr);
281 }
282
283 genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
284 FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
285 append_to_statement_list (loop, &expr);
286 if (expr == NULL_TREE)
287 expr = loop;
288 *stmt_p = expr;
289 }
290
291 /* Genericize a WHILE_STMT node *STMT_P. */
292
293 static void
294 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
295 {
296 tree stmt = *stmt_p;
297 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
298 WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
299 }
300
301 /* Genericize a DO_STMT node *STMT_P. */
302
303 static void
304 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
305 {
306 tree stmt = *stmt_p;
307 genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
308 DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
309 }
310
311 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
312
313 static void
314 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
315 {
316 tree stmt = *stmt_p;
317 tree break_block, body, cond, type;
318 location_t stmt_locus = EXPR_LOCATION (stmt);
319
320 break_block = begin_bc_block (bc_break, stmt_locus);
321
322 body = SWITCH_STMT_BODY (stmt);
323 if (!body)
324 body = build_empty_stmt (stmt_locus);
325 cond = SWITCH_STMT_COND (stmt);
326 type = SWITCH_STMT_TYPE (stmt);
327
328 cp_walk_tree (&body, cp_genericize_r, data, NULL);
329 cp_walk_tree (&cond, cp_genericize_r, data, NULL);
330 cp_walk_tree (&type, cp_genericize_r, data, NULL);
331 *walk_subtrees = 0;
332
333 *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
334 finish_bc_block (stmt_p, bc_break, break_block);
335 }
336
337 /* Genericize a CONTINUE_STMT node *STMT_P. */
338
339 static void
340 genericize_continue_stmt (tree *stmt_p)
341 {
342 tree stmt_list = NULL;
343 tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
344 tree label = get_bc_label (bc_continue);
345 location_t location = EXPR_LOCATION (*stmt_p);
346 tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
347 append_to_statement_list (pred, &stmt_list);
348 append_to_statement_list (jump, &stmt_list);
349 *stmt_p = stmt_list;
350 }
351
352 /* Genericize a BREAK_STMT node *STMT_P. */
353
354 static void
355 genericize_break_stmt (tree *stmt_p)
356 {
357 tree label = get_bc_label (bc_break);
358 location_t location = EXPR_LOCATION (*stmt_p);
359 *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
360 }
361
362 /* Genericize a OMP_FOR node *STMT_P. */
363
364 static void
365 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
366 {
367 tree stmt = *stmt_p;
368 location_t locus = EXPR_LOCATION (stmt);
369 tree clab = begin_bc_block (bc_continue, locus);
370
371 cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
372 cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
373 cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
374 cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
375 cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
376 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
377 *walk_subtrees = 0;
378
379 finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
380 }
381
382 /* Hook into the middle of gimplifying an OMP_FOR node. */
383
384 static enum gimplify_status
385 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
386 {
387 tree for_stmt = *expr_p;
388 gimple_seq seq = NULL;
389
390 /* Protect ourselves from recursion. */
391 if (OMP_FOR_GIMPLIFYING_P (for_stmt))
392 return GS_UNHANDLED;
393 OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
394
395 gimplify_and_add (for_stmt, &seq);
396 gimple_seq_add_seq (pre_p, seq);
397
398 OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
399
400 return GS_ALL_DONE;
401 }
402
403 /* Gimplify an EXPR_STMT node. */
404
405 static void
406 gimplify_expr_stmt (tree *stmt_p)
407 {
408 tree stmt = EXPR_STMT_EXPR (*stmt_p);
409
410 if (stmt == error_mark_node)
411 stmt = NULL;
412
413 /* Gimplification of a statement expression will nullify the
414 statement if all its side effects are moved to *PRE_P and *POST_P.
415
416 In this case we will not want to emit the gimplified statement.
417 However, we may still want to emit a warning, so we do that before
418 gimplification. */
419 if (stmt && warn_unused_value)
420 {
421 if (!TREE_SIDE_EFFECTS (stmt))
422 {
423 if (!IS_EMPTY_STMT (stmt)
424 && !VOID_TYPE_P (TREE_TYPE (stmt))
425 && !TREE_NO_WARNING (stmt))
426 warning (OPT_Wunused_value, "statement with no effect");
427 }
428 else
429 warn_if_unused_value (stmt, input_location);
430 }
431
432 if (stmt == NULL_TREE)
433 stmt = alloc_stmt_list ();
434
435 *stmt_p = stmt;
436 }
437
438 /* Gimplify initialization from an AGGR_INIT_EXPR. */
439
440 static void
441 cp_gimplify_init_expr (tree *expr_p)
442 {
443 tree from = TREE_OPERAND (*expr_p, 1);
444 tree to = TREE_OPERAND (*expr_p, 0);
445 tree t;
446
447 /* What about code that pulls out the temp and uses it elsewhere? I
448 think that such code never uses the TARGET_EXPR as an initializer. If
449 I'm wrong, we'll abort because the temp won't have any RTL. In that
450 case, I guess we'll need to replace references somehow. */
451 if (TREE_CODE (from) == TARGET_EXPR)
452 from = TARGET_EXPR_INITIAL (from);
453
454 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
455 inside the TARGET_EXPR. */
456 for (t = from; t; )
457 {
458 tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
459
460 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
461 replace the slot operand with our target.
462
463 Should we add a target parm to gimplify_expr instead? No, as in this
464 case we want to replace the INIT_EXPR. */
465 if (TREE_CODE (sub) == AGGR_INIT_EXPR
466 || TREE_CODE (sub) == VEC_INIT_EXPR)
467 {
468 if (TREE_CODE (sub) == AGGR_INIT_EXPR)
469 AGGR_INIT_EXPR_SLOT (sub) = to;
470 else
471 VEC_INIT_EXPR_SLOT (sub) = to;
472 *expr_p = from;
473
474 /* The initialization is now a side-effect, so the container can
475 become void. */
476 if (from != sub)
477 TREE_TYPE (from) = void_type_node;
478 }
479
480 if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
481 /* Handle aggregate NSDMI. */
482 replace_placeholders (sub, to);
483
484 if (t == sub)
485 break;
486 else
487 t = TREE_OPERAND (t, 1);
488 }
489
490 }
491
492 /* Gimplify a MUST_NOT_THROW_EXPR. */
493
494 static enum gimplify_status
495 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
496 {
497 tree stmt = *expr_p;
498 tree temp = voidify_wrapper_expr (stmt, NULL);
499 tree body = TREE_OPERAND (stmt, 0);
500 gimple_seq try_ = NULL;
501 gimple_seq catch_ = NULL;
502 gimple mnt;
503
504 gimplify_and_add (body, &try_);
505 mnt = gimple_build_eh_must_not_throw (terminate_node);
506 gimple_seq_add_stmt_without_update (&catch_, mnt);
507 mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
508
509 gimple_seq_add_stmt_without_update (pre_p, mnt);
510 if (temp)
511 {
512 *expr_p = temp;
513 return GS_OK;
514 }
515
516 *expr_p = NULL;
517 return GS_ALL_DONE;
518 }
519
520 /* Return TRUE if an operand (OP) of a given TYPE being copied is
521 really just an empty class copy.
522
523 Check that the operand has a simple form so that TARGET_EXPRs and
524 non-empty CONSTRUCTORs get reduced properly, and we leave the
525 return slot optimization alone because it isn't a copy. */
526
527 static bool
528 simple_empty_class_p (tree type, tree op)
529 {
530 return
531 ((TREE_CODE (op) == COMPOUND_EXPR
532 && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
533 || is_gimple_lvalue (op)
534 || INDIRECT_REF_P (op)
535 || (TREE_CODE (op) == CONSTRUCTOR
536 && CONSTRUCTOR_NELTS (op) == 0
537 && !TREE_CLOBBER_P (op))
538 || (TREE_CODE (op) == CALL_EXPR
539 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
540 && is_really_empty_class (type);
541 }
542
543 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
544
545 int
546 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
547 {
548 int saved_stmts_are_full_exprs_p = 0;
549 enum tree_code code = TREE_CODE (*expr_p);
550 enum gimplify_status ret;
551
552 if (STATEMENT_CODE_P (code))
553 {
554 saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
555 current_stmt_tree ()->stmts_are_full_exprs_p
556 = STMT_IS_FULL_EXPR_P (*expr_p);
557 }
558
559 switch (code)
560 {
561 case PTRMEM_CST:
562 *expr_p = cplus_expand_constant (*expr_p);
563 ret = GS_OK;
564 break;
565
566 case AGGR_INIT_EXPR:
567 simplify_aggr_init_expr (expr_p);
568 ret = GS_OK;
569 break;
570
571 case VEC_INIT_EXPR:
572 {
573 location_t loc = input_location;
574 tree init = VEC_INIT_EXPR_INIT (*expr_p);
575 int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
576 gcc_assert (EXPR_HAS_LOCATION (*expr_p));
577 input_location = EXPR_LOCATION (*expr_p);
578 *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
579 init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
580 from_array,
581 tf_warning_or_error);
582 cp_genericize_tree (expr_p);
583 ret = GS_OK;
584 input_location = loc;
585 }
586 break;
587
588 case THROW_EXPR:
589 /* FIXME communicate throw type to back end, probably by moving
590 THROW_EXPR into ../tree.def. */
591 *expr_p = TREE_OPERAND (*expr_p, 0);
592 ret = GS_OK;
593 break;
594
595 case MUST_NOT_THROW_EXPR:
596 ret = gimplify_must_not_throw_expr (expr_p, pre_p);
597 break;
598
599 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
600 LHS of an assignment might also be involved in the RHS, as in bug
601 25979. */
602 case INIT_EXPR:
603 if (fn_contains_cilk_spawn_p (cfun)
604 && cilk_detect_spawn_and_unwrap (expr_p)
605 && !seen_error ())
606 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
607 cp_gimplify_init_expr (expr_p);
608 if (TREE_CODE (*expr_p) != INIT_EXPR)
609 return GS_OK;
610 /* Otherwise fall through. */
611 case MODIFY_EXPR:
612 modify_expr_case:
613 {
614 if (fn_contains_cilk_spawn_p (cfun)
615 && cilk_detect_spawn_and_unwrap (expr_p)
616 && !seen_error ())
617 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
618
619 /* If the back end isn't clever enough to know that the lhs and rhs
620 types are the same, add an explicit conversion. */
621 tree op0 = TREE_OPERAND (*expr_p, 0);
622 tree op1 = TREE_OPERAND (*expr_p, 1);
623
624 if (!error_operand_p (op0)
625 && !error_operand_p (op1)
626 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
627 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
628 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
629 TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
630 TREE_TYPE (op0), op1);
631
632 else if (simple_empty_class_p (TREE_TYPE (op0), op1))
633 {
634 /* Remove any copies of empty classes. Also drop volatile
635 variables on the RHS to avoid infinite recursion from
636 gimplify_expr trying to load the value. */
637 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
638 is_gimple_lvalue, fb_lvalue);
639 if (TREE_SIDE_EFFECTS (op1))
640 {
641 if (TREE_THIS_VOLATILE (op1)
642 && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
643 op1 = build_fold_addr_expr (op1);
644
645 gimplify_and_add (op1, pre_p);
646 }
647 *expr_p = TREE_OPERAND (*expr_p, 0);
648 }
649 }
650 ret = GS_OK;
651 break;
652
653 case EMPTY_CLASS_EXPR:
654 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
655 *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
656 ret = GS_OK;
657 break;
658
659 case BASELINK:
660 *expr_p = BASELINK_FUNCTIONS (*expr_p);
661 ret = GS_OK;
662 break;
663
664 case TRY_BLOCK:
665 genericize_try_block (expr_p);
666 ret = GS_OK;
667 break;
668
669 case HANDLER:
670 genericize_catch_block (expr_p);
671 ret = GS_OK;
672 break;
673
674 case EH_SPEC_BLOCK:
675 genericize_eh_spec_block (expr_p);
676 ret = GS_OK;
677 break;
678
679 case USING_STMT:
680 gcc_unreachable ();
681
682 case FOR_STMT:
683 case WHILE_STMT:
684 case DO_STMT:
685 case SWITCH_STMT:
686 case CONTINUE_STMT:
687 case BREAK_STMT:
688 gcc_unreachable ();
689
690 case OMP_FOR:
691 case OMP_SIMD:
692 case OMP_DISTRIBUTE:
693 ret = cp_gimplify_omp_for (expr_p, pre_p);
694 break;
695
696 case EXPR_STMT:
697 gimplify_expr_stmt (expr_p);
698 ret = GS_OK;
699 break;
700
701 case UNARY_PLUS_EXPR:
702 {
703 tree arg = TREE_OPERAND (*expr_p, 0);
704 tree type = TREE_TYPE (*expr_p);
705 *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
706 : arg;
707 ret = GS_OK;
708 }
709 break;
710
711 case CILK_SPAWN_STMT:
712 gcc_assert
713 (fn_contains_cilk_spawn_p (cfun)
714 && cilk_detect_spawn_and_unwrap (expr_p));
715
716 /* If errors are seen, then just process it as a CALL_EXPR. */
717 if (!seen_error ())
718 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
719
720 case CALL_EXPR:
721 if (fn_contains_cilk_spawn_p (cfun)
722 && cilk_detect_spawn_and_unwrap (expr_p)
723 && !seen_error ())
724 return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
725
726 /* DR 1030 says that we need to evaluate the elements of an
727 initializer-list in forward order even when it's used as arguments to
728 a constructor. So if the target wants to evaluate them in reverse
729 order and there's more than one argument other than 'this', gimplify
730 them in order. */
731 ret = GS_OK;
732 if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
733 && call_expr_nargs (*expr_p) > 2)
734 {
735 int nargs = call_expr_nargs (*expr_p);
736 location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
737 for (int i = 1; i < nargs; ++i)
738 {
739 enum gimplify_status t
740 = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
741 if (t == GS_ERROR)
742 ret = GS_ERROR;
743 }
744 }
745 break;
746
747 case RETURN_EXPR:
748 if (TREE_OPERAND (*expr_p, 0)
749 && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
750 || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
751 {
752 expr_p = &TREE_OPERAND (*expr_p, 0);
753 code = TREE_CODE (*expr_p);
754 /* Avoid going through the INIT_EXPR case, which can
755 degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
756 goto modify_expr_case;
757 }
758 /* Fall through. */
759
760 default:
761 ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
762 break;
763 }
764
765 /* Restore saved state. */
766 if (STATEMENT_CODE_P (code))
767 current_stmt_tree ()->stmts_are_full_exprs_p
768 = saved_stmts_are_full_exprs_p;
769
770 return ret;
771 }
772
773 static inline bool
774 is_invisiref_parm (const_tree t)
775 {
776 return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
777 && DECL_BY_REFERENCE (t));
778 }
779
780 /* Return true if the uid in both int tree maps are equal. */
781
782 bool
783 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
784 {
785 return (a->uid == b->uid);
786 }
787
788 /* Hash a UID in a cxx_int_tree_map. */
789
790 unsigned int
791 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
792 {
793 return item->uid;
794 }
795
796 /* A stable comparison routine for use with splay trees and DECLs. */
797
798 static int
799 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
800 {
801 tree a = (tree) xa;
802 tree b = (tree) xb;
803
804 return DECL_UID (a) - DECL_UID (b);
805 }
806
807 /* OpenMP context during genericization. */
808
809 struct cp_genericize_omp_taskreg
810 {
811 bool is_parallel;
812 bool default_shared;
813 struct cp_genericize_omp_taskreg *outer;
814 splay_tree variables;
815 };
816
817 /* Return true if genericization should try to determine if
818 DECL is firstprivate or shared within task regions. */
819
820 static bool
821 omp_var_to_track (tree decl)
822 {
823 tree type = TREE_TYPE (decl);
824 if (is_invisiref_parm (decl))
825 type = TREE_TYPE (type);
826 while (TREE_CODE (type) == ARRAY_TYPE)
827 type = TREE_TYPE (type);
828 if (type == error_mark_node || !CLASS_TYPE_P (type))
829 return false;
830 if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
831 return false;
832 if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
833 return false;
834 return true;
835 }
836
837 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
838
839 static void
840 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
841 {
842 splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
843 (splay_tree_key) decl);
844 if (n == NULL)
845 {
846 int flags = OMP_CLAUSE_DEFAULT_SHARED;
847 if (omp_ctx->outer)
848 omp_cxx_notice_variable (omp_ctx->outer, decl);
849 if (!omp_ctx->default_shared)
850 {
851 struct cp_genericize_omp_taskreg *octx;
852
853 for (octx = omp_ctx->outer; octx; octx = octx->outer)
854 {
855 n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
856 if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
857 {
858 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
859 break;
860 }
861 if (octx->is_parallel)
862 break;
863 }
864 if (octx == NULL
865 && (TREE_CODE (decl) == PARM_DECL
866 || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
867 && DECL_CONTEXT (decl) == current_function_decl)))
868 flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
869 if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
870 {
871 /* DECL is implicitly determined firstprivate in
872 the current task construct. Ensure copy ctor and
873 dtor are instantiated, because during gimplification
874 it will be already too late. */
875 tree type = TREE_TYPE (decl);
876 if (is_invisiref_parm (decl))
877 type = TREE_TYPE (type);
878 while (TREE_CODE (type) == ARRAY_TYPE)
879 type = TREE_TYPE (type);
880 get_copy_ctor (type, tf_none);
881 get_dtor (type, tf_none);
882 }
883 }
884 splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
885 }
886 }
887
888 /* Genericization context. */
889
890 struct cp_genericize_data
891 {
892 hash_set<tree> *p_set;
893 vec<tree> bind_expr_stack;
894 struct cp_genericize_omp_taskreg *omp_ctx;
895 tree try_block;
896 bool no_sanitize_p;
897 };
898
899 /* Perform any pre-gimplification lowering of C++ front end trees to
900 GENERIC. */
901
902 static tree
903 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
904 {
905 tree stmt = *stmt_p;
906 struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
907 hash_set<tree> *p_set = wtd->p_set;
908
909 /* If in an OpenMP context, note var uses. */
910 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
911 && (VAR_P (stmt)
912 || TREE_CODE (stmt) == PARM_DECL
913 || TREE_CODE (stmt) == RESULT_DECL)
914 && omp_var_to_track (stmt))
915 omp_cxx_notice_variable (wtd->omp_ctx, stmt);
916
917 if (is_invisiref_parm (stmt)
918 /* Don't dereference parms in a thunk, pass the references through. */
919 && !(DECL_THUNK_P (current_function_decl)
920 && TREE_CODE (stmt) == PARM_DECL))
921 {
922 *stmt_p = convert_from_reference (stmt);
923 *walk_subtrees = 0;
924 return NULL;
925 }
926
927 /* Map block scope extern declarations to visible declarations with the
928 same name and type in outer scopes if any. */
929 if (cp_function_chain->extern_decl_map
930 && VAR_OR_FUNCTION_DECL_P (stmt)
931 && DECL_EXTERNAL (stmt))
932 {
933 struct cxx_int_tree_map *h, in;
934 in.uid = DECL_UID (stmt);
935 h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
936 if (h)
937 {
938 *stmt_p = h->to;
939 *walk_subtrees = 0;
940 return NULL;
941 }
942 }
943
944 /* Other than invisiref parms, don't walk the same tree twice. */
945 if (p_set->contains (stmt))
946 {
947 *walk_subtrees = 0;
948 return NULL_TREE;
949 }
950
951 if (TREE_CODE (stmt) == ADDR_EXPR
952 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
953 {
954 /* If in an OpenMP context, note var uses. */
955 if (__builtin_expect (wtd->omp_ctx != NULL, 0)
956 && omp_var_to_track (TREE_OPERAND (stmt, 0)))
957 omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
958 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
959 *walk_subtrees = 0;
960 }
961 else if (TREE_CODE (stmt) == RETURN_EXPR
962 && TREE_OPERAND (stmt, 0)
963 && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
964 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
965 *walk_subtrees = 0;
966 else if (TREE_CODE (stmt) == OMP_CLAUSE)
967 switch (OMP_CLAUSE_CODE (stmt))
968 {
969 case OMP_CLAUSE_LASTPRIVATE:
970 /* Don't dereference an invisiref in OpenMP clauses. */
971 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
972 {
973 *walk_subtrees = 0;
974 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
975 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
976 cp_genericize_r, data, NULL);
977 }
978 break;
979 case OMP_CLAUSE_PRIVATE:
980 /* Don't dereference an invisiref in OpenMP clauses. */
981 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
982 *walk_subtrees = 0;
983 else if (wtd->omp_ctx != NULL)
984 {
985 /* Private clause doesn't cause any references to the
986 var in outer contexts, avoid calling
987 omp_cxx_notice_variable for it. */
988 struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
989 wtd->omp_ctx = NULL;
990 cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
991 data, NULL);
992 wtd->omp_ctx = old;
993 *walk_subtrees = 0;
994 }
995 break;
996 case OMP_CLAUSE_SHARED:
997 case OMP_CLAUSE_FIRSTPRIVATE:
998 case OMP_CLAUSE_COPYIN:
999 case OMP_CLAUSE_COPYPRIVATE:
1000 /* Don't dereference an invisiref in OpenMP clauses. */
1001 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1002 *walk_subtrees = 0;
1003 break;
1004 case OMP_CLAUSE_REDUCTION:
1005 /* Don't dereference an invisiref in reduction clause's
1006 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1007 still needs to be genericized. */
1008 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1009 {
1010 *walk_subtrees = 0;
1011 if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1012 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1013 cp_genericize_r, data, NULL);
1014 if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1015 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1016 cp_genericize_r, data, NULL);
1017 }
1018 break;
1019 default:
1020 break;
1021 }
1022 else if (IS_TYPE_OR_DECL_P (stmt))
1023 *walk_subtrees = 0;
1024
1025 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1026 to lower this construct before scanning it, so we need to lower these
1027 before doing anything else. */
1028 else if (TREE_CODE (stmt) == CLEANUP_STMT)
1029 *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1030 CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1031 : TRY_FINALLY_EXPR,
1032 void_type_node,
1033 CLEANUP_BODY (stmt),
1034 CLEANUP_EXPR (stmt));
1035
1036 else if (TREE_CODE (stmt) == IF_STMT)
1037 {
1038 genericize_if_stmt (stmt_p);
1039 /* *stmt_p has changed, tail recurse to handle it again. */
1040 return cp_genericize_r (stmt_p, walk_subtrees, data);
1041 }
1042
1043 /* COND_EXPR might have incompatible types in branches if one or both
1044 arms are bitfields. Fix it up now. */
1045 else if (TREE_CODE (stmt) == COND_EXPR)
1046 {
1047 tree type_left
1048 = (TREE_OPERAND (stmt, 1)
1049 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1050 : NULL_TREE);
1051 tree type_right
1052 = (TREE_OPERAND (stmt, 2)
1053 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1054 : NULL_TREE);
1055 if (type_left
1056 && !useless_type_conversion_p (TREE_TYPE (stmt),
1057 TREE_TYPE (TREE_OPERAND (stmt, 1))))
1058 {
1059 TREE_OPERAND (stmt, 1)
1060 = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1061 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1062 type_left));
1063 }
1064 if (type_right
1065 && !useless_type_conversion_p (TREE_TYPE (stmt),
1066 TREE_TYPE (TREE_OPERAND (stmt, 2))))
1067 {
1068 TREE_OPERAND (stmt, 2)
1069 = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1070 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1071 type_right));
1072 }
1073 }
1074
1075 else if (TREE_CODE (stmt) == BIND_EXPR)
1076 {
1077 if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1078 {
1079 tree decl;
1080 for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1081 if (VAR_P (decl)
1082 && !DECL_EXTERNAL (decl)
1083 && omp_var_to_track (decl))
1084 {
1085 splay_tree_node n
1086 = splay_tree_lookup (wtd->omp_ctx->variables,
1087 (splay_tree_key) decl);
1088 if (n == NULL)
1089 splay_tree_insert (wtd->omp_ctx->variables,
1090 (splay_tree_key) decl,
1091 TREE_STATIC (decl)
1092 ? OMP_CLAUSE_DEFAULT_SHARED
1093 : OMP_CLAUSE_DEFAULT_PRIVATE);
1094 }
1095 }
1096 if (flag_sanitize
1097 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1098 {
1099 /* The point here is to not sanitize static initializers. */
1100 bool no_sanitize_p = wtd->no_sanitize_p;
1101 wtd->no_sanitize_p = true;
1102 for (tree decl = BIND_EXPR_VARS (stmt);
1103 decl;
1104 decl = DECL_CHAIN (decl))
1105 if (VAR_P (decl)
1106 && TREE_STATIC (decl)
1107 && DECL_INITIAL (decl))
1108 cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1109 wtd->no_sanitize_p = no_sanitize_p;
1110 }
1111 wtd->bind_expr_stack.safe_push (stmt);
1112 cp_walk_tree (&BIND_EXPR_BODY (stmt),
1113 cp_genericize_r, data, NULL);
1114 wtd->bind_expr_stack.pop ();
1115 }
1116
1117 else if (TREE_CODE (stmt) == USING_STMT)
1118 {
1119 tree block = NULL_TREE;
1120
1121 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1122 BLOCK, and append an IMPORTED_DECL to its
1123 BLOCK_VARS chained list. */
1124 if (wtd->bind_expr_stack.exists ())
1125 {
1126 int i;
1127 for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1128 if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1129 break;
1130 }
1131 if (block)
1132 {
1133 tree using_directive;
1134 gcc_assert (TREE_OPERAND (stmt, 0));
1135
1136 using_directive = make_node (IMPORTED_DECL);
1137 TREE_TYPE (using_directive) = void_type_node;
1138
1139 IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
1140 = TREE_OPERAND (stmt, 0);
1141 DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1142 BLOCK_VARS (block) = using_directive;
1143 }
1144 /* The USING_STMT won't appear in GENERIC. */
1145 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1146 *walk_subtrees = 0;
1147 }
1148
1149 else if (TREE_CODE (stmt) == DECL_EXPR
1150 && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1151 {
1152 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1153 *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1154 *walk_subtrees = 0;
1155 }
1156 else if (TREE_CODE (stmt) == DECL_EXPR)
1157 {
1158 tree d = DECL_EXPR_DECL (stmt);
1159 if (TREE_CODE (d) == VAR_DECL)
1160 gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1161 }
1162 else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
1163 {
1164 struct cp_genericize_omp_taskreg omp_ctx;
1165 tree c, decl;
1166 splay_tree_node n;
1167
1168 *walk_subtrees = 0;
1169 cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1170 omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1171 omp_ctx.default_shared = omp_ctx.is_parallel;
1172 omp_ctx.outer = wtd->omp_ctx;
1173 omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1174 wtd->omp_ctx = &omp_ctx;
1175 for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1176 switch (OMP_CLAUSE_CODE (c))
1177 {
1178 case OMP_CLAUSE_SHARED:
1179 case OMP_CLAUSE_PRIVATE:
1180 case OMP_CLAUSE_FIRSTPRIVATE:
1181 case OMP_CLAUSE_LASTPRIVATE:
1182 decl = OMP_CLAUSE_DECL (c);
1183 if (decl == error_mark_node || !omp_var_to_track (decl))
1184 break;
1185 n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1186 if (n != NULL)
1187 break;
1188 splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1189 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1190 ? OMP_CLAUSE_DEFAULT_SHARED
1191 : OMP_CLAUSE_DEFAULT_PRIVATE);
1192 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
1193 && omp_ctx.outer)
1194 omp_cxx_notice_variable (omp_ctx.outer, decl);
1195 break;
1196 case OMP_CLAUSE_DEFAULT:
1197 if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1198 omp_ctx.default_shared = true;
1199 default:
1200 break;
1201 }
1202 cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1203 wtd->omp_ctx = omp_ctx.outer;
1204 splay_tree_delete (omp_ctx.variables);
1205 }
1206 else if (TREE_CODE (stmt) == TRY_BLOCK)
1207 {
1208 *walk_subtrees = 0;
1209 tree try_block = wtd->try_block;
1210 wtd->try_block = stmt;
1211 cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1212 wtd->try_block = try_block;
1213 cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1214 }
1215 else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
1216 {
1217 /* MUST_NOT_THROW_COND might be something else with TM. */
1218 if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1219 {
1220 *walk_subtrees = 0;
1221 tree try_block = wtd->try_block;
1222 wtd->try_block = stmt;
1223 cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1224 wtd->try_block = try_block;
1225 }
1226 }
1227 else if (TREE_CODE (stmt) == THROW_EXPR)
1228 {
1229 location_t loc = location_of (stmt);
1230 if (TREE_NO_WARNING (stmt))
1231 /* Never mind. */;
1232 else if (wtd->try_block)
1233 {
1234 if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1235 && warning_at (loc, OPT_Wterminate,
1236 "throw will always call terminate()")
1237 && cxx_dialect >= cxx11
1238 && DECL_DESTRUCTOR_P (current_function_decl))
1239 inform (loc, "in C++11 destructors default to noexcept");
1240 }
1241 else
1242 {
1243 if (warn_cxx11_compat && cxx_dialect < cxx11
1244 && DECL_DESTRUCTOR_P (current_function_decl)
1245 && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1246 == NULL_TREE)
1247 && (get_defaulted_eh_spec (current_function_decl)
1248 == empty_except_spec))
1249 warning_at (loc, OPT_Wc__11_compat,
1250 "in C++11 this throw will terminate because "
1251 "destructors default to noexcept");
1252 }
1253 }
1254 else if (TREE_CODE (stmt) == CONVERT_EXPR)
1255 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1256 else if (TREE_CODE (stmt) == FOR_STMT)
1257 genericize_for_stmt (stmt_p, walk_subtrees, data);
1258 else if (TREE_CODE (stmt) == WHILE_STMT)
1259 genericize_while_stmt (stmt_p, walk_subtrees, data);
1260 else if (TREE_CODE (stmt) == DO_STMT)
1261 genericize_do_stmt (stmt_p, walk_subtrees, data);
1262 else if (TREE_CODE (stmt) == SWITCH_STMT)
1263 genericize_switch_stmt (stmt_p, walk_subtrees, data);
1264 else if (TREE_CODE (stmt) == CONTINUE_STMT)
1265 genericize_continue_stmt (stmt_p);
1266 else if (TREE_CODE (stmt) == BREAK_STMT)
1267 genericize_break_stmt (stmt_p);
1268 else if (TREE_CODE (stmt) == OMP_FOR
1269 || TREE_CODE (stmt) == OMP_SIMD
1270 || TREE_CODE (stmt) == OMP_DISTRIBUTE)
1271 genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1272 else if (TREE_CODE (stmt) == SIZEOF_EXPR)
1273 {
1274 if (SIZEOF_EXPR_TYPE_P (stmt))
1275 *stmt_p
1276 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
1277 SIZEOF_EXPR, false);
1278 else if (TYPE_P (TREE_OPERAND (stmt, 0)))
1279 *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
1280 SIZEOF_EXPR, false);
1281 else
1282 *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
1283 SIZEOF_EXPR, false);
1284 if (*stmt_p == error_mark_node)
1285 *stmt_p = size_one_node;
1286 return NULL;
1287 }
1288 else if ((flag_sanitize
1289 & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1290 && !wtd->no_sanitize_p)
1291 {
1292 if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1293 && TREE_CODE (stmt) == NOP_EXPR
1294 && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1295 ubsan_maybe_instrument_reference (stmt);
1296 else if (TREE_CODE (stmt) == CALL_EXPR)
1297 {
1298 tree fn = CALL_EXPR_FN (stmt);
1299 if (fn != NULL_TREE
1300 && !error_operand_p (fn)
1301 && POINTER_TYPE_P (TREE_TYPE (fn))
1302 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1303 {
1304 bool is_ctor
1305 = TREE_CODE (fn) == ADDR_EXPR
1306 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1307 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1308 if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1309 ubsan_maybe_instrument_member_call (stmt, is_ctor);
1310 if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
1311 cp_ubsan_maybe_instrument_member_call (stmt);
1312 }
1313 }
1314 }
1315
1316 p_set->add (*stmt_p);
1317
1318 return NULL;
1319 }
1320
1321 /* Lower C++ front end trees to GENERIC in T_P. */
1322
1323 static void
1324 cp_genericize_tree (tree* t_p)
1325 {
1326 struct cp_genericize_data wtd;
1327
1328 wtd.p_set = new hash_set<tree>;
1329 wtd.bind_expr_stack.create (0);
1330 wtd.omp_ctx = NULL;
1331 wtd.try_block = NULL_TREE;
1332 wtd.no_sanitize_p = false;
1333 cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1334 delete wtd.p_set;
1335 wtd.bind_expr_stack.release ();
1336 if (flag_sanitize & SANITIZE_VPTR)
1337 cp_ubsan_instrument_member_accesses (t_p);
1338 }
1339
/* If a function that should end with a return in non-void
   function doesn't obviously end with return, add ubsan
   instrumentation code to verify it at runtime.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  /* Nothing to check for void-returning functions, constructors and
     destructors, or when the target suppresses the missing-return
     warning for FNDECL.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  /* Walk down to the last statement of the body, looking through
     BIND_EXPRs, TRY_FINALLY_EXPRs and STATEMENT_LISTs.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Body obviously ends with a return: nothing to instrument.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the runtime check after the last statement of the
     outermost BIND_EXPR's STATEMENT_LIST body.  */
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
1392
/* Genericize FNDECL: adjust parameters and the return value that are
   passed by invisible reference, expand Cilk Plus array notations,
   lower remaining C++-specific trees in the saved body to GENERIC,
   and add -fsanitize=return instrumentation where requested.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm over to its by-reference calling type and
	   recompute its layout accordingly.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    /* Find the named-return-value variable aliased to the
	       result and dereference the now-reference result.  */
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been popped by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
1465 \f
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the object argument(s) in FN's type to reach any default
     arguments that follow them.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit loop applying FN to each
	 element, walking the (possibly multidimensional) array with
	 flat pointer arithmetic.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Drill down to the innermost element type, indexing element 0
	 of each dimension along the way.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 points one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2 when ARG2 is given) are the induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      /* Call FN on the current element(s), wrapped in a cleanup point
	 so temporaries die per iteration.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 to the next element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  /* Keep P2 in lock step with P1.  */
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop back while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the objects' addresses.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1576
1577 /* Return code to initialize DECL with its default constructor, or
1578 NULL if there's nothing to do. */
1579
1580 tree
1581 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1582 {
1583 tree info = CP_OMP_CLAUSE_INFO (clause);
1584 tree ret = NULL;
1585
1586 if (info)
1587 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1588
1589 return ret;
1590 }
1591
1592 /* Return code to initialize DST with a copy constructor from SRC. */
1593
1594 tree
1595 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1596 {
1597 tree info = CP_OMP_CLAUSE_INFO (clause);
1598 tree ret = NULL;
1599
1600 if (info)
1601 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1602 if (ret == NULL)
1603 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1604
1605 return ret;
1606 }
1607
1608 /* Similarly, except use an assignment operator instead. */
1609
1610 tree
1611 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1612 {
1613 tree info = CP_OMP_CLAUSE_INFO (clause);
1614 tree ret = NULL;
1615
1616 if (info)
1617 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1618 if (ret == NULL)
1619 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1620
1621 return ret;
1622 }
1623
1624 /* Return code to destroy DECL. */
1625
1626 tree
1627 cxx_omp_clause_dtor (tree clause, tree decl)
1628 {
1629 tree info = CP_OMP_CLAUSE_INFO (clause);
1630 tree ret = NULL;
1631
1632 if (info)
1633 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1634
1635 return ret;
1636 }
1637
1638 /* True if OpenMP should privatize what this DECL points to rather
1639 than the DECL itself. */
1640
1641 bool
1642 cxx_omp_privatize_by_reference (const_tree decl)
1643 {
1644 return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1645 || is_invisiref_parm (decl));
1646 }
1647
/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only invisible-reference parms are looked through here; any
	 other reference is not a candidate.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    /* Find the named-return-value variable whose type matches
	       the result's, and prefer its (possibly const) type.  */
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1689
1690 /* True if OpenMP sharing attribute of DECL is predetermined. */
1691
1692 enum omp_clause_default_kind
1693 cxx_omp_predetermined_sharing (tree decl)
1694 {
1695 /* Static data members are predetermined shared. */
1696 if (TREE_STATIC (decl))
1697 {
1698 tree ctx = CP_DECL_CONTEXT (decl);
1699 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1700 return OMP_CLAUSE_DEFAULT_SHARED;
1701 }
1702
1703 /* Const qualified vars having no mutable member are predetermined
1704 shared. */
1705 if (cxx_omp_const_qual_no_mutable (decl))
1706 return OMP_CLAUSE_DEFAULT_SHARED;
1707
1708 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1709 }
1710
/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only implicitly determined firstprivate clauses need work here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      /* An invisible-reference parm really denotes the object itself;
	 any other reference cannot be firstprivatized.  */
      if (is_invisiref_parm (decl))
	inner_type = TREE_TYPE (inner_type);
      else
	{
	  error ("%qE implicitly determined as %<firstprivate%> has reference type",
		 decl);
	  make_shared = true;
	}
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  /* Demote the clause to shared when firstprivate is not possible.  */
  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}