/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "cp-tree.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "hashtab.h"
#include "flags.h"
#include "splay-tree.h"
#include "target.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static void cp_genericize_tree (tree*);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
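
/* For illustration (not in the original sources): with nested loops, each
   loop pushes fresh break and continue labels in begin_bc_block, so
   bc_label[bc_break] always names the innermost target; finish_bc_block
   pops the label again and emits a LABEL_EXPR only if some break or
   continue actually marked it used via get_bc_label.  */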

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
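
/* For illustration (schematic only): for a function

     void f () throw (A) { body }

   the EH_SPEC_BLOCK around BODY is lowered to roughly

     try { body }
     catch-filter (A) { <call unexpected handler with exception ptr> }

   i.e. a TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR listing the
   allowed types, with the unexpected-handler call as its failure
   action.  */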

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}
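
/* For illustration: "if (x) f (); else g ();" becomes the void-typed
   GENERIC expression

     COND_EXPR <x, f (), g ()>

   while "if (0) f ();" whose untaken arm has no side effects folds away
   to just the else arm (here, an empty statement).  */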

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree entry = NULL, exit = NULL, t;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (incr && EXPR_P (incr))
    SET_EXPR_LOCATION (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        {
          t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                          get_bc_label (bc_break));
          append_to_statement_list (t, &stmt_list);
        }
    }
  else
    {
      /* Expand to gotos, just like c_finish_loop.  TODO: Use LOOP_EXPR.  */
      tree top = build1 (LABEL_EXPR, void_type_node,
                         create_artificial_label (start_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));

      if (cond && !integer_nonzerop (cond))
        {
          /* Canonicalize the loop condition to the end.  This means
             generating a branch to the loop condition.  Reuse the
             continue label, if possible.  */
          if (cond_is_first)
            {
              if (incr)
                {
                  entry = build1 (LABEL_EXPR, void_type_node,
                                  create_artificial_label (start_locus));
                  t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                                  LABEL_EXPR_LABEL (entry));
                }
              else
                t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                                get_bc_label (bc_continue));
              append_to_statement_list (t, &stmt_list);
            }

          t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
          exit = fold_build3_loc (start_locus,
                                  COND_EXPR, void_type_node, cond, exit, t);
        }

      append_to_statement_list (top, &stmt_list);
    }

  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  append_to_statement_list (entry, &stmt_list);
  append_to_statement_list (exit, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);

  if (stmt_list == NULL_TREE)
    stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);

  *stmt_p = stmt_list;
}
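
/* For illustration: with the condition canonicalized to the end, a loop
   "while (c) body" comes out schematically as

     goto cont;
   top:
     body
   cont:
     if (c) goto top; else goto brk;
   brk:

   A do-while loop is the same minus the initial goto, and a for-loop
   places INCR between "cont:" and the condition test, using a separate
   entry label as the target of the initial jump.  */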

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
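
/* For illustration: inside an enclosing loop or switch, "break" becomes
   simply "goto <break label>" of the innermost scope on the bc_label
   stack, and "continue" becomes a PREDICT_EXPR branch hint (continue
   assumed not taken) followed by "goto <continue label>".  */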

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}
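
/* Note on the recursion guard above: gimplify_and_add hands the OMP_FOR
   back through the language hook (cp_gimplify_expr, and thus this
   function) a second time; OMP_FOR_GIMPLIFYING_P makes that inner call
   return GS_UNHANDLED so the generic gimplifier processes the node
   itself.  */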

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
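
/* For illustration (schematic only): for "T t = T (args);" the front end
   produces roughly

     INIT_EXPR <t, TARGET_EXPR <tmp, AGGR_INIT_EXPR <T::T, args, slot=tmp>>>

   and the code above rewrites the AGGR_INIT_EXPR's slot operand to "t"
   and drops the INIT_EXPR, so the object is constructed directly in
   place with no temporary copy.  */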

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
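
/* For illustration: the body is emitted as

     try { body } catch { <eh_must_not_throw (terminate)> }

   so that if an exception would propagate out of a region that must not
   throw (e.g. a noexcept function, or a destructor run during stack
   unwinding), std::terminate is called instead.  */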

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
                  || (TREE_CODE (op1) == CONSTRUCTOR
                      && CONSTRUCTOR_NELTS (op1) == 0
                      && !TREE_CLOBBER_P (op1))
                  || (TREE_CODE (op1) == CALL_EXPR
                      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and non-empty
               CONSTRUCTORs get reduced properly, and we leave the return
               slot optimization alone because it isn't a copy (FIXME so it
               shouldn't be represented as one).

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1))
              *expr_p = op0;
            else if (TREE_THIS_VOLATILE (op1)
                     && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                build_fold_addr_expr (op1), op0);
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert
        (fn_contains_cilk_spawn_p (cfun)
         && cilk_detect_spawn_and_unwrap (expr_p));

      /* If errors are seen, then just process it as a CALL_EXPR.  */
      if (!seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

      /* DR 1030 says that we need to evaluate the elements of an
         initializer-list in forward order even when it's used as arguments to
         a constructor.  So if the target wants to evaluate them in reverse
         order and there's more than one argument other than 'this', gimplify
         them in order.  */
      ret = GS_OK;
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
          && call_expr_nargs (*expr_p) > 2)
        {
          int nargs = call_expr_nargs (*expr_p);
          location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
          for (int i = 1; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
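
/* For illustration of the DR 1030 handling above: in

     struct A { A (int, int); };
     A a { f (), g () };

   f () must be evaluated before g () even on targets that push call
   arguments in reverse order, so the arguments of a CALL_EXPR with
   CALL_EXPR_LIST_INIT_P set are gimplified left to right up front.  */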

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
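
/* For illustration: a parameter of class type with a non-trivial copy
   constructor or destructor, e.g.

     void f (std::string s);

   is passed by invisible reference: the caller constructs the argument
   and passes its address, so inside f the PARM_DECL has
   DECL_BY_REFERENCE set and uses of "s" are rewritten below to
   dereferences.  */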

/* Return true if the UIDs in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *)item)->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
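
/* For illustration: in

     void f ()
     {
       std::string s;
       #pragma omp task
       s += "x";
     }

   "s" is implicitly firstprivate in the task, so its copy constructor
   and destructor must be usable; the tracking above instantiates them
   during genericization, before gimplification would need them.  */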

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
        *stmt_p
          = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
                                        SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
        *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      else
        *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
        *stmt_p = size_one_node;
      return NULL;
    }
  else if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
    {
      if (TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              ubsan_maybe_instrument_member_call (stmt, is_ctor);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it does
   return.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}
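
/* For illustration: with -fsanitize=return, a function such as

     int f (int x) { if (x) return 1; }

   does not obviously end in a return, so a call into the ubsan runtime
   reporting the missing return is appended after its last statement;
   it fires only if control actually falls off the end.  */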

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && current_function_decl != NULL_TREE
      && !lookup_attribute ("no_sanitize_undefined",
                            DECL_ATTRIBUTES (current_function_decl)))
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
\f
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
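
/* For illustration: for an array "T a[N]", the code built above to apply
   FN (say a copy constructor, with SRC as ARG2) is schematically

     p1 = &a[0];  end1 = p1 + sizeof (a);
     p2 = &src[0];
   lab:
     FN (p1, p2, <default args>);
     p1 = p1 + sizeof (T);  p2 = p2 + sizeof (T);
     if (p1 != end1) goto lab;

   i.e. an element-wise loop over the flattened array.  */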

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified var having no mutable member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}
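
/* For illustration: given

     struct S { static int sm; };
     const int c = 42;   /+ no mutable members +/

   both S::sm (a static data member) and "c" (const-qualified, no mutable
   member) are predetermined shared, so the function above returns
   OMP_CLAUSE_DEFAULT_SHARED for them and they are never implicitly
   privatized.  */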

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
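
/* For illustration: if a variable declared "int &r" is referenced in a
   task region with no explicit data-sharing clause, it would be
   implicitly firstprivate; the reference type makes that invalid, so the
   error above is emitted and the clause is downgraded to shared to limit
   follow-on diagnostics.  */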