/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2016 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "cilk.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}
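
/* Editorial note (illustrative, not from the original sources): each
   loop or switch genericizer brackets its body with begin_bc_block and
   finish_bc_block, so that any "break" or "continue" seen inside lowers
   to a GOTO_EXPR targeting the innermost label of the right kind, and
   the LABEL_EXPR itself is only emitted if TREE_USED shows the label
   was actually targeted.  */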

/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
   *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
   TARGET_EXPR.  *PRE_P and *POST_P are gimple sequences from the caller
   of gimplify_cilk_spawn.  */

static void
cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
                                            gimple_seq *post_p)
{
  int ii = 0;

  cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
  if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
    for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
      gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
                     is_gimple_reg, fb_rvalue);
}


/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
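
/* Editorial note (illustrative sketch): the tree built above has the shape

     TRY_CATCH_EXPR
       <0> BODY
       <1> EH_FILTER_EXPR (ALLOWED) { FAILURE }

   i.e. if BODY throws a type not in ALLOWED, the FAILURE statements run.  */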

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
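
/* Editorial note (illustrative example): "if (x) f (); else g ();" becomes
   COND_EXPR <x, f (), g ()> of void type, and a statically known condition
   such as "if (1) f ();" is reduced directly to "f ();" provided the dead
   arm has no side effects.  */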

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
         we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
                         get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
                              build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        loop = fold_build3_loc (start_locus, COND_EXPR,
                                void_type_node, cond, stmt_list,
                                build_empty_stmt (start_locus));
      else
        loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
        loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
        loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
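
/* Editorial note (illustrative sketch): for a while loop the generated
   GENERIC is roughly

     LOOP_EXPR
       if (!COND) goto BREAK_LABEL;   // the "exit" COND_EXPR
       BODY
     CONTINUE_LABEL:
       INCR                           // for-loops only
     BREAK_LABEL:

   with the continue label inside the LOOP_EXPR and the break label after
   it; a do-while loop instead places the exit test after the body.  */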

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}
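
/* Editorial note (illustrative example): "switch (n) { case 0: ...; break; }"
   becomes a SWITCH_EXPR over N whose body still contains the case labels,
   followed by the break label appended by finish_bc_block, so each "break;"
   inside lowers to a goto past the SWITCH_EXPR.  */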

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
        /* Handle aggregate NSDMI.  */
        replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
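
/* Editorial note (illustrative example): for "A a = f ();" where f returns
   A by value, the INIT_EXPR <a, AGGR_INIT_EXPR <f, slot>> is rewritten by
   pointing the AGGR_INIT_EXPR's slot at "a" itself, so the result is
   constructed directly into "a" with no extra temporary or copy.  */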

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
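
/* Editorial note (illustrative sketch): the GIMPLE built above is

     try
       {
         BODY
       }
     catch
       {
         <<<eh_must_not_throw (terminate)>>>
       }

   i.e. any exception escaping BODY calls std::terminate.  */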

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0
         && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}
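
/* Editorial note (illustrative example): given "struct E {}; E a, b;",
   the assignment "a = b;" copies no bytes, so the MODIFY_EXPR/INIT_EXPR
   handling below drops the copy entirely, while still gimplifying any
   side effects the right-hand side may carry.  */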

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun))
        {
          if (cilk_detect_spawn_and_unwrap (expr_p))
            {
              cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
                                                          pre_p, post_p);
              return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
            }
          if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
            return GS_ERROR;
        }

      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        if (fn_contains_cilk_spawn_p (cfun)
            && cilk_detect_spawn_and_unwrap (expr_p)
            && !seen_error ())
          {
            cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
            return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
          }
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1))
          {
            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            *expr_p = TREE_OPERAND (*expr_p, 0);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert (fn_contains_cilk_spawn_p (cfun)
                  && cilk_detect_spawn_and_unwrap (expr_p));

      if (!seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      return GS_ERROR;

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
          && cilk_detect_spawn_and_unwrap (expr_p)
          && !seen_error ())
        {
          cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
          return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
        }
      /* DR 1030 says that we need to evaluate the elements of an
         initializer-list in forward order even when it's used as arguments to
         a constructor.  So if the target wants to evaluate them in reverse
         order and there's more than one argument other than 'this', gimplify
         them in order.  */
      ret = GS_OK;
      if (PUSH_ARGS_REVERSED && CALL_EXPR_LIST_INIT_P (*expr_p)
          && call_expr_nargs (*expr_p) > 2)
        {
          int nargs = call_expr_nargs (*expr_p);
          location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
          for (int i = 1; i < nargs; ++i)
            {
              enum gimplify_status t
                = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          code = TREE_CODE (*expr_p);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

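/* Return true if T is a parameter or result decl that is passed by
   invisible reference, i.e. has DECL_BY_REFERENCE set (editorial
   comment added for clarity).  */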
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs of two cxx_int_tree_map entries are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
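
/* Editorial note (illustrative example): for

     S s;
     #pragma omp task
       use (s);

   "s" is implicitly determined firstprivate in the task, so the code
   above instantiates S's copy constructor and destructor up front;
   during gimplification it would already be too late to do so.  */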

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
};

/* Perform any pre-gimplification folding of C++ front-end trees to
   GENERIC.
   Note: the folding of non-OMP cases is something to move into the
   middle end.  For now, since most folding is done only on GENERIC in
   fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
      || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, NULL, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, do dereference invisible reference parms.  */
  if (is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == DECL_EXPR)
    {
      tree d = DECL_EXPR_DECL (stmt);
      if (TREE_CODE (d) == VAR_DECL)
        gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == TRY_BLOCK)
    {
      *walk_subtrees = 0;
      tree try_block = wtd->try_block;
      wtd->try_block = stmt;
      cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
      wtd->try_block = try_block;
      cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
    }
  else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
    {
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
    }
  else if (TREE_CODE (stmt) == THROW_EXPR)
    {
      location_t loc = location_of (stmt);
      if (TREE_NO_WARNING (stmt))
        /* Never mind.  */;
      else if (wtd->try_block)
        {
          if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
              && warning_at (loc, OPT_Wterminate,
                             "throw will always call terminate()")
              && cxx_dialect >= cxx11
              && DECL_DESTRUCTOR_P (current_function_decl))
            inform (loc, "in C++11 destructors default to noexcept");
        }
      else
        {
          if (warn_cxx11_compat && cxx_dialect < cxx11
              && DECL_DESTRUCTOR_P (current_function_decl)
              && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                  == NULL_TREE)
              && (get_defaulted_eh_spec (current_function_decl)
                  == empty_except_spec))
            warning_at (loc, OPT_Wc__11_compat,
                        "in C++11 this throw will terminate because "
                        "destructors default to noexcept");
        }
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE
           || TREE_CODE (stmt) == OMP_TASKLOOP)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if ((flag_sanitize
            & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
           && !wtd->no_sanitize_p)
    {
      if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
          && TREE_CODE (stmt) == NOP_EXPR
          && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
        ubsan_maybe_instrument_reference (stmt);
      else if (TREE_CODE (stmt) == CALL_EXPR)
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && POINTER_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
        }
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (flag_sanitize & SANITIZE_VPTR)
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it does
   return a value.  */

static void
cp_ubsan_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
        {
        case BIND_EXPR:
          t = BIND_EXPR_BODY (t);
          continue;
        case TRY_FINALLY_EXPR:
          t = TREE_OPERAND (t, 0);
          continue;
        case STATEMENT_LIST:
          {
            tree_stmt_iterator i = tsi_last (t);
            if (!tsi_end_p (i))
              {
                t = tsi_stmt (i);
                continue;
              }
          }
          break;
        case RETURN_EXPR:
          return;
        default:
          break;
        }
      break;
    }
  if (t == NULL_TREE)
    return;
  t = DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (t) == BIND_EXPR
      && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
    {
      tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
      t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
      tsi_link_after (&i, t, TSI_NEW_STMT);
    }
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  if (flag_sanitize & SANITIZE_RETURN
      && do_ubsan_in_current_function ())
    cp_ubsan_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
\f
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
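
/* Editorial note (illustrative sketch): for an array ARG1 the loop built
   above is roughly

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
   lab:
     fn (p1, p2);              // p2 only when ARG2 is given
     p1 += sizeof (element);   // and likewise p2
     if (p1 != end1) goto lab;

   applying FN to each scalar element in turn.  */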

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable having no mutable
   member.  */
1747 bool
1748 cxx_omp_const_qual_no_mutable (tree decl)
1749 {
1750 tree type = TREE_TYPE (decl);
1751 if (TREE_CODE (type) == REFERENCE_TYPE)
1752 {
1753 if (!is_invisiref_parm (decl))
1754 return false;
1755 type = TREE_TYPE (type);
1756
1757 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1758 {
1759 /* NVR doesn't preserve const qualification of the
1760 variable's type. */
1761 tree outer = outer_curly_brace_block (current_function_decl);
1762 tree var;
1763
1764 if (outer)
1765 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1766 if (DECL_NAME (decl) == DECL_NAME (var)
1767 && (TYPE_MAIN_VARIANT (type)
1768 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1769 {
1770 if (TYPE_READONLY (TREE_TYPE (var)))
1771 type = TREE_TYPE (var);
1772 break;
1773 }
1774 }
1775 }
1776
1777 if (type == error_mark_node)
1778 return false;
1779
1780 /* Variables with const-qualified type having no mutable member
1781 are predetermined shared. */
1782 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1783 return true;
1784
1785 return false;
1786 }
1787
1788 /* True if OpenMP sharing attribute of DECL is predetermined. */
1789
1790 enum omp_clause_default_kind
1791 cxx_omp_predetermined_sharing (tree decl)
1792 {
1793 /* Static data members are predetermined shared. */
1794 if (TREE_STATIC (decl))
1795 {
1796 tree ctx = CP_DECL_CONTEXT (decl);
1797 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1798 return OMP_CLAUSE_DEFAULT_SHARED;
1799 }
1800
1801 /* Const qualified vars having no mutable member are predetermined
1802 shared. */
1803 if (cxx_omp_const_qual_no_mutable (decl))
1804 return OMP_CLAUSE_DEFAULT_SHARED;
1805
1806 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1807 }
1808
1809 /* Finalize an implicitly determined clause. */
1810
1811 void
1812 cxx_omp_finish_clause (tree c, gimple_seq *)
1813 {
1814 tree decl, inner_type;
1815 bool make_shared = false;
1816
1817 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
1818 return;
1819
1820 decl = OMP_CLAUSE_DECL (c);
1821 decl = require_complete_type (decl);
1822 inner_type = TREE_TYPE (decl);
1823 if (decl == error_mark_node)
1824 make_shared = true;
1825 else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1826 inner_type = TREE_TYPE (inner_type);
1827
1828 /* We're interested in the base element, not arrays. */
1829 while (TREE_CODE (inner_type) == ARRAY_TYPE)
1830 inner_type = TREE_TYPE (inner_type);
1831
1832 /* Check for special function availability by building a call to one.
1833 Save the results, because later we won't be in the right context
1834 for making these queries. */
1835 if (!make_shared
1836 && CLASS_TYPE_P (inner_type)
1837 && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
1838 make_shared = true;
1839
1840 if (make_shared)
1841 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
1842 }
1843
1844 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1845 disregarded in OpenMP construct, because it is going to be
1846 remapped during OpenMP lowering. SHARED is true if DECL
1847 is going to be shared, false if it is going to be privatized. */
1848
1849 bool
1850 cxx_omp_disregard_value_expr (tree decl, bool shared)
1851 {
1852 return !shared
1853 && VAR_P (decl)
1854 && DECL_HAS_VALUE_EXPR_P (decl)
1855 && DECL_ARTIFICIAL (decl)
1856 && DECL_LANG_SPECIFIC (decl)
1857 && DECL_OMP_PRIVATIZED_MEMBER (decl);
1858 }
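/* For illustration only, not from the original source: a hypothetical
non-static data member used in a member function's OpenMP region.

struct B {
int n;
void f ()
{
#pragma omp parallel private (n)
n = 0;
}
};

Inside f, N is an artificial VAR_DECL whose DECL_VALUE_EXPR is this->n;
when N is privatized, that value-expr is disregarded and N is remapped
during OpenMP lowering, whereas a shared N keeps using this->n. */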
1859
1860 /* Perform folding on expression X. */
1861
1862 tree
1863 cp_fully_fold (tree x)
1864 {
1865 return cp_fold (x);
1866 }
1867
1868 /* Fold expression X, treating it as an rvalue if RVAL is true. */
1869
1870 static tree
1871 cp_fold_maybe_rvalue (tree x, bool rval)
1872 {
1873 if (rval && DECL_P (x))
1874 {
1875 tree v = decl_constant_value (x);
1876 if (v != error_mark_node)
1877 x = v;
1878 }
1879 return cp_fold (x);
1880 }
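/* For illustration only, not from the original source: given the
hypothetical declaration

const int k = 3;

folding K as an rvalue first replaces it with the INTEGER_CST 3 via
decl_constant_value, so k + 1 can fold to 4; folded as an lvalue
(RVAL false), K itself is preserved. */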
1881
1882 /* Fold expression X, which is used as an rvalue. */
1883
1884 static tree
1885 cp_fold_rvalue (tree x)
1886 {
1887 return cp_fold_maybe_rvalue (x, true);
1888 }
1889
1890 /* The c-common interface to cp_fold. If IN_INIT, this is in a static
1891 initializer and certain changes should be made to the folding done,
1892 but are not yet (FIXME). We never touch MAYBE_CONST, as it is only
1893 used for the C front end's C_MAYBE_CONST_EXPR. */
1894
1895 tree
1896 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
1897 {
1898 /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
1899 INTEGER_CST. */
1900 return cp_fold_rvalue (x);
1901 }
1902
1903 static GTY((cache, deletable)) cache_map fold_cache;
1904
1905 /* Dispose of the whole FOLD_CACHE. */
1906
1907 void
1908 clear_fold_cache (void)
1909 {
1910 gt_cleare_cache (fold_cache);
1911 }
1912
1913 /* This function tries to fold an expression X.
1914 To avoid combinatorial explosion, folding results are kept in FOLD_CACHE.
1915 If we are processing a template or X is invalid, we don't fold at all.
1916 For performance reasons we don't cache expressions representing a
1917 declaration or a constant.
1918 Returns X or its folded variant. */
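/* For illustration only, not from the original source: a sketch of the
caching contract, for some already-built expression tree E.

tree f1 = cp_fold (e); // folds E and caches e -> f1
tree f2 = cp_fold (e); // cache hit: returns f1 without refolding
gcc_assert (f1 == f2);
gcc_assert (cp_fold (f1) == f1); // folded results map to themselves

The last property comes from the second fold_cache.put at the end of
the function. */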
1919
1920 static tree
1921 cp_fold (tree x)
1922 {
1923 tree op0, op1, op2, op3;
1924 tree org_x = x, r = NULL_TREE;
1925 enum tree_code code;
1926 location_t loc;
1927 bool rval_ops = true;
1928
1929 if (!x || x == error_mark_node)
1930 return x;
1931
1932 if (processing_template_decl
1933 || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
1934 return x;
1935
1936 /* Don't bother to cache DECLs or constants. */
1937 if (DECL_P (x) || CONSTANT_CLASS_P (x))
1938 return x;
1939
1940 if (tree cached = fold_cache.get (x))
1941 return cached;
1942
1943 code = TREE_CODE (x);
1944 switch (code)
1945 {
1946 case SIZEOF_EXPR:
1947 x = fold_sizeof_expr (x);
1948 break;
1949
1950 case VIEW_CONVERT_EXPR:
1951 rval_ops = false; /* FALLTHRU */
1952 case CONVERT_EXPR:
1953 case NOP_EXPR:
1954 case NON_LVALUE_EXPR:
1955
1956 if (VOID_TYPE_P (TREE_TYPE (x)))
1957 return x;
1958
1959 loc = EXPR_LOCATION (x);
1960 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
1961
1962 if (code == CONVERT_EXPR
1963 && SCALAR_TYPE_P (TREE_TYPE (x))
1964 && op0 != void_node)
1965 /* During parsing we used convert_to_*_nofold; re-convert now using the
1966 folding variants, since fold() doesn't do those transformations. */
1967 x = fold (convert (TREE_TYPE (x), op0));
1968 else if (op0 != TREE_OPERAND (x, 0))
1969 {
1970 if (op0 == error_mark_node)
1971 x = error_mark_node;
1972 else
1973 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
1974 }
1975 else
1976 x = fold (x);
1977
1978 /* Conversion of an out-of-range value has implementation-defined
1979 behavior; the language considers it different from arithmetic
1980 overflow, which is undefined. */
1981 if (TREE_CODE (op0) == INTEGER_CST
1982 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
1983 TREE_OVERFLOW (x) = false;
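/* For illustration only, not from the original source: e.g. the
conversion (signed char) 300 has an implementation-defined result
and must not be flagged with TREE_OVERFLOW as if it were undefined
arithmetic overflow. */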
1984
1985 break;
1986
1987 case ADDR_EXPR:
1988 case REALPART_EXPR:
1989 case IMAGPART_EXPR:
1990 rval_ops = false; /* FALLTHRU */
1991 case CONJ_EXPR:
1992 case FIX_TRUNC_EXPR:
1993 case FLOAT_EXPR:
1994 case NEGATE_EXPR:
1995 case ABS_EXPR:
1996 case BIT_NOT_EXPR:
1997 case TRUTH_NOT_EXPR:
1998 case FIXED_CONVERT_EXPR:
1999 case INDIRECT_REF:
2000
2001 loc = EXPR_LOCATION (x);
2002 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2003
2004 if (op0 != TREE_OPERAND (x, 0))
2005 {
2006 if (op0 == error_mark_node)
2007 x = error_mark_node;
2008 else
2009 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2010 }
2011 else
2012 x = fold (x);
2013
2014 gcc_assert (TREE_CODE (x) != COND_EXPR
2015 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2016 break;
2017
2018 case UNARY_PLUS_EXPR:
2019 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2020 if (op0 == error_mark_node)
2021 x = error_mark_node;
2022 else
2023 x = fold_convert (TREE_TYPE (x), op0);
2024 break;
2025
2026 case POSTDECREMENT_EXPR:
2027 case POSTINCREMENT_EXPR:
2028 case INIT_EXPR:
2029 case PREDECREMENT_EXPR:
2030 case PREINCREMENT_EXPR:
2031 case COMPOUND_EXPR:
2032 case MODIFY_EXPR:
2033 rval_ops = false; /* FALLTHRU */
2034 case POINTER_PLUS_EXPR:
2035 case PLUS_EXPR:
2036 case MINUS_EXPR:
2037 case MULT_EXPR:
2038 case TRUNC_DIV_EXPR:
2039 case CEIL_DIV_EXPR:
2040 case FLOOR_DIV_EXPR:
2041 case ROUND_DIV_EXPR:
2042 case TRUNC_MOD_EXPR:
2043 case CEIL_MOD_EXPR:
2044 case ROUND_MOD_EXPR:
2045 case RDIV_EXPR:
2046 case EXACT_DIV_EXPR:
2047 case MIN_EXPR:
2048 case MAX_EXPR:
2049 case LSHIFT_EXPR:
2050 case RSHIFT_EXPR:
2051 case LROTATE_EXPR:
2052 case RROTATE_EXPR:
2053 case BIT_AND_EXPR:
2054 case BIT_IOR_EXPR:
2055 case BIT_XOR_EXPR:
2056 case TRUTH_AND_EXPR:
2057 case TRUTH_ANDIF_EXPR:
2058 case TRUTH_OR_EXPR:
2059 case TRUTH_ORIF_EXPR:
2060 case TRUTH_XOR_EXPR:
2061 case LT_EXPR: case LE_EXPR:
2062 case GT_EXPR: case GE_EXPR:
2063 case EQ_EXPR: case NE_EXPR:
2064 case UNORDERED_EXPR: case ORDERED_EXPR:
2065 case UNLT_EXPR: case UNLE_EXPR:
2066 case UNGT_EXPR: case UNGE_EXPR:
2067 case UNEQ_EXPR: case LTGT_EXPR:
2068 case RANGE_EXPR: case COMPLEX_EXPR:
2069
2070 loc = EXPR_LOCATION (x);
2071 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2072 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2073
2074 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2075 {
2076 if (op0 == error_mark_node || op1 == error_mark_node)
2077 x = error_mark_node;
2078 else
2079 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2080 }
2081 else
2082 x = fold (x);
2083
2084 if (TREE_NO_WARNING (org_x)
2085 && warn_nonnull_compare
2086 && COMPARISON_CLASS_P (org_x))
2087 {
2088 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2089 ;
2090 else if (COMPARISON_CLASS_P (x))
2091 TREE_NO_WARNING (x) = 1;
2092 /* Otherwise give up on optimizing these here; the GIMPLE
2093 folders can optimize them later on. */
2094 else if (op0 != TREE_OPERAND (org_x, 0)
2095 || op1 != TREE_OPERAND (org_x, 1))
2096 {
2097 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2098 TREE_NO_WARNING (x) = 1;
2099 }
2100 else
2101 x = org_x;
2102 }
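/* For illustration only, not from the original source: the C++ front
end sets TREE_NO_WARNING on comparisons for which the later GIMPLE
-Wnonnull-compare check should stay quiet, e.g. ones it generated
itself for a parameter declared

void f (int *p) __attribute__ ((nonnull));

The block above keeps that mark on any rebuilt comparison so that
refolding does not reintroduce the warning. */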
2103 break;
2104
2105 case VEC_COND_EXPR:
2106 case COND_EXPR:
2107
2108 /* Don't bother folding a COND_EXPR of void type, since it cannot
2109 produce a constant value. Also, some statement-level uses of
2110 COND_EXPR leave one of the branches NULL, so folding would crash. */
2111 if (VOID_TYPE_P (TREE_TYPE (x)))
2112 return x;
2113
2114 loc = EXPR_LOCATION (x);
2115 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2116 op1 = cp_fold (TREE_OPERAND (x, 1));
2117 op2 = cp_fold (TREE_OPERAND (x, 2));
2118
2119 if (op0 != TREE_OPERAND (x, 0)
2120 || op1 != TREE_OPERAND (x, 1)
2121 || op2 != TREE_OPERAND (x, 2))
2122 {
2123 if (op0 == error_mark_node
2124 || op1 == error_mark_node
2125 || op2 == error_mark_node)
2126 x = error_mark_node;
2127 else
2128 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2129 }
2130 else
2131 x = fold (x);
2132
2133 /* A COND_EXPR might have incompatible types in branches if one or both
2134 arms are bitfields. If folding exposed such a branch, fix it up. */
2135 if (TREE_CODE (x) != code)
2136 if (tree type = is_bitfield_expr_with_lowered_type (x))
2137 x = fold_convert (type, x);
2138
2139 break;
2140
2141 case CALL_EXPR:
2142 {
2143 int i, m, sv = optimize, nw = sv, changed = 0;
2144 tree callee = get_callee_fndecl (x);
2145
2146 /* Some built-in function calls will be evaluated at compile-time in
2147 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2148 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2149 if (callee && DECL_BUILT_IN (callee) && !optimize
2150 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2151 && current_function_decl
2152 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2153 nw = 1;
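/* For illustration only, not from the original source: e.g. at -O0,

constexpr bool f () { return __builtin_constant_p (42); }

must still see __builtin_constant_p evaluated rather than folded to 0,
which is why fold () below runs with OPTIMIZE temporarily set to NW. */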
2154
2155 x = copy_node (x);
2156
2157 m = call_expr_nargs (x);
2158 for (i = 0; i < m; i++)
2159 {
2160 r = cp_fold (CALL_EXPR_ARG (x, i));
2161 if (r != CALL_EXPR_ARG (x, i))
2162 {
2163 if (r == error_mark_node)
2164 {
2165 x = error_mark_node;
2166 break;
2167 }
2168 changed = 1;
2169 }
2170 CALL_EXPR_ARG (x, i) = r;
2171 }
2172 if (x == error_mark_node)
2173 break;
2174
2175 optimize = nw;
2176 r = fold (x);
2177 optimize = sv;
2178
2179 if (TREE_CODE (r) != CALL_EXPR)
2180 {
2181 x = cp_fold (r);
2182 break;
2183 }
2184
2185 optimize = nw;
2186
2187 /* Invoke maybe_constant_value for functions declared
2188 constexpr and not called with AGGR_INIT_EXPRs.
2189 TODO:
2190 Do constexpr expansion of expressions where the call itself is not
2191 constant, but the call followed by an INDIRECT_REF is. */
2192 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2193 && !flag_no_inline)
2194 r = maybe_constant_value (x);
2195 optimize = sv;
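/* For illustration only, not from the original source: assuming

constexpr int sq (int i) { return i * i; }

a folded call sq (4) reaching this point can be replaced by the
INTEGER_CST 16 via maybe_constant_value above. */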
2196
2197 if (TREE_CODE (r) != CALL_EXPR)
2198 {
2199 x = r;
2200 break;
2201 }
2202
2203 if (!changed)
2204 x = org_x;
2205 break;
2206 }
2207
2208 case CONSTRUCTOR:
2209 {
2210 unsigned i;
2211 constructor_elt *p;
2212 bool changed = false;
2213 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2214 vec<constructor_elt, va_gc> *nelts = NULL;
2215 vec_safe_reserve (nelts, vec_safe_length (elts));
2216 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2217 {
2218 tree op = cp_fold (p->value);
2219 constructor_elt e = { p->index, op };
2220 nelts->quick_push (e);
2221 if (op != p->value)
2222 {
2223 if (op == error_mark_node)
2224 {
2225 x = error_mark_node;
2226 changed = false;
2227 break;
2228 }
2229 changed = true;
2230 }
2231 }
2232 if (changed)
2233 x = build_constructor (TREE_TYPE (x), nelts);
2234 else
2235 vec_free (nelts);
2236 break;
2237 }
2238 case TREE_VEC:
2239 {
2240 bool changed = false;
2241 vec<tree, va_gc> *vec = make_tree_vector ();
2242 int i, n = TREE_VEC_LENGTH (x);
2243 vec_safe_reserve (vec, n);
2244
2245 for (i = 0; i < n; i++)
2246 {
2247 tree op = cp_fold (TREE_VEC_ELT (x, i));
2248 vec->quick_push (op);
2249 if (op != TREE_VEC_ELT (x, i))
2250 changed = true;
2251 }
2252
2253 if (changed)
2254 {
2255 r = copy_node (x);
2256 for (i = 0; i < n; i++)
2257 TREE_VEC_ELT (r, i) = (*vec)[i];
2258 x = r;
2259 }
2260
2261 release_tree_vector (vec);
2262 }
2263
2264 break;
2265
2266 case ARRAY_REF:
2267 case ARRAY_RANGE_REF:
2268
2269 loc = EXPR_LOCATION (x);
2270 op0 = cp_fold (TREE_OPERAND (x, 0));
2271 op1 = cp_fold (TREE_OPERAND (x, 1));
2272 op2 = cp_fold (TREE_OPERAND (x, 2));
2273 op3 = cp_fold (TREE_OPERAND (x, 3));
2274
2275 if (op0 != TREE_OPERAND (x, 0)
2276 || op1 != TREE_OPERAND (x, 1)
2277 || op2 != TREE_OPERAND (x, 2)
2278 || op3 != TREE_OPERAND (x, 3))
2279 {
2280 if (op0 == error_mark_node
2281 || op1 == error_mark_node
2282 || op2 == error_mark_node
2283 || op3 == error_mark_node)
2284 x = error_mark_node;
2285 else
2286 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2287 }
2288
2289 x = fold (x);
2290 break;
2291
2292 default:
2293 return org_x;
2294 }
2295
2296 fold_cache.put (org_x, x);
2297 /* Prevent us from trying to fold an already folded result again. */
2298 if (x != org_x)
2299 fold_cache.put (x, x);
2300
2301 return x;
2302 }
2303
2304 #include "gt-cp-cp-gimplify.h"