/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "cp-tree.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "gimplify.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"
#include "splay-tree.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static void cp_genericize_tree (tree*);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

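/* For illustration (a rough sketch, label names invented, not actual
   dump output), a "break" inside a loop lowers to a GOTO_EXPR aimed at
   the label pushed by begin_bc_block:

     while (cond) { ...; break; ... }
       ==>
     top:; if (!cond) goto brk; ...; goto brk; ...; goto top; brk:;

   Nested constructs push fresh labels onto bc_label, so break and
   continue always bind to the innermost enclosing loop or switch.  */
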
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

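/* An illustrative sketch of the two lowerings above (tree syntax
   approximated, not verbatim dump output):

     try { f (); } catch (E &e) { g (); }
       ==>
     TRY_CATCH_EXPR <f ();, CATCH_EXPR <E, { g (); }>>

   i.e. the TRY_BLOCK becomes a TRY_CATCH_EXPR and each HANDLER in its
   handler list becomes a CATCH_EXPR.  */
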
/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

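/* Roughly, for a dynamic exception specification (a sketch, not
   verbatim output):

     void f () throw (A) { body }
       ==>
     TRY_CATCH_EXPR <body,
                     EH_FILTER_EXPR <allowed = {A},
                                     failure = call_unexpected (ptr)>>

   so anything escaping BODY that does not match the allowed list
   funnels into the unexpected-handler call built above.  */
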
/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

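/* E.g. (a rough sketch of the resulting GENERIC):

     if (x) f (); else g ();   ==>   COND_EXPR <x, f ();, g ();>
     if (1) f ();              ==>   f ();

   In the second case the else arm is an empty statement with no side
   effects, so the constant test is folded away entirely.  */
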
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree entry = NULL, exit = NULL, t;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (incr && EXPR_P (incr))
    SET_EXPR_LOCATION (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* If the condition is zero, don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        {
          t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                          get_bc_label (bc_break));
          append_to_statement_list (t, &stmt_list);
        }
    }
  else
    {
      /* Expand to gotos, just like c_finish_loop.  TODO: Use LOOP_EXPR.  */
      tree top = build1 (LABEL_EXPR, void_type_node,
                         create_artificial_label (start_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));

      if (cond && !integer_nonzerop (cond))
        {
          /* Canonicalize the loop condition to the end.  This means
             generating a branch to the loop condition.  Reuse the
             continue label, if possible.  */
          if (cond_is_first)
            {
              if (incr)
                {
                  entry = build1 (LABEL_EXPR, void_type_node,
                                  create_artificial_label (start_locus));
                  t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                                  LABEL_EXPR_LABEL (entry));
                }
              else
                t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                                get_bc_label (bc_continue));
              append_to_statement_list (t, &stmt_list);
            }

          t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
          exit = fold_build3_loc (start_locus,
                                  COND_EXPR, void_type_node, cond, exit, t);
        }

      append_to_statement_list (top, &stmt_list);
    }

  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  append_to_statement_list (entry, &stmt_list);
  append_to_statement_list (exit, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);

  if (stmt_list == NULL_TREE)
    stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);

  *stmt_p = stmt_list;
}

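/* To make the condition-at-the-end layout concrete, a hedged sketch of
   the statement list produced for a simple for-loop (label names
   invented; INIT is prepended by genericize_for_stmt below):

     for (init; cond; incr) body
       ==>
     init;
     goto entry;                // branch to the test on first entry
     top:;
     body;
     cont:;                     // target of "continue"
     incr;
     entry:;
     if (cond) goto top; else goto brk;
     brk:;                      // target of "break"

   A do-while omits the initial branch, so the body runs once before
   COND is first tested.  */
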
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

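/* Sketch of the shape after this lowering (approximate, label name
   invented):

     switch (c) { case 1: ...; break; }
       ==>
     SWITCH_EXPR <type, c, { case 1:; ...; goto brk; }>
     brk:;

   finish_bc_block appends the break label after the SWITCH_EXPR, so
   every "break" in the body becomes a plain goto past the switch.  */
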
/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

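/* The guard above is the usual "busy flag" reentrancy pattern: the
   gimplify_and_add call re-enters cp_gimplify_expr on the same OMP_FOR,
   and returning GS_UNHANDLED on that inner visit lets the generic
   gimplifier process the node once instead of looping forever.  */
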
/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

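/* For intuition, an approximate example (tree syntax simplified, not
   verbatim dump output):

     A a = A (1);
       parsed as  INIT_EXPR <a, TARGET_EXPR <tmp, AGGR_INIT_EXPR <A::A, tmp, 1>>>
       becomes    AGGR_INIT_EXPR <A::A, a, 1>

   i.e. the constructor builds directly into "a" instead of into a
   temporary that would then have to be copied.  */
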
/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

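/* Conceptually (a sketch in source terms, not actual output):

     MUST_NOT_THROW_EXPR <body>
       ==>
     try { body } catch (...) { terminate (); }

   except that the handler is the dedicated GIMPLE_EH_MUST_NOT_THROW
   construct wrapping terminate_node rather than a real catch-all.  */
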
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
                  || (TREE_CODE (op1) == CONSTRUCTOR
                      && CONSTRUCTOR_NELTS (op1) == 0
                      && !TREE_CLOBBER_P (op1))
                  || (TREE_CODE (op1) == CALL_EXPR
                      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and non-empty
               CONSTRUCTORs get reduced properly, and we leave the return
               slot optimization alone because it isn't a copy (FIXME so it
               shouldn't be represented as one).

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1)
                || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
              *expr_p = op0;
            else if (TREE_CODE (op1) == MEM_REF
                     && TREE_THIS_VOLATILE (op1))
              {
                /* Similarly for volatile MEM_REFs on the RHS.  */
                if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
                  *expr_p = op0;
                else
                  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                    TREE_OPERAND (op1, 0), op0);
              }
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

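/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference, as indicated by DECL_BY_REFERENCE.  */
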
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

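/* Roughly speaking, for a class type that must be passed by invisible
   reference (a hedged sketch, not actual dump output):

     void f (A a) { use (a); }
       is treated as if it were
     void f (A &a) { use (*&a); }

   cp_genericize_r below rewrites each use of such a parm with
   convert_from_reference, and cp_genericize fixes up the parm's type.  */
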
/* Return true if the UIDs stored in the two cxx_int_tree_map entries
   are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *) item)->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will already be too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
        *stmt_p
          = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
                                        SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
        *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      else
        *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
        *stmt_p = size_one_node;
      return NULL;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  wtd.bind_expr_stack.release ();
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_enable_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

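/* For an array operand the code above amounts to the following loop
   (a hedged source-level sketch; p1, p2 and end1 mirror the
   temporaries created above):

     p1 = &arg1[0]...[0];       // address of the innermost first element
     p2 = &arg2[0]...[0];       // only if ARG2 was given
     end1 = p1 + sizeof (arg1);
     do
       {
         fn (p1, p2, <default args>);
         p1++; p2++;
       }
     while (p1 != end1);

   so FN is applied element by element over the flattened array.  */
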
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

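/* As read off the indices used by the four hooks above, the
   CP_OMP_CLAUSE_INFO TREE_VEC holds the special member functions for
   the clause's type: element 0 is the (default or copy) constructor,
   element 1 the destructor, and element 2 the copy-assignment
   operator; a NULL element means the trivial operation suffices.  */
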
/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable having no mutable
   member.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const-qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}