/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "cp-tree.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "hashtab.h"
#include "flags.h"
#include "splay-tree.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static void cp_genericize_tree (tree *);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
                              block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
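
/* An illustrative sketch (hypothetical labels L1/L2, not part of this
   file): with nested loops, bc_label behaves as a stack chained through
   DECL_CHAIN:

     blab1 = begin_bc_block (bc_break, loc);  // bc_label[bc_break] == L1
     blab2 = begin_bc_block (bc_break, loc);  // bc_label[bc_break] == L2 -> L1
     get_bc_label (bc_break);                 // a "break" here targets L2
     finish_bc_block (&inner, bc_break, blab2);  // pops back to L1
     finish_bc_block (&outer, bc_break, blab1);  // pops back to NULL  */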

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}
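
/* An illustrative sketch (not produced by this file verbatim): a C++
   statement such as

     try { f (); } catch (E &e) { g (); }

   is lowered by the two functions above to roughly

     TRY_CATCH_EXPR <<f ();>, CATCH_EXPR <E &, <g ();>>>

   i.e. the TRY_BLOCK becomes a TRY_CATCH_EXPR and each HANDLER becomes a
   CATCH_EXPR keyed on the caught type.  */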

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}
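
/* An illustrative sketch: a dynamic exception specification such as

     void f () throw (A, B) { body; }

   ends up, after genericize_eh_spec_block, as roughly

     TRY_CATCH_EXPR <body,
                     EH_FILTER_EXPR <(A, B), call unexpected handler>>

   so a throw of a type outside the allowed list runs the failure code
   built above from call_unexpected_node.  */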

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}
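
/* An illustrative sketch of the constant-condition shortcuts above:

     if (1) a (); else b ();   lowers to just   a ();
     if (0) a ();              lowers to an empty statement;

   everything else becomes COND_EXPR <cond, then, else>, with empty
   statements standing in for missing branches.  */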

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
                    tree incr, bool cond_is_first, int *walk_subtrees,
                    void *data)
{
  tree blab, clab;
  tree entry = NULL, exit = NULL, t;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (incr && EXPR_P (incr))
    SET_EXPR_LOCATION (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  /* If the condition is zero, don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
        {
          t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                          get_bc_label (bc_break));
          append_to_statement_list (t, &stmt_list);
        }
    }
  else
    {
      /* Expand to gotos, just like c_finish_loop.  TODO: Use LOOP_EXPR.  */
      tree top = build1 (LABEL_EXPR, void_type_node,
                         create_artificial_label (start_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      exit = build1 (GOTO_EXPR, void_type_node, LABEL_EXPR_LABEL (top));

      if (cond && !integer_nonzerop (cond))
        {
          /* Canonicalize the loop condition to the end.  This means
             generating a branch to the loop condition.  Reuse the
             continue label, if possible.  */
          if (cond_is_first)
            {
              if (incr)
                {
                  entry = build1 (LABEL_EXPR, void_type_node,
                                  create_artificial_label (start_locus));
                  t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                                  LABEL_EXPR_LABEL (entry));
                }
              else
                t = build1_loc (start_locus, GOTO_EXPR, void_type_node,
                                get_bc_label (bc_continue));
              append_to_statement_list (t, &stmt_list);
            }

          t = build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break));
          exit = fold_build3_loc (start_locus,
                                  COND_EXPR, void_type_node, cond, exit, t);
        }

      append_to_statement_list (top, &stmt_list);
    }

  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  append_to_statement_list (entry, &stmt_list);
  append_to_statement_list (exit, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);

  if (stmt_list == NULL_TREE)
    stmt_list = build1 (NOP_EXPR, void_type_node, integer_zero_node);

  *stmt_p = stmt_list;
}
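
/* An illustrative sketch, for the common case of a while loop with no
   increment expression:

     while (cond) body;

   is lowered by genericize_cp_loop to roughly

       goto cont;
     top:
       body;
     cont:
       if (cond) goto top; else goto brk;
     brk:

   i.e. the test is canonicalized to the bottom of the loop with one
   initial jump to it; a do-while simply omits that initial goto, and a
   for loop jumps to a separate entry label so the continue label can
   precede the increment.  */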

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
                      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
                      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
                      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
  finish_bc_block (stmt_p, bc_break, break_block);
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}
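
/* An illustrative sketch: by the function above, "continue;" becomes the
   pair

     PREDICT_EXPR <PRED_CONTINUE, NOT_TAKEN>;
     goto <continue label of the innermost enclosing loop>;

   where the PREDICT_EXPR passes a branch-prediction hint about the
   continue edge down to the optimizers.  */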

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}
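
/* An illustrative sketch: for a declaration like

     A a = f ();   // A has a nontrivial constructor

   the initializer typically arrives here as

     INIT_EXPR <a, TARGET_EXPR <tmp, AGGR_INIT_EXPR <f, slot>>>;

   cp_gimplify_init_expr looks through the TARGET_EXPR, redirects the slot
   to "a" and drops the INIT_EXPR wrapper, so f constructs directly into
   "a" rather than into a temporary that would then be copied.  */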

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
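
/* An illustrative sketch: MUST_NOT_THROW_EXPR <body> is lowered above to
   the GIMPLE equivalent of

     try { body; } catch (...) { terminate (); }

   i.e. a GIMPLE_TRY whose handler is GIMPLE_EH_MUST_NOT_THROW, so any
   exception escaping the region calls terminate ().  */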

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        cp_genericize_tree (expr_p);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
                  || (TREE_CODE (op1) == CONSTRUCTOR
                      && CONSTRUCTOR_NELTS (op1) == 0
                      && !TREE_CLOBBER_P (op1))
                  || (TREE_CODE (op1) == CALL_EXPR
                      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and non-empty
               CONSTRUCTORs get reduced properly, and we leave the return
               slot optimization alone because it isn't a copy (FIXME so it
               shouldn't be represented as one).

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1)
                || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
              *expr_p = op0;
            else if (TREE_CODE (op1) == MEM_REF
                     && TREE_THIS_VOLATILE (op1))
              {
                /* Similarly for volatile MEM_REFs on the RHS.  */
                if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
                  *expr_p = op0;
                else
                  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                    TREE_OPERAND (op1, 0), op0);
              }
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a PARM_DECL or RESULT_DECL that is passed (or
   returned) by invisible reference, i.e. has DECL_BY_REFERENCE set.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
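
/* An illustrative example: given

     struct S { S (const S &); ~S (); };
     void f (S s);

   the ABI passes "s" by address even though the source says by value;
   such a parameter is marked DECL_BY_REFERENCE, and cp_genericize_r
   below rewrites most uses of it into dereferences.  */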

/* Return true if the UIDs in the two int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *) item)->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will already be too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        /* Don't dereference an invisiref in reduction clause's
           OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
           still needs to be genericized.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_REDUCTION_INIT (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                            cp_genericize_r, data, NULL);
            if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
              cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                          CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                 : TRY_FINALLY_EXPR,
                          void_type_node,
                          CLEANUP_BODY (stmt),
                          CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack.exists ())
        {
          int i;
          for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
  else if (TREE_CODE (stmt) == FOR_STMT)
    genericize_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == WHILE_STMT)
    genericize_while_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == DO_STMT)
    genericize_do_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SWITCH_STMT)
    genericize_switch_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == CONTINUE_STMT)
    genericize_continue_stmt (stmt_p);
  else if (TREE_CODE (stmt) == BREAK_STMT)
    genericize_break_stmt (stmt_p);
  else if (TREE_CODE (stmt) == OMP_FOR
           || TREE_CODE (stmt) == OMP_SIMD
           || TREE_CODE (stmt) == OMP_DISTRIBUTE)
    genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
  else if (TREE_CODE (stmt) == SIZEOF_EXPR)
    {
      if (SIZEOF_EXPR_TYPE_P (stmt))
        *stmt_p
          = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt, 0)),
                                        SIZEOF_EXPR, false);
      else if (TYPE_P (TREE_OPERAND (stmt, 0)))
        *stmt_p = cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      else
        *stmt_p = cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt, 0),
                                              SIZEOF_EXPR, false);
      if (*stmt_p == error_mark_node)
        *stmt_p = size_one_node;
      return NULL;
    }

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree *t_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  wtd.bind_expr_stack.release ();
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_enable_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl)
      = expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
\f
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i,
                                           tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
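
/* An illustrative sketch: when ARG1 is an array of class type, the
   statement list built above is morally

       p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
       p2 = &arg2[0]...[0];  // only with two arguments
     lab:
       fn (p1, p2, <default args>);
       p1 += sizeof (element);  p2 += sizeof (element);
       if (p1 != end1) goto lab;

   i.e. FN is applied to each element in turn.  */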

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable having no mutable
   member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
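
/* An illustrative example of why the mutable check above matters:

     struct C { mutable int cache; };
     const C c;

   c.cache can still be written through, so such a variable must not be
   treated as predetermined shared the way a truly read-only const object
   is.  */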

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const-qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}