1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
3 Copyright (C) 2002-2013 Free Software Foundation, Inc.
4 Contributed by Jason Merrill <jason@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "stor-layout.h"
29 #include "c-family/c-common.h"
30 #include "tree-iterator.h"
34 #include "pointer-set.h"
36 #include "splay-tree.h"
38 /* Forward declarations. */
40 static tree
cp_genericize_r (tree
*, int *, void *);
41 static void cp_genericize_tree (tree
*);
43 /* Local declarations. */
45 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
47 /* Stack of labels which are targets for "break" or "continue",
48 linked through TREE_CHAIN. */
49 static tree bc_label
[2];
51 /* Begin a scope which can be exited by a break or continue statement. BC
54 Just creates a label with location LOCATION and pushes it into the current
58 begin_bc_block (enum bc_t bc
, location_t location
)
60 tree label
= create_artificial_label (location
);
61 DECL_CHAIN (label
) = bc_label
[bc
];
66 /* Finish a scope which can be exited by a break or continue statement.
67 LABEL was returned from the most recent call to begin_bc_block. BLOCK is
68 an expression for the contents of the scope.
70 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
71 BLOCK. Otherwise, just forget the label. */
74 finish_bc_block (tree
*block
, enum bc_t bc
, tree label
)
76 gcc_assert (label
== bc_label
[bc
]);
78 if (TREE_USED (label
))
79 append_to_statement_list (build1 (LABEL_EXPR
, void_type_node
, label
),
82 bc_label
[bc
] = DECL_CHAIN (label
);
83 DECL_CHAIN (label
) = NULL_TREE
;
86 /* Get the LABEL_EXPR to represent a break or continue statement
87 in the current block scope. BC indicates which. */
90 get_bc_label (enum bc_t bc
)
92 tree label
= bc_label
[bc
];
94 /* Mark the label used for finish_bc_block. */
95 TREE_USED (label
) = 1;
99 /* Genericize a TRY_BLOCK. */
102 genericize_try_block (tree
*stmt_p
)
104 tree body
= TRY_STMTS (*stmt_p
);
105 tree cleanup
= TRY_HANDLERS (*stmt_p
);
107 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
110 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
113 genericize_catch_block (tree
*stmt_p
)
115 tree type
= HANDLER_TYPE (*stmt_p
);
116 tree body
= HANDLER_BODY (*stmt_p
);
118 /* FIXME should the caught type go in TREE_TYPE? */
119 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
122 /* A terser interface for building a representation of an exception
126 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
130 /* FIXME should the allowed types go in TREE_TYPE? */
131 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
132 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
134 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
135 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
140 /* Genericize an EH_SPEC_BLOCK by converting it to a
141 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
144 genericize_eh_spec_block (tree
*stmt_p
)
146 tree body
= EH_SPEC_STMTS (*stmt_p
);
147 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
148 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
150 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
151 TREE_NO_WARNING (*stmt_p
) = true;
152 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
155 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
158 genericize_if_stmt (tree
*stmt_p
)
160 tree stmt
, cond
, then_
, else_
;
161 location_t locus
= EXPR_LOCATION (*stmt_p
);
164 cond
= IF_COND (stmt
);
165 then_
= THEN_CLAUSE (stmt
);
166 else_
= ELSE_CLAUSE (stmt
);
169 then_
= build_empty_stmt (locus
);
171 else_
= build_empty_stmt (locus
);
173 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
175 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
178 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
179 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
180 SET_EXPR_LOCATION (stmt
, locus
);
184 /* Build a generic representation of one of the C loop forms. COND is the
185 loop condition or NULL_TREE. BODY is the (possibly compound) statement
186 controlled by the loop. INCR is the increment expression of a for-loop,
187 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
188 evaluated before the loop body as in while and for loops, or after the
189 loop body as in do-while loops. */
192 genericize_cp_loop (tree
*stmt_p
, location_t start_locus
, tree cond
, tree body
,
193 tree incr
, bool cond_is_first
, int *walk_subtrees
,
197 tree entry
= NULL
, exit
= NULL
, t
;
198 tree stmt_list
= NULL
;
200 blab
= begin_bc_block (bc_break
, start_locus
);
201 clab
= begin_bc_block (bc_continue
, start_locus
);
203 if (incr
&& EXPR_P (incr
))
204 SET_EXPR_LOCATION (incr
, start_locus
);
206 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
207 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
208 cp_walk_tree (&incr
, cp_genericize_r
, data
, NULL
);
211 /* If condition is zero don't generate a loop construct. */
212 if (cond
&& integer_zerop (cond
))
216 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
217 get_bc_label (bc_break
));
218 append_to_statement_list (t
, &stmt_list
);
223 /* Expand to gotos, just like c_finish_loop. TODO: Use LOOP_EXPR. */
224 tree top
= build1 (LABEL_EXPR
, void_type_node
,
225 create_artificial_label (start_locus
));
227 /* If we have an exit condition, then we build an IF with gotos either
228 out of the loop, or to the top of it. If there's no exit condition,
229 then we just build a jump back to the top. */
230 exit
= build1 (GOTO_EXPR
, void_type_node
, LABEL_EXPR_LABEL (top
));
232 if (cond
&& !integer_nonzerop (cond
))
234 /* Canonicalize the loop condition to the end. This means
235 generating a branch to the loop condition. Reuse the
236 continue label, if possible. */
241 entry
= build1 (LABEL_EXPR
, void_type_node
,
242 create_artificial_label (start_locus
));
243 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
244 LABEL_EXPR_LABEL (entry
));
247 t
= build1_loc (start_locus
, GOTO_EXPR
, void_type_node
,
248 get_bc_label (bc_continue
));
249 append_to_statement_list (t
, &stmt_list
);
252 t
= build1 (GOTO_EXPR
, void_type_node
, get_bc_label (bc_break
));
253 exit
= fold_build3_loc (start_locus
,
254 COND_EXPR
, void_type_node
, cond
, exit
, t
);
257 append_to_statement_list (top
, &stmt_list
);
260 append_to_statement_list (body
, &stmt_list
);
261 finish_bc_block (&stmt_list
, bc_continue
, clab
);
262 append_to_statement_list (incr
, &stmt_list
);
263 append_to_statement_list (entry
, &stmt_list
);
264 append_to_statement_list (exit
, &stmt_list
);
265 finish_bc_block (&stmt_list
, bc_break
, blab
);
267 if (stmt_list
== NULL_TREE
)
268 stmt_list
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
273 /* Genericize a FOR_STMT node *STMT_P. */
276 genericize_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
281 tree init
= FOR_INIT_STMT (stmt
);
285 cp_walk_tree (&init
, cp_genericize_r
, data
, NULL
);
286 append_to_statement_list (init
, &expr
);
289 genericize_cp_loop (&loop
, EXPR_LOCATION (stmt
), FOR_COND (stmt
),
290 FOR_BODY (stmt
), FOR_EXPR (stmt
), 1, walk_subtrees
, data
);
291 append_to_statement_list (loop
, &expr
);
295 /* Genericize a WHILE_STMT node *STMT_P. */
298 genericize_while_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
301 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), WHILE_COND (stmt
),
302 WHILE_BODY (stmt
), NULL_TREE
, 1, walk_subtrees
, data
);
305 /* Genericize a DO_STMT node *STMT_P. */
308 genericize_do_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
311 genericize_cp_loop (stmt_p
, EXPR_LOCATION (stmt
), DO_COND (stmt
),
312 DO_BODY (stmt
), NULL_TREE
, 0, walk_subtrees
, data
);
315 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
318 genericize_switch_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
321 tree break_block
, body
, cond
, type
;
322 location_t stmt_locus
= EXPR_LOCATION (stmt
);
324 break_block
= begin_bc_block (bc_break
, stmt_locus
);
326 body
= SWITCH_STMT_BODY (stmt
);
328 body
= build_empty_stmt (stmt_locus
);
329 cond
= SWITCH_STMT_COND (stmt
);
330 type
= SWITCH_STMT_TYPE (stmt
);
332 cp_walk_tree (&body
, cp_genericize_r
, data
, NULL
);
333 cp_walk_tree (&cond
, cp_genericize_r
, data
, NULL
);
334 cp_walk_tree (&type
, cp_genericize_r
, data
, NULL
);
337 *stmt_p
= build3_loc (stmt_locus
, SWITCH_EXPR
, type
, cond
, body
, NULL_TREE
);
338 finish_bc_block (stmt_p
, bc_break
, break_block
);
341 /* Genericize a CONTINUE_STMT node *STMT_P. */
344 genericize_continue_stmt (tree
*stmt_p
)
346 tree stmt_list
= NULL
;
347 tree pred
= build_predict_expr (PRED_CONTINUE
, NOT_TAKEN
);
348 tree label
= get_bc_label (bc_continue
);
349 location_t location
= EXPR_LOCATION (*stmt_p
);
350 tree jump
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
351 append_to_statement_list (pred
, &stmt_list
);
352 append_to_statement_list (jump
, &stmt_list
);
356 /* Genericize a BREAK_STMT node *STMT_P. */
359 genericize_break_stmt (tree
*stmt_p
)
361 tree label
= get_bc_label (bc_break
);
362 location_t location
= EXPR_LOCATION (*stmt_p
);
363 *stmt_p
= build1_loc (location
, GOTO_EXPR
, void_type_node
, label
);
366 /* Genericize a OMP_FOR node *STMT_P. */
369 genericize_omp_for_stmt (tree
*stmt_p
, int *walk_subtrees
, void *data
)
372 location_t locus
= EXPR_LOCATION (stmt
);
373 tree clab
= begin_bc_block (bc_continue
, locus
);
375 cp_walk_tree (&OMP_FOR_BODY (stmt
), cp_genericize_r
, data
, NULL
);
376 cp_walk_tree (&OMP_FOR_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
377 cp_walk_tree (&OMP_FOR_INIT (stmt
), cp_genericize_r
, data
, NULL
);
378 cp_walk_tree (&OMP_FOR_COND (stmt
), cp_genericize_r
, data
, NULL
);
379 cp_walk_tree (&OMP_FOR_INCR (stmt
), cp_genericize_r
, data
, NULL
);
380 cp_walk_tree (&OMP_FOR_PRE_BODY (stmt
), cp_genericize_r
, data
, NULL
);
383 finish_bc_block (&OMP_FOR_BODY (stmt
), bc_continue
, clab
);
386 /* Hook into the middle of gimplifying an OMP_FOR node. */
388 static enum gimplify_status
389 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
391 tree for_stmt
= *expr_p
;
392 gimple_seq seq
= NULL
;
394 /* Protect ourselves from recursion. */
395 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
397 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
399 gimplify_and_add (for_stmt
, &seq
);
400 gimple_seq_add_seq (pre_p
, seq
);
402 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
407 /* Gimplify an EXPR_STMT node. */
410 gimplify_expr_stmt (tree
*stmt_p
)
412 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
414 if (stmt
== error_mark_node
)
417 /* Gimplification of a statement expression will nullify the
418 statement if all its side effects are moved to *PRE_P and *POST_P.
420 In this case we will not want to emit the gimplified statement.
421 However, we may still want to emit a warning, so we do that before
423 if (stmt
&& warn_unused_value
)
425 if (!TREE_SIDE_EFFECTS (stmt
))
427 if (!IS_EMPTY_STMT (stmt
)
428 && !VOID_TYPE_P (TREE_TYPE (stmt
))
429 && !TREE_NO_WARNING (stmt
))
430 warning (OPT_Wunused_value
, "statement with no effect");
433 warn_if_unused_value (stmt
, input_location
);
436 if (stmt
== NULL_TREE
)
437 stmt
= alloc_stmt_list ();
442 /* Gimplify initialization from an AGGR_INIT_EXPR. */
445 cp_gimplify_init_expr (tree
*expr_p
)
447 tree from
= TREE_OPERAND (*expr_p
, 1);
448 tree to
= TREE_OPERAND (*expr_p
, 0);
451 /* What about code that pulls out the temp and uses it elsewhere? I
452 think that such code never uses the TARGET_EXPR as an initializer. If
453 I'm wrong, we'll abort because the temp won't have any RTL. In that
454 case, I guess we'll need to replace references somehow. */
455 if (TREE_CODE (from
) == TARGET_EXPR
)
456 from
= TARGET_EXPR_INITIAL (from
);
458 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
459 inside the TARGET_EXPR. */
462 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
464 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
465 replace the slot operand with our target.
467 Should we add a target parm to gimplify_expr instead? No, as in this
468 case we want to replace the INIT_EXPR. */
469 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
470 || TREE_CODE (sub
) == VEC_INIT_EXPR
)
472 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
473 AGGR_INIT_EXPR_SLOT (sub
) = to
;
475 VEC_INIT_EXPR_SLOT (sub
) = to
;
478 /* The initialization is now a side-effect, so the container can
481 TREE_TYPE (from
) = void_type_node
;
487 t
= TREE_OPERAND (t
, 1);
492 /* Gimplify a MUST_NOT_THROW_EXPR. */
494 static enum gimplify_status
495 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
498 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
499 tree body
= TREE_OPERAND (stmt
, 0);
500 gimple_seq try_
= NULL
;
501 gimple_seq catch_
= NULL
;
504 gimplify_and_add (body
, &try_
);
505 mnt
= gimple_build_eh_must_not_throw (terminate_node
);
506 gimple_seq_add_stmt_without_update (&catch_
, mnt
);
507 mnt
= gimple_build_try (try_
, catch_
, GIMPLE_TRY_CATCH
);
509 gimple_seq_add_stmt_without_update (pre_p
, mnt
);
520 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
523 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
525 int saved_stmts_are_full_exprs_p
= 0;
526 enum tree_code code
= TREE_CODE (*expr_p
);
527 enum gimplify_status ret
;
529 if (STATEMENT_CODE_P (code
))
531 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
532 current_stmt_tree ()->stmts_are_full_exprs_p
533 = STMT_IS_FULL_EXPR_P (*expr_p
);
539 *expr_p
= cplus_expand_constant (*expr_p
);
544 simplify_aggr_init_expr (expr_p
);
550 location_t loc
= input_location
;
551 tree init
= VEC_INIT_EXPR_INIT (*expr_p
);
552 int from_array
= (init
&& TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
553 gcc_assert (EXPR_HAS_LOCATION (*expr_p
));
554 input_location
= EXPR_LOCATION (*expr_p
);
555 *expr_p
= build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p
), NULL_TREE
,
556 init
, VEC_INIT_EXPR_VALUE_INIT (*expr_p
),
558 tf_warning_or_error
);
559 cp_genericize_tree (expr_p
);
561 input_location
= loc
;
566 /* FIXME communicate throw type to back end, probably by moving
567 THROW_EXPR into ../tree.def. */
568 *expr_p
= TREE_OPERAND (*expr_p
, 0);
572 case MUST_NOT_THROW_EXPR
:
573 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
576 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
577 LHS of an assignment might also be involved in the RHS, as in bug
580 cp_gimplify_init_expr (expr_p
);
581 if (TREE_CODE (*expr_p
) != INIT_EXPR
)
583 /* Otherwise fall through. */
586 /* If the back end isn't clever enough to know that the lhs and rhs
587 types are the same, add an explicit conversion. */
588 tree op0
= TREE_OPERAND (*expr_p
, 0);
589 tree op1
= TREE_OPERAND (*expr_p
, 1);
591 if (!error_operand_p (op0
)
592 && !error_operand_p (op1
)
593 && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0
))
594 || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1
)))
595 && !useless_type_conversion_p (TREE_TYPE (op1
), TREE_TYPE (op0
)))
596 TREE_OPERAND (*expr_p
, 1) = build1 (VIEW_CONVERT_EXPR
,
597 TREE_TYPE (op0
), op1
);
599 else if ((is_gimple_lvalue (op1
) || INDIRECT_REF_P (op1
)
600 || (TREE_CODE (op1
) == CONSTRUCTOR
601 && CONSTRUCTOR_NELTS (op1
) == 0
602 && !TREE_CLOBBER_P (op1
))
603 || (TREE_CODE (op1
) == CALL_EXPR
604 && !CALL_EXPR_RETURN_SLOT_OPT (op1
)))
605 && is_really_empty_class (TREE_TYPE (op0
)))
607 /* Remove any copies of empty classes. We check that the RHS
608 has a simple form so that TARGET_EXPRs and non-empty
609 CONSTRUCTORs get reduced properly, and we leave the return
610 slot optimization alone because it isn't a copy (FIXME so it
611 shouldn't be represented as one).
613 Also drop volatile variables on the RHS to avoid infinite
614 recursion from gimplify_expr trying to load the value. */
615 if (!TREE_SIDE_EFFECTS (op1
)
616 || (DECL_P (op1
) && TREE_THIS_VOLATILE (op1
)))
618 else if (TREE_CODE (op1
) == MEM_REF
619 && TREE_THIS_VOLATILE (op1
))
621 /* Similarly for volatile MEM_REFs on the RHS. */
622 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1
, 0)))
625 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
626 TREE_OPERAND (op1
, 0), op0
);
629 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
636 case EMPTY_CLASS_EXPR
:
637 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
638 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
643 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
648 genericize_try_block (expr_p
);
653 genericize_catch_block (expr_p
);
658 genericize_eh_spec_block (expr_p
);
676 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
680 gimplify_expr_stmt (expr_p
);
684 case UNARY_PLUS_EXPR
:
686 tree arg
= TREE_OPERAND (*expr_p
, 0);
687 tree type
= TREE_TYPE (*expr_p
);
688 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
695 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
699 /* Restore saved state. */
700 if (STATEMENT_CODE_P (code
))
701 current_stmt_tree ()->stmts_are_full_exprs_p
702 = saved_stmts_are_full_exprs_p
;
708 is_invisiref_parm (const_tree t
)
710 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
711 && DECL_BY_REFERENCE (t
));
714 /* Return true if the uid in both int tree maps are equal. */
717 cxx_int_tree_map_eq (const void *va
, const void *vb
)
719 const struct cxx_int_tree_map
*a
= (const struct cxx_int_tree_map
*) va
;
720 const struct cxx_int_tree_map
*b
= (const struct cxx_int_tree_map
*) vb
;
721 return (a
->uid
== b
->uid
);
724 /* Hash a UID in a cxx_int_tree_map. */
727 cxx_int_tree_map_hash (const void *item
)
729 return ((const struct cxx_int_tree_map
*)item
)->uid
;
732 /* A stable comparison routine for use with splay trees and DECLs. */
735 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
740 return DECL_UID (a
) - DECL_UID (b
);
743 /* OpenMP context during genericization. */
745 struct cp_genericize_omp_taskreg
749 struct cp_genericize_omp_taskreg
*outer
;
750 splay_tree variables
;
753 /* Return true if genericization should try to determine if
754 DECL is firstprivate or shared within task regions. */
757 omp_var_to_track (tree decl
)
759 tree type
= TREE_TYPE (decl
);
760 if (is_invisiref_parm (decl
))
761 type
= TREE_TYPE (type
);
762 while (TREE_CODE (type
) == ARRAY_TYPE
)
763 type
= TREE_TYPE (type
);
764 if (type
== error_mark_node
|| !CLASS_TYPE_P (type
))
766 if (VAR_P (decl
) && DECL_THREAD_LOCAL_P (decl
))
768 if (cxx_omp_predetermined_sharing (decl
) != OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
773 /* Note DECL use in OpenMP region OMP_CTX during genericization. */
776 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg
*omp_ctx
, tree decl
)
778 splay_tree_node n
= splay_tree_lookup (omp_ctx
->variables
,
779 (splay_tree_key
) decl
);
782 int flags
= OMP_CLAUSE_DEFAULT_SHARED
;
784 omp_cxx_notice_variable (omp_ctx
->outer
, decl
);
785 if (!omp_ctx
->default_shared
)
787 struct cp_genericize_omp_taskreg
*octx
;
789 for (octx
= omp_ctx
->outer
; octx
; octx
= octx
->outer
)
791 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
792 if (n
&& n
->value
!= OMP_CLAUSE_DEFAULT_SHARED
)
794 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
797 if (octx
->is_parallel
)
801 && (TREE_CODE (decl
) == PARM_DECL
802 || (!(TREE_STATIC (decl
) || DECL_EXTERNAL (decl
))
803 && DECL_CONTEXT (decl
) == current_function_decl
)))
804 flags
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
805 if (flags
== OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
)
807 /* DECL is implicitly determined firstprivate in
808 the current task construct. Ensure copy ctor and
809 dtor are instantiated, because during gimplification
810 it will be already too late. */
811 tree type
= TREE_TYPE (decl
);
812 if (is_invisiref_parm (decl
))
813 type
= TREE_TYPE (type
);
814 while (TREE_CODE (type
) == ARRAY_TYPE
)
815 type
= TREE_TYPE (type
);
816 get_copy_ctor (type
, tf_none
);
817 get_dtor (type
, tf_none
);
820 splay_tree_insert (omp_ctx
->variables
, (splay_tree_key
) decl
, flags
);
824 /* Genericization context. */
826 struct cp_genericize_data
828 struct pointer_set_t
*p_set
;
829 vec
<tree
> bind_expr_stack
;
830 struct cp_genericize_omp_taskreg
*omp_ctx
;
833 /* Perform any pre-gimplification lowering of C++ front end trees to
837 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
840 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
841 struct pointer_set_t
*p_set
= wtd
->p_set
;
843 /* If in an OpenMP context, note var uses. */
844 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
846 || TREE_CODE (stmt
) == PARM_DECL
847 || TREE_CODE (stmt
) == RESULT_DECL
)
848 && omp_var_to_track (stmt
))
849 omp_cxx_notice_variable (wtd
->omp_ctx
, stmt
);
851 if (is_invisiref_parm (stmt
)
852 /* Don't dereference parms in a thunk, pass the references through. */
853 && !(DECL_THUNK_P (current_function_decl
)
854 && TREE_CODE (stmt
) == PARM_DECL
))
856 *stmt_p
= convert_from_reference (stmt
);
861 /* Map block scope extern declarations to visible declarations with the
862 same name and type in outer scopes if any. */
863 if (cp_function_chain
->extern_decl_map
864 && VAR_OR_FUNCTION_DECL_P (stmt
)
865 && DECL_EXTERNAL (stmt
))
867 struct cxx_int_tree_map
*h
, in
;
868 in
.uid
= DECL_UID (stmt
);
869 h
= (struct cxx_int_tree_map
*)
870 htab_find_with_hash (cp_function_chain
->extern_decl_map
,
880 /* Other than invisiref parms, don't walk the same tree twice. */
881 if (pointer_set_contains (p_set
, stmt
))
887 if (TREE_CODE (stmt
) == ADDR_EXPR
888 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
890 /* If in an OpenMP context, note var uses. */
891 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0)
892 && omp_var_to_track (TREE_OPERAND (stmt
, 0)))
893 omp_cxx_notice_variable (wtd
->omp_ctx
, TREE_OPERAND (stmt
, 0));
894 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
897 else if (TREE_CODE (stmt
) == RETURN_EXPR
898 && TREE_OPERAND (stmt
, 0)
899 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
900 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
902 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
903 switch (OMP_CLAUSE_CODE (stmt
))
905 case OMP_CLAUSE_LASTPRIVATE
:
906 /* Don't dereference an invisiref in OpenMP clauses. */
907 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
910 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
911 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
912 cp_genericize_r
, data
, NULL
);
915 case OMP_CLAUSE_PRIVATE
:
916 /* Don't dereference an invisiref in OpenMP clauses. */
917 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
919 else if (wtd
->omp_ctx
!= NULL
)
921 /* Private clause doesn't cause any references to the
922 var in outer contexts, avoid calling
923 omp_cxx_notice_variable for it. */
924 struct cp_genericize_omp_taskreg
*old
= wtd
->omp_ctx
;
926 cp_walk_tree (&OMP_CLAUSE_DECL (stmt
), cp_genericize_r
,
932 case OMP_CLAUSE_SHARED
:
933 case OMP_CLAUSE_FIRSTPRIVATE
:
934 case OMP_CLAUSE_COPYIN
:
935 case OMP_CLAUSE_COPYPRIVATE
:
936 /* Don't dereference an invisiref in OpenMP clauses. */
937 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
940 case OMP_CLAUSE_REDUCTION
:
941 /* Don't dereference an invisiref in reduction clause's
942 OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
943 still needs to be genericized. */
944 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
947 if (OMP_CLAUSE_REDUCTION_INIT (stmt
))
948 cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt
),
949 cp_genericize_r
, data
, NULL
);
950 if (OMP_CLAUSE_REDUCTION_MERGE (stmt
))
951 cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt
),
952 cp_genericize_r
, data
, NULL
);
958 else if (IS_TYPE_OR_DECL_P (stmt
))
961 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
962 to lower this construct before scanning it, so we need to lower these
963 before doing anything else. */
964 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
965 *stmt_p
= build2_loc (EXPR_LOCATION (stmt
),
966 CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
970 CLEANUP_EXPR (stmt
));
972 else if (TREE_CODE (stmt
) == IF_STMT
)
974 genericize_if_stmt (stmt_p
);
975 /* *stmt_p has changed, tail recurse to handle it again. */
976 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
979 /* COND_EXPR might have incompatible types in branches if one or both
980 arms are bitfields. Fix it up now. */
981 else if (TREE_CODE (stmt
) == COND_EXPR
)
984 = (TREE_OPERAND (stmt
, 1)
985 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
988 = (TREE_OPERAND (stmt
, 2)
989 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
992 && !useless_type_conversion_p (TREE_TYPE (stmt
),
993 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
995 TREE_OPERAND (stmt
, 1)
996 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
997 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1001 && !useless_type_conversion_p (TREE_TYPE (stmt
),
1002 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
1004 TREE_OPERAND (stmt
, 2)
1005 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
1006 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
1011 else if (TREE_CODE (stmt
) == BIND_EXPR
)
1013 if (__builtin_expect (wtd
->omp_ctx
!= NULL
, 0))
1016 for (decl
= BIND_EXPR_VARS (stmt
); decl
; decl
= DECL_CHAIN (decl
))
1018 && !DECL_EXTERNAL (decl
)
1019 && omp_var_to_track (decl
))
1022 = splay_tree_lookup (wtd
->omp_ctx
->variables
,
1023 (splay_tree_key
) decl
);
1025 splay_tree_insert (wtd
->omp_ctx
->variables
,
1026 (splay_tree_key
) decl
,
1028 ? OMP_CLAUSE_DEFAULT_SHARED
1029 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1032 wtd
->bind_expr_stack
.safe_push (stmt
);
1033 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
1034 cp_genericize_r
, data
, NULL
);
1035 wtd
->bind_expr_stack
.pop ();
1038 else if (TREE_CODE (stmt
) == USING_STMT
)
1040 tree block
= NULL_TREE
;
1042 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1043 BLOCK, and append an IMPORTED_DECL to its
1044 BLOCK_VARS chained list. */
1045 if (wtd
->bind_expr_stack
.exists ())
1048 for (i
= wtd
->bind_expr_stack
.length () - 1; i
>= 0; i
--)
1049 if ((block
= BIND_EXPR_BLOCK (wtd
->bind_expr_stack
[i
])))
1054 tree using_directive
;
1055 gcc_assert (TREE_OPERAND (stmt
, 0));
1057 using_directive
= make_node (IMPORTED_DECL
);
1058 TREE_TYPE (using_directive
) = void_type_node
;
1060 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
1061 = TREE_OPERAND (stmt
, 0);
1062 DECL_CHAIN (using_directive
) = BLOCK_VARS (block
);
1063 BLOCK_VARS (block
) = using_directive
;
1065 /* The USING_STMT won't appear in GENERIC. */
1066 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1070 else if (TREE_CODE (stmt
) == DECL_EXPR
1071 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
1073 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1074 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
1077 else if (TREE_CODE (stmt
) == OMP_PARALLEL
|| TREE_CODE (stmt
) == OMP_TASK
)
1079 struct cp_genericize_omp_taskreg omp_ctx
;
1084 cp_walk_tree (&OMP_CLAUSES (stmt
), cp_genericize_r
, data
, NULL
);
1085 omp_ctx
.is_parallel
= TREE_CODE (stmt
) == OMP_PARALLEL
;
1086 omp_ctx
.default_shared
= omp_ctx
.is_parallel
;
1087 omp_ctx
.outer
= wtd
->omp_ctx
;
1088 omp_ctx
.variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
1089 wtd
->omp_ctx
= &omp_ctx
;
1090 for (c
= OMP_CLAUSES (stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
1091 switch (OMP_CLAUSE_CODE (c
))
1093 case OMP_CLAUSE_SHARED
:
1094 case OMP_CLAUSE_PRIVATE
:
1095 case OMP_CLAUSE_FIRSTPRIVATE
:
1096 case OMP_CLAUSE_LASTPRIVATE
:
1097 decl
= OMP_CLAUSE_DECL (c
);
1098 if (decl
== error_mark_node
|| !omp_var_to_track (decl
))
1100 n
= splay_tree_lookup (omp_ctx
.variables
, (splay_tree_key
) decl
);
1103 splay_tree_insert (omp_ctx
.variables
, (splay_tree_key
) decl
,
1104 OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
1105 ? OMP_CLAUSE_DEFAULT_SHARED
1106 : OMP_CLAUSE_DEFAULT_PRIVATE
);
1107 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_PRIVATE
1109 omp_cxx_notice_variable (omp_ctx
.outer
, decl
);
1111 case OMP_CLAUSE_DEFAULT
:
1112 if (OMP_CLAUSE_DEFAULT_KIND (c
) == OMP_CLAUSE_DEFAULT_SHARED
)
1113 omp_ctx
.default_shared
= true;
1117 cp_walk_tree (&OMP_BODY (stmt
), cp_genericize_r
, data
, NULL
);
1118 wtd
->omp_ctx
= omp_ctx
.outer
;
1119 splay_tree_delete (omp_ctx
.variables
);
1121 else if (TREE_CODE (stmt
) == CONVERT_EXPR
)
1122 gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt
));
1123 else if (TREE_CODE (stmt
) == FOR_STMT
)
1124 genericize_for_stmt (stmt_p
, walk_subtrees
, data
);
1125 else if (TREE_CODE (stmt
) == WHILE_STMT
)
1126 genericize_while_stmt (stmt_p
, walk_subtrees
, data
);
1127 else if (TREE_CODE (stmt
) == DO_STMT
)
1128 genericize_do_stmt (stmt_p
, walk_subtrees
, data
);
1129 else if (TREE_CODE (stmt
) == SWITCH_STMT
)
1130 genericize_switch_stmt (stmt_p
, walk_subtrees
, data
);
1131 else if (TREE_CODE (stmt
) == CONTINUE_STMT
)
1132 genericize_continue_stmt (stmt_p
);
1133 else if (TREE_CODE (stmt
) == BREAK_STMT
)
1134 genericize_break_stmt (stmt_p
);
1135 else if (TREE_CODE (stmt
) == OMP_FOR
1136 || TREE_CODE (stmt
) == OMP_SIMD
1137 || TREE_CODE (stmt
) == OMP_DISTRIBUTE
)
1138 genericize_omp_for_stmt (stmt_p
, walk_subtrees
, data
);
1139 else if (TREE_CODE (stmt
) == SIZEOF_EXPR
)
1141 if (SIZEOF_EXPR_TYPE_P (stmt
))
1143 = cxx_sizeof_or_alignof_type (TREE_TYPE (TREE_OPERAND (stmt
, 0)),
1144 SIZEOF_EXPR
, false);
1145 else if (TYPE_P (TREE_OPERAND (stmt
, 0)))
1146 *stmt_p
= cxx_sizeof_or_alignof_type (TREE_OPERAND (stmt
, 0),
1147 SIZEOF_EXPR
, false);
1149 *stmt_p
= cxx_sizeof_or_alignof_expr (TREE_OPERAND (stmt
, 0),
1150 SIZEOF_EXPR
, false);
1151 if (*stmt_p
== error_mark_node
)
1152 *stmt_p
= size_one_node
;
1156 pointer_set_insert (p_set
, *stmt_p
);
1161 /* Lower C++ front end trees to GENERIC in T_P. */
1164 cp_genericize_tree (tree
* t_p
)
1166 struct cp_genericize_data wtd
;
1168 wtd
.p_set
= pointer_set_create ();
1169 wtd
.bind_expr_stack
.create (0);
1171 cp_walk_tree (t_p
, cp_genericize_r
, &wtd
, NULL
);
1172 pointer_set_destroy (wtd
.p_set
);
1173 wtd
.bind_expr_stack
.release ();
/* Genericize the body of FNDECL: rewrite parameters and the return
   value that are passed by invisible reference, then lower the whole
   saved tree to GENERIC.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	/* Switch the parm to its by-reference representation.  */
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var: after the named
	     return value optimization the user variable aliases the
	     result decl, which is now a reference.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Expand all the array notations here.  */
  if (flag_enable_cilkplus
      && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
    DECL_SAVED_TREE (fndecl) =
      expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl));

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue label scopes must be balanced by now.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}
/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the `this' parameter; and when FN takes two arguments (e.g. a
     copy constructor), skip the first real parameter slot as well.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit element-wise loop
	   p1 = &arg1[0...0]; end1 = p1 + sizeof (arg1);
	   lab: fn (p1 [, p2]); p1 += elt_size; [p2 += elt_size;]
	   if (p1 != end1) goto lab;  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend through nested array types to the element type, indexing
	 element 0 at each level.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* One-past-the-end pointer for the loop exit test.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn, i,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance p1 (and p2) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Back-edge: loop while p1 != end1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of ARG1/ARG2.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm),
					   fn, i, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
1356 /* Return code to initialize DECL with its default constructor, or
1357 NULL if there's nothing to do. */
1360 cxx_omp_clause_default_ctor (tree clause
, tree decl
, tree
/*outer*/)
1362 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1366 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1371 /* Return code to initialize DST with a copy constructor from SRC. */
1374 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1376 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1380 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1382 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1387 /* Similarly, except use an assignment operator instead. */
1390 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1392 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1396 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1398 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1403 /* Return code to destroy DECL. */
1406 cxx_omp_clause_dtor (tree clause
, tree decl
)
1408 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1412 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1417 /* True if OpenMP should privatize what this DECL points to rather
1418 than the DECL itself. */
1421 cxx_omp_privatize_by_reference (const_tree decl
)
1423 return (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
1424 || is_invisiref_parm (decl
));
/* Return true if DECL is const qualified var having no mutable member.  */
static bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only invisible-reference parms are treated as the referenced
	 object; any other reference is not a const var.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Look up the user variable of the same
	     name in the outermost user block and recover the
	     qualification from it.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
1469 /* True if OpenMP sharing attribute of DECL is predetermined. */
1471 enum omp_clause_default_kind
1472 cxx_omp_predetermined_sharing (tree decl
)
1474 /* Static data members are predetermined shared. */
1475 if (TREE_STATIC (decl
))
1477 tree ctx
= CP_DECL_CONTEXT (decl
);
1478 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1479 return OMP_CLAUSE_DEFAULT_SHARED
;
1482 /* Const qualified vars having no mutable member are predetermined
1484 if (cxx_omp_const_qual_no_mutable (decl
))
1485 return OMP_CLAUSE_DEFAULT_SHARED
;
1487 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1490 /* Finalize an implicitly determined clause. */
1493 cxx_omp_finish_clause (tree c
)
1495 tree decl
, inner_type
;
1496 bool make_shared
= false;
1498 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1501 decl
= OMP_CLAUSE_DECL (c
);
1502 decl
= require_complete_type (decl
);
1503 inner_type
= TREE_TYPE (decl
);
1504 if (decl
== error_mark_node
)
1506 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1508 if (is_invisiref_parm (decl
))
1509 inner_type
= TREE_TYPE (inner_type
);
1512 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1518 /* We're interested in the base element, not arrays. */
1519 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1520 inner_type
= TREE_TYPE (inner_type
);
1522 /* Check for special function availability by building a call to one.
1523 Save the results, because later we won't be in the right context
1524 for making these queries. */
1526 && CLASS_TYPE_P (inner_type
)
1527 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false, true))
1531 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;