/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "coretypes.h"
31 #include "tree-iterator.h"
34 #include "pointer-set.h"
37 /* Local declarations. */
39 enum bc_t
{ bc_break
= 0, bc_continue
= 1 };
41 /* Stack of labels which are targets for "break" or "continue",
42 linked through TREE_CHAIN. */
43 static tree bc_label
[2];
45 /* Begin a scope which can be exited by a break or continue statement. BC
48 Just creates a label and pushes it into the current context. */
51 begin_bc_block (enum bc_t bc
)
53 tree label
= create_artificial_label (input_location
);
54 TREE_CHAIN (label
) = bc_label
[bc
];
59 /* Finish a scope which can be exited by a break or continue statement.
60 LABEL was returned from the most recent call to begin_bc_block. BODY is
61 an expression for the contents of the scope.
63 If we saw a break (or continue) in the scope, append a LABEL_EXPR to
64 body. Otherwise, just forget the label. */
67 finish_bc_block (enum bc_t bc
, tree label
, gimple_seq body
)
69 gcc_assert (label
== bc_label
[bc
]);
71 if (TREE_USED (label
))
73 gimple_seq_add_stmt (&body
, gimple_build_label (label
));
76 bc_label
[bc
] = TREE_CHAIN (label
);
77 TREE_CHAIN (label
) = NULL_TREE
;
81 /* Get the LABEL_EXPR to represent a break or continue statement
82 in the current block scope. BC indicates which. */
85 get_bc_label (enum bc_t bc
)
87 tree label
= bc_label
[bc
];
89 if (label
== NULL_TREE
)
92 error ("break statement not within loop or switch");
94 error ("continue statement not within loop or switch");
99 /* Mark the label used for finish_bc_block. */
100 TREE_USED (label
) = 1;
104 /* Genericize a TRY_BLOCK. */
107 genericize_try_block (tree
*stmt_p
)
109 tree body
= TRY_STMTS (*stmt_p
);
110 tree cleanup
= TRY_HANDLERS (*stmt_p
);
112 *stmt_p
= build2 (TRY_CATCH_EXPR
, void_type_node
, body
, cleanup
);
115 /* Genericize a HANDLER by converting to a CATCH_EXPR. */
118 genericize_catch_block (tree
*stmt_p
)
120 tree type
= HANDLER_TYPE (*stmt_p
);
121 tree body
= HANDLER_BODY (*stmt_p
);
123 /* FIXME should the caught type go in TREE_TYPE? */
124 *stmt_p
= build2 (CATCH_EXPR
, void_type_node
, type
, body
);
127 /* A terser interface for building a representation of an exception
131 build_gimple_eh_filter_tree (tree body
, tree allowed
, tree failure
)
135 /* FIXME should the allowed types go in TREE_TYPE? */
136 t
= build2 (EH_FILTER_EXPR
, void_type_node
, allowed
, NULL_TREE
);
137 append_to_statement_list (failure
, &EH_FILTER_FAILURE (t
));
139 t
= build2 (TRY_CATCH_EXPR
, void_type_node
, NULL_TREE
, t
);
140 append_to_statement_list (body
, &TREE_OPERAND (t
, 0));
145 /* Genericize an EH_SPEC_BLOCK by converting it to a
146 TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
149 genericize_eh_spec_block (tree
*stmt_p
)
151 tree body
= EH_SPEC_STMTS (*stmt_p
);
152 tree allowed
= EH_SPEC_RAISES (*stmt_p
);
153 tree failure
= build_call_n (call_unexpected_node
, 1, build_exc_ptr ());
155 *stmt_p
= build_gimple_eh_filter_tree (body
, allowed
, failure
);
156 TREE_NO_WARNING (*stmt_p
) = true;
157 TREE_NO_WARNING (TREE_OPERAND (*stmt_p
, 1)) = true;
160 /* Genericize an IF_STMT by turning it into a COND_EXPR. */
163 genericize_if_stmt (tree
*stmt_p
)
165 tree stmt
, cond
, then_
, else_
;
166 location_t locus
= EXPR_LOCATION (*stmt_p
);
169 cond
= IF_COND (stmt
);
170 then_
= THEN_CLAUSE (stmt
);
171 else_
= ELSE_CLAUSE (stmt
);
174 then_
= build_empty_stmt (locus
);
176 else_
= build_empty_stmt (locus
);
178 if (integer_nonzerop (cond
) && !TREE_SIDE_EFFECTS (else_
))
180 else if (integer_zerop (cond
) && !TREE_SIDE_EFFECTS (then_
))
183 stmt
= build3 (COND_EXPR
, void_type_node
, cond
, then_
, else_
);
184 if (CAN_HAVE_LOCATION_P (stmt
) && !EXPR_HAS_LOCATION (stmt
))
185 SET_EXPR_LOCATION (stmt
, locus
);
189 /* Build a generic representation of one of the C loop forms. COND is the
190 loop condition or NULL_TREE. BODY is the (possibly compound) statement
191 controlled by the loop. INCR is the increment expression of a for-loop,
192 or NULL_TREE. COND_IS_FIRST indicates whether the condition is
193 evaluated before the loop body as in while and for loops, or after the
194 loop body as in do-while loops. */
197 gimplify_cp_loop (tree cond
, tree body
, tree incr
, bool cond_is_first
)
199 gimple top
, entry
, stmt
;
200 gimple_seq stmt_list
, body_seq
, incr_seq
, exit_seq
;
201 tree cont_block
, break_block
;
202 location_t stmt_locus
;
204 stmt_locus
= input_location
;
211 break_block
= begin_bc_block (bc_break
);
212 cont_block
= begin_bc_block (bc_continue
);
214 /* If condition is zero don't generate a loop construct. */
215 if (cond
&& integer_zerop (cond
))
220 stmt
= gimple_build_goto (get_bc_label (bc_break
));
221 gimple_set_location (stmt
, stmt_locus
);
222 gimple_seq_add_stmt (&stmt_list
, stmt
);
227 /* If we use a LOOP_EXPR here, we have to feed the whole thing
228 back through the main gimplifier to lower it. Given that we
229 have to gimplify the loop body NOW so that we can resolve
230 break/continue stmts, seems easier to just expand to gotos. */
231 top
= gimple_build_label (create_artificial_label (stmt_locus
));
233 /* If we have an exit condition, then we build an IF with gotos either
234 out of the loop, or to the top of it. If there's no exit condition,
235 then we just build a jump back to the top. */
236 if (cond
&& !integer_nonzerop (cond
))
238 if (cond
!= error_mark_node
)
240 gimplify_expr (&cond
, &exit_seq
, NULL
, is_gimple_val
, fb_rvalue
);
241 stmt
= gimple_build_cond (NE_EXPR
, cond
,
242 build_int_cst (TREE_TYPE (cond
), 0),
243 gimple_label_label (top
),
244 get_bc_label (bc_break
));
245 gimple_seq_add_stmt (&exit_seq
, stmt
);
252 entry
= gimple_build_label
253 (create_artificial_label (stmt_locus
));
254 stmt
= gimple_build_goto (gimple_label_label (entry
));
257 stmt
= gimple_build_goto (get_bc_label (bc_continue
));
258 gimple_set_location (stmt
, stmt_locus
);
259 gimple_seq_add_stmt (&stmt_list
, stmt
);
264 stmt
= gimple_build_goto (gimple_label_label (top
));
265 gimple_seq_add_stmt (&exit_seq
, stmt
);
269 gimplify_stmt (&body
, &body_seq
);
270 gimplify_stmt (&incr
, &incr_seq
);
272 body_seq
= finish_bc_block (bc_continue
, cont_block
, body_seq
);
274 gimple_seq_add_stmt (&stmt_list
, top
);
275 gimple_seq_add_seq (&stmt_list
, body_seq
);
276 gimple_seq_add_seq (&stmt_list
, incr_seq
);
277 gimple_seq_add_stmt (&stmt_list
, entry
);
278 gimple_seq_add_seq (&stmt_list
, exit_seq
);
280 annotate_all_with_location (stmt_list
, stmt_locus
);
282 return finish_bc_block (bc_break
, break_block
, stmt_list
);
285 /* Gimplify a FOR_STMT node. Move the stuff in the for-init-stmt into the
286 prequeue and hand off to gimplify_cp_loop. */
289 gimplify_for_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
293 if (FOR_INIT_STMT (stmt
))
294 gimplify_and_add (FOR_INIT_STMT (stmt
), pre_p
);
296 gimple_seq_add_seq (pre_p
,
297 gimplify_cp_loop (FOR_COND (stmt
), FOR_BODY (stmt
),
298 FOR_EXPR (stmt
), 1));
302 /* Gimplify a WHILE_STMT node. */
305 gimplify_while_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
308 gimple_seq_add_seq (pre_p
,
309 gimplify_cp_loop (WHILE_COND (stmt
), WHILE_BODY (stmt
),
314 /* Gimplify a DO_STMT node. */
317 gimplify_do_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
320 gimple_seq_add_seq (pre_p
,
321 gimplify_cp_loop (DO_COND (stmt
), DO_BODY (stmt
),
326 /* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR. */
329 gimplify_switch_stmt (tree
*stmt_p
, gimple_seq
*pre_p
)
332 tree break_block
, body
, t
;
333 location_t stmt_locus
= input_location
;
334 gimple_seq seq
= NULL
;
336 break_block
= begin_bc_block (bc_break
);
338 body
= SWITCH_STMT_BODY (stmt
);
340 body
= build_empty_stmt (stmt_locus
);
342 t
= build3 (SWITCH_EXPR
, SWITCH_STMT_TYPE (stmt
),
343 SWITCH_STMT_COND (stmt
), body
, NULL_TREE
);
344 SET_EXPR_LOCATION (t
, stmt_locus
);
345 gimplify_and_add (t
, &seq
);
347 seq
= finish_bc_block (bc_break
, break_block
, seq
);
348 gimple_seq_add_seq (pre_p
, seq
);
352 /* Hook into the middle of gimplifying an OMP_FOR node. This is required
353 in order to properly gimplify CONTINUE statements. Here we merely
354 manage the continue stack; the rest of the job is performed by the
355 regular gimplifier. */
357 static enum gimplify_status
358 cp_gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
360 tree for_stmt
= *expr_p
;
363 gimple_seq seq
= NULL
;
365 /* Protect ourselves from recursion. */
366 if (OMP_FOR_GIMPLIFYING_P (for_stmt
))
368 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 1;
370 /* Note that while technically the continue label is enabled too soon
371 here, we should have already diagnosed invalid continues nested within
372 statement expressions within the INIT, COND, or INCR expressions. */
373 cont_block
= begin_bc_block (bc_continue
);
375 gimplify_and_add (for_stmt
, &seq
);
376 stmt
= gimple_seq_last_stmt (seq
);
377 if (gimple_code (stmt
) == GIMPLE_OMP_FOR
)
378 gimple_omp_set_body (stmt
, finish_bc_block (bc_continue
, cont_block
,
379 gimple_omp_body (stmt
)));
381 seq
= finish_bc_block (bc_continue
, cont_block
, seq
);
382 gimple_seq_add_seq (pre_p
, seq
);
384 OMP_FOR_GIMPLIFYING_P (for_stmt
) = 0;
389 /* Gimplify an EXPR_STMT node. */
392 gimplify_expr_stmt (tree
*stmt_p
)
394 tree stmt
= EXPR_STMT_EXPR (*stmt_p
);
396 if (stmt
== error_mark_node
)
399 /* Gimplification of a statement expression will nullify the
400 statement if all its side effects are moved to *PRE_P and *POST_P.
402 In this case we will not want to emit the gimplified statement.
403 However, we may still want to emit a warning, so we do that before
405 if (stmt
&& warn_unused_value
)
407 if (!TREE_SIDE_EFFECTS (stmt
))
409 if (!IS_EMPTY_STMT (stmt
)
410 && !VOID_TYPE_P (TREE_TYPE (stmt
))
411 && !TREE_NO_WARNING (stmt
))
412 warning (OPT_Wunused_value
, "statement with no effect");
415 warn_if_unused_value (stmt
, input_location
);
418 if (stmt
== NULL_TREE
)
419 stmt
= alloc_stmt_list ();
424 /* Gimplify initialization from an AGGR_INIT_EXPR. */
427 cp_gimplify_init_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
429 tree from
= TREE_OPERAND (*expr_p
, 1);
430 tree to
= TREE_OPERAND (*expr_p
, 0);
432 tree slot
= NULL_TREE
;
434 /* What about code that pulls out the temp and uses it elsewhere? I
435 think that such code never uses the TARGET_EXPR as an initializer. If
436 I'm wrong, we'll abort because the temp won't have any RTL. In that
437 case, I guess we'll need to replace references somehow. */
438 if (TREE_CODE (from
) == TARGET_EXPR
)
440 slot
= TARGET_EXPR_SLOT (from
);
441 from
= TARGET_EXPR_INITIAL (from
);
444 /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
445 inside the TARGET_EXPR. */
448 tree sub
= TREE_CODE (t
) == COMPOUND_EXPR
? TREE_OPERAND (t
, 0) : t
;
450 /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
451 replace the slot operand with our target.
453 Should we add a target parm to gimplify_expr instead? No, as in this
454 case we want to replace the INIT_EXPR. */
455 if (TREE_CODE (sub
) == AGGR_INIT_EXPR
)
457 gimplify_expr (&to
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
458 AGGR_INIT_EXPR_SLOT (sub
) = to
;
461 /* The initialization is now a side-effect, so the container can
464 TREE_TYPE (from
) = void_type_node
;
470 t
= TREE_OPERAND (t
, 1);
475 /* Gimplify a MUST_NOT_THROW_EXPR. */
477 static enum gimplify_status
478 gimplify_must_not_throw_expr (tree
*expr_p
, gimple_seq
*pre_p
)
481 tree temp
= voidify_wrapper_expr (stmt
, NULL
);
482 tree body
= TREE_OPERAND (stmt
, 0);
484 stmt
= build_gimple_eh_filter_tree (body
, NULL_TREE
,
485 build_call_n (terminate_node
, 0));
487 gimplify_and_add (stmt
, pre_p
);
498 /* Do C++-specific gimplification. Args are as for gimplify_expr. */
501 cp_gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
503 int saved_stmts_are_full_exprs_p
= 0;
504 enum tree_code code
= TREE_CODE (*expr_p
);
505 enum gimplify_status ret
;
507 if (STATEMENT_CODE_P (code
))
509 saved_stmts_are_full_exprs_p
= stmts_are_full_exprs_p ();
510 current_stmt_tree ()->stmts_are_full_exprs_p
511 = STMT_IS_FULL_EXPR_P (*expr_p
);
517 *expr_p
= cplus_expand_constant (*expr_p
);
522 simplify_aggr_init_expr (expr_p
);
527 /* FIXME communicate throw type to back end, probably by moving
528 THROW_EXPR into ../tree.def. */
529 *expr_p
= TREE_OPERAND (*expr_p
, 0);
533 case MUST_NOT_THROW_EXPR
:
534 ret
= gimplify_must_not_throw_expr (expr_p
, pre_p
);
537 /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
538 LHS of an assignment might also be involved in the RHS, as in bug
541 cp_gimplify_init_expr (expr_p
, pre_p
, post_p
);
545 case EMPTY_CLASS_EXPR
:
546 /* We create an empty CONSTRUCTOR with RECORD_TYPE. */
547 *expr_p
= build_constructor (TREE_TYPE (*expr_p
), NULL
);
552 *expr_p
= BASELINK_FUNCTIONS (*expr_p
);
557 genericize_try_block (expr_p
);
562 genericize_catch_block (expr_p
);
567 genericize_eh_spec_block (expr_p
);
575 gimplify_for_stmt (expr_p
, pre_p
);
580 gimplify_while_stmt (expr_p
, pre_p
);
585 gimplify_do_stmt (expr_p
, pre_p
);
590 gimplify_switch_stmt (expr_p
, pre_p
);
595 ret
= cp_gimplify_omp_for (expr_p
, pre_p
);
599 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_CONTINUE
, NOT_TAKEN
));
600 gimple_seq_add_stmt (pre_p
, gimple_build_goto (get_bc_label (bc_continue
)));
606 gimple_seq_add_stmt (pre_p
, gimple_build_goto (get_bc_label (bc_break
)));
612 gimplify_expr_stmt (expr_p
);
616 case UNARY_PLUS_EXPR
:
618 tree arg
= TREE_OPERAND (*expr_p
, 0);
619 tree type
= TREE_TYPE (*expr_p
);
620 *expr_p
= (TREE_TYPE (arg
) != type
) ? fold_convert (type
, arg
)
627 ret
= (enum gimplify_status
) c_gimplify_expr (expr_p
, pre_p
, post_p
);
631 /* Restore saved state. */
632 if (STATEMENT_CODE_P (code
))
633 current_stmt_tree ()->stmts_are_full_exprs_p
634 = saved_stmts_are_full_exprs_p
;
640 is_invisiref_parm (const_tree t
)
642 return ((TREE_CODE (t
) == PARM_DECL
|| TREE_CODE (t
) == RESULT_DECL
)
643 && DECL_BY_REFERENCE (t
));
646 /* Return true if the uid in both int tree maps are equal. */
649 cxx_int_tree_map_eq (const void *va
, const void *vb
)
651 const struct cxx_int_tree_map
*a
= (const struct cxx_int_tree_map
*) va
;
652 const struct cxx_int_tree_map
*b
= (const struct cxx_int_tree_map
*) vb
;
653 return (a
->uid
== b
->uid
);
656 /* Hash a UID in a cxx_int_tree_map. */
659 cxx_int_tree_map_hash (const void *item
)
661 return ((const struct cxx_int_tree_map
*)item
)->uid
;
664 struct cp_genericize_data
666 struct pointer_set_t
*p_set
;
667 VEC (tree
, heap
) *bind_expr_stack
;
670 /* Perform any pre-gimplification lowering of C++ front end trees to
674 cp_genericize_r (tree
*stmt_p
, int *walk_subtrees
, void *data
)
677 struct cp_genericize_data
*wtd
= (struct cp_genericize_data
*) data
;
678 struct pointer_set_t
*p_set
= wtd
->p_set
;
680 if (is_invisiref_parm (stmt
)
681 /* Don't dereference parms in a thunk, pass the references through. */
682 && !(DECL_THUNK_P (current_function_decl
)
683 && TREE_CODE (stmt
) == PARM_DECL
))
685 *stmt_p
= convert_from_reference (stmt
);
690 /* Map block scope extern declarations to visible declarations with the
691 same name and type in outer scopes if any. */
692 if (cp_function_chain
->extern_decl_map
693 && (TREE_CODE (stmt
) == FUNCTION_DECL
|| TREE_CODE (stmt
) == VAR_DECL
)
694 && DECL_EXTERNAL (stmt
))
696 struct cxx_int_tree_map
*h
, in
;
697 in
.uid
= DECL_UID (stmt
);
698 h
= (struct cxx_int_tree_map
*)
699 htab_find_with_hash (cp_function_chain
->extern_decl_map
,
709 /* Other than invisiref parms, don't walk the same tree twice. */
710 if (pointer_set_contains (p_set
, stmt
))
716 if (TREE_CODE (stmt
) == ADDR_EXPR
717 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
719 *stmt_p
= convert (TREE_TYPE (stmt
), TREE_OPERAND (stmt
, 0));
722 else if (TREE_CODE (stmt
) == RETURN_EXPR
723 && TREE_OPERAND (stmt
, 0)
724 && is_invisiref_parm (TREE_OPERAND (stmt
, 0)))
725 /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
727 else if (TREE_CODE (stmt
) == OMP_CLAUSE
)
728 switch (OMP_CLAUSE_CODE (stmt
))
730 case OMP_CLAUSE_LASTPRIVATE
:
731 /* Don't dereference an invisiref in OpenMP clauses. */
732 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
735 if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt
))
736 cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt
),
737 cp_genericize_r
, data
, NULL
);
740 case OMP_CLAUSE_PRIVATE
:
741 case OMP_CLAUSE_SHARED
:
742 case OMP_CLAUSE_FIRSTPRIVATE
:
743 case OMP_CLAUSE_COPYIN
:
744 case OMP_CLAUSE_COPYPRIVATE
:
745 /* Don't dereference an invisiref in OpenMP clauses. */
746 if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)))
749 case OMP_CLAUSE_REDUCTION
:
750 gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt
)));
755 else if (IS_TYPE_OR_DECL_P (stmt
))
758 /* Due to the way voidify_wrapper_expr is written, we don't get a chance
759 to lower this construct before scanning it, so we need to lower these
760 before doing anything else. */
761 else if (TREE_CODE (stmt
) == CLEANUP_STMT
)
762 *stmt_p
= build2 (CLEANUP_EH_ONLY (stmt
) ? TRY_CATCH_EXPR
766 CLEANUP_EXPR (stmt
));
768 else if (TREE_CODE (stmt
) == IF_STMT
)
770 genericize_if_stmt (stmt_p
);
771 /* *stmt_p has changed, tail recurse to handle it again. */
772 return cp_genericize_r (stmt_p
, walk_subtrees
, data
);
775 /* COND_EXPR might have incompatible types in branches if one or both
776 arms are bitfields. Fix it up now. */
777 else if (TREE_CODE (stmt
) == COND_EXPR
)
780 = (TREE_OPERAND (stmt
, 1)
781 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 1))
784 = (TREE_OPERAND (stmt
, 2)
785 ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt
, 2))
788 && !useless_type_conversion_p (TREE_TYPE (stmt
),
789 TREE_TYPE (TREE_OPERAND (stmt
, 1))))
791 TREE_OPERAND (stmt
, 1)
792 = fold_convert (type_left
, TREE_OPERAND (stmt
, 1));
793 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
797 && !useless_type_conversion_p (TREE_TYPE (stmt
),
798 TREE_TYPE (TREE_OPERAND (stmt
, 2))))
800 TREE_OPERAND (stmt
, 2)
801 = fold_convert (type_right
, TREE_OPERAND (stmt
, 2));
802 gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt
),
807 else if (TREE_CODE (stmt
) == BIND_EXPR
)
809 VEC_safe_push (tree
, heap
, wtd
->bind_expr_stack
, stmt
);
810 cp_walk_tree (&BIND_EXPR_BODY (stmt
),
811 cp_genericize_r
, data
, NULL
);
812 VEC_pop (tree
, wtd
->bind_expr_stack
);
815 else if (TREE_CODE (stmt
) == USING_STMT
)
817 tree block
= NULL_TREE
;
819 /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
820 BLOCK, and append an IMPORTED_DECL to its
821 BLOCK_VARS chained list. */
822 if (wtd
->bind_expr_stack
)
825 for (i
= VEC_length (tree
, wtd
->bind_expr_stack
) - 1; i
>= 0; i
--)
826 if ((block
= BIND_EXPR_BLOCK (VEC_index (tree
,
827 wtd
->bind_expr_stack
, i
))))
832 tree using_directive
;
833 gcc_assert (TREE_OPERAND (stmt
, 0));
835 using_directive
= make_node (IMPORTED_DECL
);
836 TREE_TYPE (using_directive
) = void_type_node
;
838 IMPORTED_DECL_ASSOCIATED_DECL (using_directive
)
839 = TREE_OPERAND (stmt
, 0);
840 TREE_CHAIN (using_directive
) = BLOCK_VARS (block
);
841 BLOCK_VARS (block
) = using_directive
;
843 /* The USING_STMT won't appear in GENERIC. */
844 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
848 else if (TREE_CODE (stmt
) == DECL_EXPR
849 && TREE_CODE (DECL_EXPR_DECL (stmt
)) == USING_DECL
)
851 /* Using decls inside DECL_EXPRs are just dropped on the floor. */
852 *stmt_p
= build1 (NOP_EXPR
, void_type_node
, integer_zero_node
);
856 else if (TREE_CODE (stmt
) == MODIFY_EXPR
857 && (integer_zerop (cp_expr_size (TREE_OPERAND (stmt
, 0)))
858 || integer_zerop (cp_expr_size (TREE_OPERAND (stmt
, 1)))))
860 *stmt_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (stmt
),
861 TREE_OPERAND (stmt
, 0),
862 TREE_OPERAND (stmt
, 1));
865 pointer_set_insert (p_set
, *stmt_p
);
871 cp_genericize (tree fndecl
)
874 struct cp_genericize_data wtd
;
876 /* Fix up the types of parms passed by invisible reference. */
877 for (t
= DECL_ARGUMENTS (fndecl
); t
; t
= TREE_CHAIN (t
))
878 if (TREE_ADDRESSABLE (TREE_TYPE (t
)))
880 /* If a function's arguments are copied to create a thunk,
881 then DECL_BY_REFERENCE will be set -- but the type of the
882 argument will be a pointer type, so we will never get
884 gcc_assert (!DECL_BY_REFERENCE (t
));
885 gcc_assert (DECL_ARG_TYPE (t
) != TREE_TYPE (t
));
886 TREE_TYPE (t
) = DECL_ARG_TYPE (t
);
887 DECL_BY_REFERENCE (t
) = 1;
888 TREE_ADDRESSABLE (t
) = 0;
892 /* Do the same for the return value. */
893 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl
))))
895 t
= DECL_RESULT (fndecl
);
896 TREE_TYPE (t
) = build_reference_type (TREE_TYPE (t
));
897 DECL_BY_REFERENCE (t
) = 1;
898 TREE_ADDRESSABLE (t
) = 0;
902 /* If we're a clone, the body is already GIMPLE. */
903 if (DECL_CLONED_FUNCTION_P (fndecl
))
906 /* We do want to see every occurrence of the parms, so we can't just use
907 walk_tree's hash functionality. */
908 wtd
.p_set
= pointer_set_create ();
909 wtd
.bind_expr_stack
= NULL
;
910 cp_walk_tree (&DECL_SAVED_TREE (fndecl
), cp_genericize_r
, &wtd
, NULL
);
911 pointer_set_destroy (wtd
.p_set
);
912 VEC_free (tree
, heap
, wtd
.bind_expr_stack
);
914 /* Do everything else. */
915 c_genericize (fndecl
);
917 gcc_assert (bc_label
[bc_break
] == NULL
);
918 gcc_assert (bc_label
[bc_continue
] == NULL
);
921 /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
922 NULL if there is in fact nothing to do. ARG2 may be null if FN
923 actually only takes one argument. */
926 cxx_omp_clause_apply_fn (tree fn
, tree arg1
, tree arg2
)
928 tree defparm
, parm
, t
;
936 nargs
= list_length (DECL_ARGUMENTS (fn
));
937 argarray
= (tree
*) alloca (nargs
* sizeof (tree
));
939 defparm
= TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn
)));
941 defparm
= TREE_CHAIN (defparm
);
943 if (TREE_CODE (TREE_TYPE (arg1
)) == ARRAY_TYPE
)
945 tree inner_type
= TREE_TYPE (arg1
);
946 tree start1
, end1
, p1
;
947 tree start2
= NULL
, p2
= NULL
;
948 tree ret
= NULL
, lab
;
954 inner_type
= TREE_TYPE (inner_type
);
955 start1
= build4 (ARRAY_REF
, inner_type
, start1
,
956 size_zero_node
, NULL
, NULL
);
958 start2
= build4 (ARRAY_REF
, inner_type
, start2
,
959 size_zero_node
, NULL
, NULL
);
961 while (TREE_CODE (inner_type
) == ARRAY_TYPE
);
962 start1
= build_fold_addr_expr_loc (input_location
, start1
);
964 start2
= build_fold_addr_expr_loc (input_location
, start2
);
966 end1
= TYPE_SIZE_UNIT (TREE_TYPE (arg1
));
967 end1
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start1
), start1
, end1
);
969 p1
= create_tmp_var (TREE_TYPE (start1
), NULL
);
970 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, start1
);
971 append_to_statement_list (t
, &ret
);
975 p2
= create_tmp_var (TREE_TYPE (start2
), NULL
);
976 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, start2
);
977 append_to_statement_list (t
, &ret
);
980 lab
= create_artificial_label (input_location
);
981 t
= build1 (LABEL_EXPR
, void_type_node
, lab
);
982 append_to_statement_list (t
, &ret
);
987 /* Handle default arguments. */
988 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
989 parm
= TREE_CHAIN (parm
), i
++)
990 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
991 TREE_PURPOSE (parm
), fn
, i
);
992 t
= build_call_a (fn
, i
, argarray
);
993 t
= fold_convert (void_type_node
, t
);
994 t
= fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
995 append_to_statement_list (t
, &ret
);
997 t
= TYPE_SIZE_UNIT (inner_type
);
998 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (p1
), p1
, t
);
999 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p1
), p1
, t
);
1000 append_to_statement_list (t
, &ret
);
1004 t
= TYPE_SIZE_UNIT (inner_type
);
1005 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (p2
), p2
, t
);
1006 t
= build2 (MODIFY_EXPR
, TREE_TYPE (p2
), p2
, t
);
1007 append_to_statement_list (t
, &ret
);
1010 t
= build2 (NE_EXPR
, boolean_type_node
, p1
, end1
);
1011 t
= build3 (COND_EXPR
, void_type_node
, t
, build_and_jump (&lab
), NULL
);
1012 append_to_statement_list (t
, &ret
);
1018 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg1
);
1020 argarray
[i
++] = build_fold_addr_expr_loc (input_location
, arg2
);
1021 /* Handle default arguments. */
1022 for (parm
= defparm
; parm
&& parm
!= void_list_node
;
1023 parm
= TREE_CHAIN (parm
), i
++)
1024 argarray
[i
] = convert_default_arg (TREE_VALUE (parm
),
1025 TREE_PURPOSE (parm
),
1027 t
= build_call_a (fn
, i
, argarray
);
1028 t
= fold_convert (void_type_node
, t
);
1029 return fold_build_cleanup_point_expr (TREE_TYPE (t
), t
);
1033 /* Return code to initialize DECL with its default constructor, or
1034 NULL if there's nothing to do. */
1037 cxx_omp_clause_default_ctor (tree clause
, tree decl
,
1038 tree outer ATTRIBUTE_UNUSED
)
1040 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1044 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), decl
, NULL
);
1049 /* Return code to initialize DST with a copy constructor from SRC. */
1052 cxx_omp_clause_copy_ctor (tree clause
, tree dst
, tree src
)
1054 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1058 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 0), dst
, src
);
1060 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1065 /* Similarly, except use an assignment operator instead. */
1068 cxx_omp_clause_assign_op (tree clause
, tree dst
, tree src
)
1070 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1074 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 2), dst
, src
);
1076 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
1081 /* Return code to destroy DECL. */
1084 cxx_omp_clause_dtor (tree clause
, tree decl
)
1086 tree info
= CP_OMP_CLAUSE_INFO (clause
);
1090 ret
= cxx_omp_clause_apply_fn (TREE_VEC_ELT (info
, 1), decl
, NULL
);
1095 /* True if OpenMP should privatize what this DECL points to rather
1096 than the DECL itself. */
1099 cxx_omp_privatize_by_reference (const_tree decl
)
1101 return is_invisiref_parm (decl
);
1104 /* True if OpenMP sharing attribute of DECL is predetermined. */
1106 enum omp_clause_default_kind
1107 cxx_omp_predetermined_sharing (tree decl
)
1111 /* Static data members are predetermined as shared. */
1112 if (TREE_STATIC (decl
))
1114 tree ctx
= CP_DECL_CONTEXT (decl
);
1115 if (TYPE_P (ctx
) && MAYBE_CLASS_TYPE_P (ctx
))
1116 return OMP_CLAUSE_DEFAULT_SHARED
;
1119 type
= TREE_TYPE (decl
);
1120 if (TREE_CODE (type
) == REFERENCE_TYPE
)
1122 if (!is_invisiref_parm (decl
))
1123 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1124 type
= TREE_TYPE (type
);
1126 if (TREE_CODE (decl
) == RESULT_DECL
&& DECL_NAME (decl
))
1128 /* NVR doesn't preserve const qualification of the
1130 tree outer
= outer_curly_brace_block (current_function_decl
);
1134 for (var
= BLOCK_VARS (outer
); var
; var
= TREE_CHAIN (var
))
1135 if (DECL_NAME (decl
) == DECL_NAME (var
)
1136 && (TYPE_MAIN_VARIANT (type
)
1137 == TYPE_MAIN_VARIANT (TREE_TYPE (var
))))
1139 if (TYPE_READONLY (TREE_TYPE (var
)))
1140 type
= TREE_TYPE (var
);
1146 if (type
== error_mark_node
)
1147 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1149 /* Variables with const-qualified type having no mutable member
1150 are predetermined shared. */
1151 if (TYPE_READONLY (type
) && !cp_has_mutable_p (type
))
1152 return OMP_CLAUSE_DEFAULT_SHARED
;
1154 return OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
1157 /* Finalize an implicitly determined clause. */
1160 cxx_omp_finish_clause (tree c
)
1162 tree decl
, inner_type
;
1163 bool make_shared
= false;
1165 if (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_FIRSTPRIVATE
)
1168 decl
= OMP_CLAUSE_DECL (c
);
1169 decl
= require_complete_type (decl
);
1170 inner_type
= TREE_TYPE (decl
);
1171 if (decl
== error_mark_node
)
1173 else if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1175 if (is_invisiref_parm (decl
))
1176 inner_type
= TREE_TYPE (inner_type
);
1179 error ("%qE implicitly determined as %<firstprivate%> has reference type",
1185 /* We're interested in the base element, not arrays. */
1186 while (TREE_CODE (inner_type
) == ARRAY_TYPE
)
1187 inner_type
= TREE_TYPE (inner_type
);
1189 /* Check for special function availability by building a call to one.
1190 Save the results, because later we won't be in the right context
1191 for making these queries. */
1193 && CLASS_TYPE_P (inner_type
)
1194 && cxx_omp_create_clause_info (c
, inner_type
, false, true, false))
1198 OMP_CLAUSE_CODE (c
) = OMP_CLAUSE_SHARED
;