1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "tree-dump.h"
28 #include "tree-inline.h"
30 #include "gimple-iterator.h"
31 #include "gimple-walk.h"
32 #include "tree-iterator.h"
36 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
37 #include "langhooks.h"
38 #include "pointer-set.h"
39 #include "gimple-low.h"
42 /* The object of this pass is to lower the representation of a set of nested
43 functions in order to expose all of the gory details of the various
44 nonlocal references. We want to do this sooner rather than later, in
45 order to give us more freedom in emitting all of the functions in question.
47 Back in olden times, when gcc was young, we developed an insanely
48 complicated scheme whereby variables which were referenced nonlocally
49 were forced to live in the stack of the declaring function, and then
50 the nested functions magically discovered where these variables were
51 placed. In order for this scheme to function properly, it required
52 that the outer function be partially expanded, then we switch to
53 compiling the inner function, and once done with those we switch back
54 to compiling the outer function. Such delicate ordering requirements
55 makes it difficult to do whole translation unit optimizations
56 involving such functions.
58 The implementation here is much more direct. Everything that can be
59 referenced by an inner function is a member of an explicitly created
60 structure herein called the "nonlocal frame struct". The incoming
61 static chain for a nested function is a pointer to this struct in
62 the parent. In this way, we settle on known offsets from a known
63 base, and so are decoupled from the logic that places objects in the
64 function's stack frame. More importantly, we don't have to wait for
65 that to happen -- since the compilation of the inner function is no
66 longer tied to a real stack frame, the nonlocal frame struct can be
67 allocated anywhere.  Which means that the outer function is now inlinable.
70 Theory of operation here is very simple. Iterate over all the
71 statements in all the functions (depth first) several times,
72 allocating structures and fields on demand. In general we want to
73 examine inner functions first, so that we can avoid making changes
74 to outer functions which are unnecessary.
76 The order of the passes matters a bit, in that later passes will be
77 skipped if it is discovered that the functions don't actually interact
78 at all. That is, they're nested in the lexical sense but could have
79 been written as independent functions without change. */
84 struct nesting_info
*outer
;
85 struct nesting_info
*inner
;
86 struct nesting_info
*next
;
88 struct pointer_map_t
*field_map
;
89 struct pointer_map_t
*var_map
;
90 struct pointer_set_t
*mem_refs
;
91 bitmap suppress_expansion
;
94 tree new_local_var_chain
;
102 bool any_parm_remapped
;
103 bool any_tramp_created
;
104 char static_chain_added
;
108 /* Iterate over the nesting tree, starting with ROOT, depth first. */
110 static inline struct nesting_info
*
111 iter_nestinfo_start (struct nesting_info
*root
)
118 static inline struct nesting_info
*
119 iter_nestinfo_next (struct nesting_info
*node
)
122 return iter_nestinfo_start (node
->next
);
126 #define FOR_EACH_NEST_INFO(I, ROOT) \
127 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
129 /* Obstack used for the bitmaps in the struct above. */
130 static struct bitmap_obstack nesting_info_bitmap_obstack
;
133 /* We're working in so many different function contexts simultaneously,
134 that create_tmp_var is dangerous. Prevent mishap. */
135 #define create_tmp_var cant_use_create_tmp_var_here_dummy
137 /* Like create_tmp_var, except record the variable for registration at
138 the given nesting level. */
141 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
145 /* If the type is of variable size or a type which must be created by the
146 frontend, something is wrong. Note that we explicitly allow
147 incomplete types here, since we create them ourselves here. */
148 gcc_assert (!TREE_ADDRESSABLE (type
));
149 gcc_assert (!TYPE_SIZE_UNIT (type
)
150 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
152 tmp_var
= create_tmp_var_raw (type
, prefix
);
153 DECL_CONTEXT (tmp_var
) = info
->context
;
154 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
155 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
156 if (TREE_CODE (type
) == COMPLEX_TYPE
157 || TREE_CODE (type
) == VECTOR_TYPE
)
158 DECL_GIMPLE_REG_P (tmp_var
) = 1;
160 info
->new_local_var_chain
= tmp_var
;
165 /* Take the address of EXP to be used within function CONTEXT.
166 Mark it for addressability as necessary. */
169 build_addr (tree exp
, tree context
)
175 while (handled_component_p (base
))
176 base
= TREE_OPERAND (base
, 0);
179 TREE_ADDRESSABLE (base
) = 1;
181 /* Building the ADDR_EXPR will compute a set of properties for
182 that ADDR_EXPR. Those properties are unfortunately context
183 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
185 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
186 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
187 way the properties are for the ADDR_EXPR are computed properly. */
188 save_context
= current_function_decl
;
189 current_function_decl
= context
;
190 retval
= build_fold_addr_expr (exp
);
191 current_function_decl
= save_context
;
195 /* Insert FIELD into TYPE, sorted by alignment requirements. */
198 insert_field_into_struct (tree type
, tree field
)
202 DECL_CONTEXT (field
) = type
;
204 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
205 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
208 DECL_CHAIN (field
) = *p
;
211 /* Set correct alignment for frame struct type. */
212 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
213 TYPE_ALIGN (type
) = DECL_ALIGN (field
);
216 /* Build or return the RECORD_TYPE that describes the frame state that is
217 shared between INFO->CONTEXT and its nested functions. This record will
218 not be complete until finalize_nesting_tree; up until that point we'll
219 be adding fields as necessary.
221 We also build the DECL that represents this frame in the function. */
224 get_frame_type (struct nesting_info
*info
)
226 tree type
= info
->frame_type
;
231 type
= make_node (RECORD_TYPE
);
233 name
= concat ("FRAME.",
234 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
236 TYPE_NAME (type
) = get_identifier (name
);
239 info
->frame_type
= type
;
240 info
->frame_decl
= create_tmp_var_for (info
, type
, "FRAME");
241 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
243 /* ??? Always make it addressable for now, since it is meant to
244 be pointed to by the static chain pointer. This pessimizes
245 when it turns out that no static chains are needed because
246 the nested functions referencing non-local variables are not
247 reachable, but the true pessimization is to create the non-
248 local frame structure in the first place. */
249 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
254 /* Return true if DECL should be referenced by pointer in the non-local
258 use_pointer_in_frame (tree decl
)
260 if (TREE_CODE (decl
) == PARM_DECL
)
262 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
263 sized decls, and inefficient to copy large aggregates. Don't bother
264 moving anything but scalar variables. */
265 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
269 /* Variable sized types make things "interesting" in the frame. */
270 return DECL_SIZE (decl
) == NULL
|| !TREE_CONSTANT (DECL_SIZE (decl
));
274 /* Given DECL, a non-locally accessed variable, find or create a field
275 in the non-local frame structure for the given nesting context. */
278 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
279 enum insert_option insert
)
283 if (insert
== NO_INSERT
)
285 slot
= pointer_map_contains (info
->field_map
, decl
);
286 return slot
? (tree
) *slot
: NULL_TREE
;
289 slot
= pointer_map_insert (info
->field_map
, decl
);
292 tree field
= make_node (FIELD_DECL
);
293 DECL_NAME (field
) = DECL_NAME (decl
);
295 if (use_pointer_in_frame (decl
))
297 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
298 DECL_ALIGN (field
) = TYPE_ALIGN (TREE_TYPE (field
));
299 DECL_NONADDRESSABLE_P (field
) = 1;
303 TREE_TYPE (field
) = TREE_TYPE (decl
);
304 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
305 DECL_ALIGN (field
) = DECL_ALIGN (decl
);
306 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
307 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
308 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
309 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
312 insert_field_into_struct (get_frame_type (info
), field
);
315 if (TREE_CODE (decl
) == PARM_DECL
)
316 info
->any_parm_remapped
= true;
322 /* Build or return the variable that holds the static chain within
323 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
326 get_chain_decl (struct nesting_info
*info
)
328 tree decl
= info
->chain_decl
;
334 type
= get_frame_type (info
->outer
);
335 type
= build_pointer_type (type
);
337 /* Note that this variable is *not* entered into any BIND_EXPR;
338 the construction of this variable is handled specially in
339 expand_function_start and initialize_inlined_parameters.
340 Note also that it's represented as a parameter. This is more
341 close to the truth, since the initial value does come from
343 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
344 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
345 DECL_ARTIFICIAL (decl
) = 1;
346 DECL_IGNORED_P (decl
) = 1;
347 TREE_USED (decl
) = 1;
348 DECL_CONTEXT (decl
) = info
->context
;
349 DECL_ARG_TYPE (decl
) = type
;
351 /* Tell tree-inline.c that we never write to this variable, so
352 it can copy-prop the replacement value immediately. */
353 TREE_READONLY (decl
) = 1;
355 info
->chain_decl
= decl
;
358 && (dump_flags
& TDF_DETAILS
)
359 && !DECL_STATIC_CHAIN (info
->context
))
360 fprintf (dump_file
, "Setting static-chain for %s\n",
361 lang_hooks
.decl_printable_name (info
->context
, 2));
363 DECL_STATIC_CHAIN (info
->context
) = 1;
368 /* Build or return the field within the non-local frame state that holds
369 the static chain for INFO->CONTEXT. This is the way to walk back up
370 multiple nesting levels. */
373 get_chain_field (struct nesting_info
*info
)
375 tree field
= info
->chain_field
;
379 tree type
= build_pointer_type (get_frame_type (info
->outer
));
381 field
= make_node (FIELD_DECL
);
382 DECL_NAME (field
) = get_identifier ("__chain");
383 TREE_TYPE (field
) = type
;
384 DECL_ALIGN (field
) = TYPE_ALIGN (type
);
385 DECL_NONADDRESSABLE_P (field
) = 1;
387 insert_field_into_struct (get_frame_type (info
), field
);
389 info
->chain_field
= field
;
392 && (dump_flags
& TDF_DETAILS
)
393 && !DECL_STATIC_CHAIN (info
->context
))
394 fprintf (dump_file
, "Setting static-chain for %s\n",
395 lang_hooks
.decl_printable_name (info
->context
, 2));
397 DECL_STATIC_CHAIN (info
->context
) = 1;
402 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
405 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
410 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
411 gimple_call_set_lhs (call
, t
);
412 if (! gsi_end_p (*gsi
))
413 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
414 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
420 /* Copy EXP into a temporary. Allocate the temporary in the context of
421 INFO and insert the initialization statement before GSI. */
424 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
429 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
430 stmt
= gimple_build_assign (t
, exp
);
431 if (! gsi_end_p (*gsi
))
432 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
433 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
439 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
442 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
443 gimple_stmt_iterator
*gsi
)
445 if (is_gimple_val (exp
))
448 return init_tmp_var (info
, exp
, gsi
);
451 /* Similarly, but copy from the temporary and insert the statement
452 after the iterator. */
455 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
460 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
461 stmt
= gimple_build_assign (exp
, t
);
462 if (! gsi_end_p (*gsi
))
463 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
464 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
469 /* Build or return the type used to represent a nested function trampoline. */
471 static GTY(()) tree trampoline_type
;
474 get_trampoline_type (struct nesting_info
*info
)
476 unsigned align
, size
;
480 return trampoline_type
;
482 align
= TRAMPOLINE_ALIGNMENT
;
483 size
= TRAMPOLINE_SIZE
;
485 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
486 then allocate extra space so that we can do dynamic alignment. */
487 if (align
> STACK_BOUNDARY
)
489 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
490 align
= STACK_BOUNDARY
;
493 t
= build_index_type (size_int (size
- 1));
494 t
= build_array_type (char_type_node
, t
);
495 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
496 FIELD_DECL
, get_identifier ("__data"), t
);
497 DECL_ALIGN (t
) = align
;
498 DECL_USER_ALIGN (t
) = 1;
500 trampoline_type
= make_node (RECORD_TYPE
);
501 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
502 TYPE_FIELDS (trampoline_type
) = t
;
503 layout_type (trampoline_type
);
504 DECL_CONTEXT (t
) = trampoline_type
;
506 return trampoline_type
;
509 /* Given DECL, a nested function, find or create a field in the non-local
510 frame structure for a trampoline for this function. */
513 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
514 enum insert_option insert
)
518 if (insert
== NO_INSERT
)
520 slot
= pointer_map_contains (info
->var_map
, decl
);
521 return slot
? (tree
) *slot
: NULL_TREE
;
524 slot
= pointer_map_insert (info
->var_map
, decl
);
527 tree field
= make_node (FIELD_DECL
);
528 DECL_NAME (field
) = DECL_NAME (decl
);
529 TREE_TYPE (field
) = get_trampoline_type (info
);
530 TREE_ADDRESSABLE (field
) = 1;
532 insert_field_into_struct (get_frame_type (info
), field
);
535 info
->any_tramp_created
= true;
541 /* Build or return the field within the non-local frame state that holds
542 the non-local goto "jmp_buf". The buffer itself is maintained by the
543 rtl middle-end as dynamic stack space is allocated. */
546 get_nl_goto_field (struct nesting_info
*info
)
548 tree field
= info
->nl_goto_field
;
554 /* For __builtin_nonlocal_goto, we need N words. The first is the
555 frame pointer, the rest is for the target's stack pointer save
556 area. The number of words is controlled by STACK_SAVEAREA_MODE;
557 not the best interface, but it'll do for now. */
558 if (Pmode
== ptr_mode
)
559 type
= ptr_type_node
;
561 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
563 size
= GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
564 size
= size
/ GET_MODE_SIZE (Pmode
);
567 type
= build_array_type
568 (type
, build_index_type (size_int (size
)));
570 field
= make_node (FIELD_DECL
);
571 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
572 TREE_TYPE (field
) = type
;
573 DECL_ALIGN (field
) = TYPE_ALIGN (type
);
574 TREE_ADDRESSABLE (field
) = 1;
576 insert_field_into_struct (get_frame_type (info
), field
);
578 info
->nl_goto_field
= field
;
584 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
587 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
588 struct nesting_info
*info
, gimple_seq
*pseq
)
590 struct walk_stmt_info wi
;
592 memset (&wi
, 0, sizeof (wi
));
595 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
599 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
602 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
603 struct nesting_info
*info
)
605 gimple_seq body
= gimple_body (info
->context
);
606 walk_body (callback_stmt
, callback_op
, info
, &body
);
607 gimple_set_body (info
->context
, body
);
610 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
613 walk_gimple_omp_for (gimple for_stmt
,
614 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
615 struct nesting_info
*info
)
617 struct walk_stmt_info wi
;
622 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
625 memset (&wi
, 0, sizeof (wi
));
627 wi
.gsi
= gsi_last (seq
);
629 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
632 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
636 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
641 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
644 t
= gimple_omp_for_incr (for_stmt
, i
);
645 gcc_assert (BINARY_CLASS_P (t
));
647 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
650 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
653 seq
= gsi_seq (wi
.gsi
);
654 if (!gimple_seq_empty_p (seq
))
656 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
657 annotate_all_with_location (seq
, gimple_location (for_stmt
));
658 gimple_seq_add_seq (&pre_body
, seq
);
659 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
663 /* Similarly for ROOT and all functions nested underneath, depth first. */
666 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
667 struct nesting_info
*root
)
669 struct nesting_info
*n
;
670 FOR_EACH_NEST_INFO (n
, root
)
671 walk_function (callback_stmt
, callback_op
, n
);
675 /* We have to check for a fairly pathological case. The operands of function
676 nested function are to be interpreted in the context of the enclosing
677 function. So if any are variably-sized, they will get remapped when the
678 enclosing function is inlined. But that remapping would also have to be
679 done in the types of the PARM_DECLs of the nested function, meaning the
680 argument types of that function will disagree with the arguments in the
681 calls to that function. So we'd either have to make a copy of the nested
682 function corresponding to each time the enclosing function was inlined or
683 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
684 function. The former is not practical. The latter would still require
685 detecting this case to know when to add the conversions. So, for now at
686 least, we don't inline such an enclosing function.
688 We have to do that check recursively, so here return indicating whether
689 FNDECL has such a nested function. ORIG_FN is the function we were
690 trying to inline to use for checking whether any argument is variably
691 modified by anything in it.
693 It would be better to do this in tree-inline.c so that we could give
694 the appropriate warning for why a function can't be inlined, but that's
695 too late since the nesting structure has already been flattened and
696 adding a flag just to record this fact seems a waste of a flag. */
699 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
701 struct cgraph_node
*cgn
= cgraph_get_node (fndecl
);
704 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
706 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
707 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
710 if (check_for_nested_with_variably_modified (cgn
->decl
,
718 /* Construct our local datastructure describing the function nesting
719 tree rooted by CGN. */
721 static struct nesting_info
*
722 create_nesting_tree (struct cgraph_node
*cgn
)
724 struct nesting_info
*info
= XCNEW (struct nesting_info
);
725 info
->field_map
= pointer_map_create ();
726 info
->var_map
= pointer_map_create ();
727 info
->mem_refs
= pointer_set_create ();
728 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
729 info
->context
= cgn
->decl
;
731 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
733 struct nesting_info
*sub
= create_nesting_tree (cgn
);
735 sub
->next
= info
->inner
;
739 /* See discussion at check_for_nested_with_variably_modified for a
740 discussion of why this has to be here. */
741 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
742 DECL_UNINLINABLE (info
->context
) = true;
747 /* Return an expression computing the static chain for TARGET_CONTEXT
748 from INFO->CONTEXT. Insert any necessary computations before TSI. */
751 get_static_chain (struct nesting_info
*info
, tree target_context
,
752 gimple_stmt_iterator
*gsi
)
754 struct nesting_info
*i
;
757 if (info
->context
== target_context
)
759 x
= build_addr (info
->frame_decl
, target_context
);
763 x
= get_chain_decl (info
);
765 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
767 tree field
= get_chain_field (i
);
769 x
= build_simple_mem_ref (x
);
770 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
771 x
= init_tmp_var (info
, x
, gsi
);
779 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
780 frame as seen from INFO->CONTEXT. Insert any necessary computations
784 get_frame_field (struct nesting_info
*info
, tree target_context
,
785 tree field
, gimple_stmt_iterator
*gsi
)
787 struct nesting_info
*i
;
790 if (info
->context
== target_context
)
792 /* Make sure frame_decl gets created. */
793 (void) get_frame_type (info
);
794 x
= info
->frame_decl
;
798 x
= get_chain_decl (info
);
800 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
802 tree field
= get_chain_field (i
);
804 x
= build_simple_mem_ref (x
);
805 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
806 x
= init_tmp_var (info
, x
, gsi
);
809 x
= build_simple_mem_ref (x
);
812 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
816 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
818 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
819 in the nested function with DECL_VALUE_EXPR set to reference the true
820 variable in the parent function. This is used both for debug info
821 and in OpenMP lowering. */
824 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
827 struct nesting_info
*i
;
828 tree x
, field
, new_decl
;
831 slot
= pointer_map_insert (info
->var_map
, decl
);
836 target_context
= decl_function_context (decl
);
838 /* A copy of the code in get_frame_field, but without the temporaries. */
839 if (info
->context
== target_context
)
841 /* Make sure frame_decl gets created. */
842 (void) get_frame_type (info
);
843 x
= info
->frame_decl
;
848 x
= get_chain_decl (info
);
849 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
851 field
= get_chain_field (i
);
852 x
= build_simple_mem_ref (x
);
853 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
855 x
= build_simple_mem_ref (x
);
858 field
= lookup_field_for_decl (i
, decl
, INSERT
);
859 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
860 if (use_pointer_in_frame (decl
))
861 x
= build_simple_mem_ref (x
);
863 /* ??? We should be remapping types as well, surely. */
864 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
865 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
866 DECL_CONTEXT (new_decl
) = info
->context
;
867 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
868 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
869 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
870 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
871 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
872 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
873 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
874 if ((TREE_CODE (decl
) == PARM_DECL
875 || TREE_CODE (decl
) == RESULT_DECL
876 || TREE_CODE (decl
) == VAR_DECL
)
877 && DECL_BY_REFERENCE (decl
))
878 DECL_BY_REFERENCE (new_decl
) = 1;
880 SET_DECL_VALUE_EXPR (new_decl
, x
);
881 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
884 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
885 info
->debug_var_chain
= new_decl
;
888 && info
->context
!= target_context
889 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
890 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
896 /* Callback for walk_gimple_stmt, rewrite all references to VAR
897 and PARM_DECLs that belong to outer functions.
899 The rewrite will involve some number of structure accesses back up
900 the static chain. E.g. for a variable FOO up one nesting level it'll
901 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
902 indirections apply to decls for which use_pointer_in_frame is true. */
905 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
907 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
908 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
912 switch (TREE_CODE (t
))
915 /* Non-automatic variables are never processed. */
916 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
921 if (decl_function_context (t
) != info
->context
)
926 x
= get_nonlocal_debug_decl (info
, t
);
927 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
929 tree target_context
= decl_function_context (t
);
930 struct nesting_info
*i
;
931 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
933 x
= lookup_field_for_decl (i
, t
, INSERT
);
934 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
935 if (use_pointer_in_frame (t
))
937 x
= init_tmp_var (info
, x
, &wi
->gsi
);
938 x
= build_simple_mem_ref (x
);
945 x
= save_tmp_var (info
, x
, &wi
->gsi
);
947 x
= init_tmp_var (info
, x
, &wi
->gsi
);
955 /* We're taking the address of a label from a parent function, but
956 this is not itself a non-local goto. Mark the label such that it
957 will not be deleted, much as we would with a label address in
959 if (decl_function_context (t
) != info
->context
)
960 FORCED_LABEL (t
) = 1;
965 bool save_val_only
= wi
->val_only
;
967 wi
->val_only
= false;
970 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
977 /* If we changed anything, we might no longer be directly
978 referencing a decl. */
979 save_context
= current_function_decl
;
980 current_function_decl
= info
->context
;
981 recompute_tree_invariant_for_addr_expr (t
);
982 current_function_decl
= save_context
;
984 /* If the callback converted the address argument in a context
985 where we only accept variables (and min_invariant, presumably),
986 then compute the address into a temporary. */
988 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
998 case ARRAY_RANGE_REF
:
1000 /* Go down this entire nest and just look at the final prefix and
1001 anything that describes the references. Otherwise, we lose track
1002 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1003 wi
->val_only
= true;
1005 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1007 if (TREE_CODE (t
) == COMPONENT_REF
)
1008 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1010 else if (TREE_CODE (t
) == ARRAY_REF
1011 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1013 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1015 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1017 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1021 wi
->val_only
= false;
1022 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1025 case VIEW_CONVERT_EXPR
:
1026 /* Just request to look at the subtrees, leaving val_only and lhs
1027 untouched. This might actually be for !val_only + lhs, in which
1028 case we don't want to force a replacement by a temporary. */
1033 if (!IS_TYPE_OR_DECL_P (t
))
1036 wi
->val_only
= true;
1045 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1046 struct walk_stmt_info
*);
1048 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1049 and PARM_DECLs that belong to outer functions. */
1052 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1054 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1055 bool need_chain
= false, need_stmts
= false;
1058 bitmap new_suppress
;
1060 new_suppress
= BITMAP_GGC_ALLOC ();
1061 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1063 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1065 switch (OMP_CLAUSE_CODE (clause
))
1067 case OMP_CLAUSE_REDUCTION
:
1068 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1070 goto do_decl_clause
;
1072 case OMP_CLAUSE_LASTPRIVATE
:
1073 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1075 goto do_decl_clause
;
1077 case OMP_CLAUSE_PRIVATE
:
1078 case OMP_CLAUSE_FIRSTPRIVATE
:
1079 case OMP_CLAUSE_COPYPRIVATE
:
1080 case OMP_CLAUSE_SHARED
:
1082 decl
= OMP_CLAUSE_DECL (clause
);
1083 if (TREE_CODE (decl
) == VAR_DECL
1084 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1086 if (decl_function_context (decl
) != info
->context
)
1088 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1089 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1090 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1095 case OMP_CLAUSE_SCHEDULE
:
1096 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1099 case OMP_CLAUSE_FINAL
:
1101 case OMP_CLAUSE_NUM_THREADS
:
1102 wi
->val_only
= true;
1104 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1108 case OMP_CLAUSE_NOWAIT
:
1109 case OMP_CLAUSE_ORDERED
:
1110 case OMP_CLAUSE_DEFAULT
:
1111 case OMP_CLAUSE_COPYIN
:
1112 case OMP_CLAUSE_COLLAPSE
:
1113 case OMP_CLAUSE_UNTIED
:
1114 case OMP_CLAUSE_MERGEABLE
:
1122 info
->suppress_expansion
= new_suppress
;
1125 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1126 switch (OMP_CLAUSE_CODE (clause
))
1128 case OMP_CLAUSE_REDUCTION
:
1129 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1132 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1133 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1135 walk_body (convert_nonlocal_reference_stmt
,
1136 convert_nonlocal_reference_op
, info
,
1137 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1138 walk_body (convert_nonlocal_reference_stmt
,
1139 convert_nonlocal_reference_op
, info
,
1140 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1141 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1146 case OMP_CLAUSE_LASTPRIVATE
:
1147 walk_body (convert_nonlocal_reference_stmt
,
1148 convert_nonlocal_reference_op
, info
,
1149 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1159 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
/* NOTE(review): this extract is fragmented — original line numbers are
   embedded in the text and several lines (return type, braces, some
   statements) are elided.  Text is preserved byte-for-byte; only
   comments are added.  */
/* Strip pointer/vector/function/method wrappers from TYPE, then for an
   ARRAY_TYPE recurse into the element type and, for each domain bound
   (TYPE_MIN_VALUE / TYPE_MAX_VALUE) that is a VAR_DECL or PARM_DECL
   belonging to a function other than INFO->context, create a nonlocal
   debug decl via get_nonlocal_debug_decl.  */
1162 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
/* Look through unnamed pointer types first.  */
1164 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1165 type
= TREE_TYPE (type
);
/* Prefer the original (pre-variant) type when one is recorded.  */
1167 if (TYPE_NAME (type
)
1168 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1169 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1170 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1172 while (POINTER_TYPE_P (type
)
1173 || TREE_CODE (type
) == VECTOR_TYPE
1174 || TREE_CODE (type
) == FUNCTION_TYPE
1175 || TREE_CODE (type
) == METHOD_TYPE
)
1176 type
= TREE_TYPE (type
);
1178 if (TREE_CODE (type
) == ARRAY_TYPE
)
/* Recurse on the element type, then examine both domain bounds.  */
1182 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1183 domain
= TYPE_DOMAIN (type
);
1186 t
= TYPE_MIN_VALUE (domain
);
1187 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1188 && decl_function_context (t
) != info
->context
)
1189 get_nonlocal_debug_decl (info
, t
);
1190 t
= TYPE_MAX_VALUE (domain
);
1191 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1192 && decl_function_context (t
) != info
->context
)
1193 get_nonlocal_debug_decl (info
, t
);
1198 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  For each VAR_DECL in BLOCK's variable list that has a
   variably-modified type, a DECL_VALUE_EXPR, and a declaring function
   other than INFO->context, hand its type to note_nonlocal_vla_type so
   nonlocal debug decls get made for the VLA bounds.  */
1202 note_nonlocal_block_vlas (struct nesting_info
*info
, tree block
)
1206 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
1207 if (TREE_CODE (var
) == VAR_DECL
1208 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
1209 && DECL_HAS_VALUE_EXPR_P (var
)
1210 && decl_function_context (var
) != info
->context
)
1211 note_nonlocal_vla_type (info
, TREE_TYPE (var
));
1214 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1215 PARM_DECLs that belong to outer functions. This handles statements
1216 that are not handled via the standard recursion done in
1217 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1218 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1219 operands of STMT have been handled by this function. */
/* NOTE(review): fragmented extract; embedded numbers are original source
   lines and several lines (case labels, braces, return) are elided.
   Text preserved byte-for-byte; only comments added.  The OMP cases all
   follow one pattern: save info->suppress_expansion, convert the
   construct's clauses, walk its body with the nonlocal callbacks, then
   restore suppress_expansion.  */
1222 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1223 struct walk_stmt_info
*wi
)
1225 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1226 tree save_local_var_chain
;
1227 bitmap save_suppress
;
1228 gimple stmt
= gsi_stmt (*gsi
);
1230 switch (gimple_code (stmt
))
1233 /* Don't walk non-local gotos for now. */
1234 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1236 wi
->val_only
= true;
1238 *handled_ops_p
= true;
/* Parallel/task: clauses may force a FIRSTPRIVATE copy of the static
   chain decl to be added; body statements get a fresh
   new_local_var_chain that is declared into the body afterwards.  */
1243 case GIMPLE_OMP_PARALLEL
:
1244 case GIMPLE_OMP_TASK
:
1245 save_suppress
= info
->suppress_expansion
;
1246 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1250 decl
= get_chain_decl (info
);
1251 c
= build_omp_clause (gimple_location (stmt
),
1252 OMP_CLAUSE_FIRSTPRIVATE
);
1253 OMP_CLAUSE_DECL (c
) = decl
;
1254 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1255 gimple_omp_taskreg_set_clauses (stmt
, c
);
1258 save_local_var_chain
= info
->new_local_var_chain
;
1259 info
->new_local_var_chain
= NULL
;
1261 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1262 info
, gimple_omp_body_ptr (stmt
));
1264 if (info
->new_local_var_chain
)
1265 declare_vars (info
->new_local_var_chain
,
1266 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1268 info
->new_local_var_chain
= save_local_var_chain
;
1269 info
->suppress_expansion
= save_suppress
;
1272 case GIMPLE_OMP_FOR
:
1273 save_suppress
= info
->suppress_expansion
;
1274 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1275 walk_gimple_omp_for (stmt
, convert_nonlocal_reference_stmt
,
1276 convert_nonlocal_reference_op
, info
);
1277 walk_body (convert_nonlocal_reference_stmt
,
1278 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1279 info
->suppress_expansion
= save_suppress
;
1282 case GIMPLE_OMP_SECTIONS
:
1283 save_suppress
= info
->suppress_expansion
;
1284 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1285 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1286 info
, gimple_omp_body_ptr (stmt
));
1287 info
->suppress_expansion
= save_suppress
;
1290 case GIMPLE_OMP_SINGLE
:
1291 save_suppress
= info
->suppress_expansion
;
1292 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1293 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1294 info
, gimple_omp_body_ptr (stmt
));
1295 info
->suppress_expansion
= save_suppress
;
1298 case GIMPLE_OMP_TARGET
:
1299 save_suppress
= info
->suppress_expansion
;
1300 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
1301 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1302 info
, gimple_omp_body_ptr (stmt
));
1303 info
->suppress_expansion
= save_suppress
;
1306 case GIMPLE_OMP_TEAMS
:
1307 save_suppress
= info
->suppress_expansion
;
1308 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
1309 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1310 info
, gimple_omp_body_ptr (stmt
));
1311 info
->suppress_expansion
= save_suppress
;
/* Clause-less OMP constructs: only their bodies need walking.  */
1314 case GIMPLE_OMP_SECTION
:
1315 case GIMPLE_OMP_MASTER
:
1316 case GIMPLE_OMP_TASKGROUP
:
1317 case GIMPLE_OMP_ORDERED
:
1318 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1319 info
, gimple_omp_body_ptr (stmt
));
/* At -O0, record nonlocal VLA bounds from the bind's block so debug
   decls exist for them.  */
1323 if (!optimize
&& gimple_bind_block (stmt
))
1324 note_nonlocal_block_vlas (info
, gimple_bind_block (stmt
));
1326 *handled_ops_p
= false;
1330 wi
->val_only
= true;
1332 *handled_ops_p
= false;
1336 /* For every other statement that we are not interested in
1337 handling here, let the walker traverse the operands. */
1338 *handled_ops_p
= false;
1342 /* We have handled all of STMT operands, no need to traverse the operands. */
1343 *handled_ops_p
= true;
1348 /* A subroutine of convert_local_reference. Create a local variable
1349 in the parent function with DECL_VALUE_EXPR set to reference the
1350 field in FRAME. This is used both for debug info and in OpenMP
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Memoized through info->var_map: an existing mapping
   for DECL is returned directly; otherwise a new VAR_DECL is built
   whose DECL_VALUE_EXPR is a COMPONENT_REF of FIELD in the frame decl,
   its relevant flags copied from DECL, and it is chained onto
   info->debug_var_chain.  */
1354 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1359 slot
= pointer_map_insert (info
->var_map
, decl
);
1361 return (tree
) *slot
;
1363 /* Make sure frame_decl gets created. */
1364 (void) get_frame_type (info
);
1365 x
= info
->frame_decl
;
1366 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1368 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1369 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1370 DECL_CONTEXT (new_decl
) = info
->context
;
/* Mirror the qualifiers/flags of the original decl on the debug decl.  */
1371 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1372 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1373 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1374 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1375 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1376 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1377 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1378 if ((TREE_CODE (decl
) == PARM_DECL
1379 || TREE_CODE (decl
) == RESULT_DECL
1380 || TREE_CODE (decl
) == VAR_DECL
)
1381 && DECL_BY_REFERENCE (decl
))
1382 DECL_BY_REFERENCE (new_decl
) = 1;
1384 SET_DECL_VALUE_EXPR (new_decl
, x
);
1385 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1388 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1389 info
->debug_var_chain
= new_decl
;
1391 /* Do not emit debug info twice. */
1392 DECL_IGNORED_P (decl
) = 1;
1398 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1399 and PARM_DECLs that were referenced by inner nested functions.
1400 The rewrite will be a structure reference to the local frame variable. */
1402 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
/* NOTE(review): fragmented extract; several lines (case labels, braces,
   returns) are elided.  Text preserved byte-for-byte; only comments
   added.  Operand callback for the "local" conversion pass: decls that
   inner functions referenced are replaced by debug decls / frame-field
   reads, and ADDR_EXPR, component refs, and MEM_REF operands are walked
   with careful save/restore of wi->val_only.  */
1405 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1407 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1408 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1409 tree t
= *tp
, field
, x
;
1413 switch (TREE_CODE (t
))
1416 /* Non-automatic variables are never processed. */
1417 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1422 if (decl_function_context (t
) == info
->context
)
1424 /* If we copied a pointer to the frame, then the original decl
1425 is used unchanged in the parent function. */
1426 if (use_pointer_in_frame (t
))
1429 /* No need to transform anything if no child references the
1431 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
/* Replace the decl with its debug decl; unless expansion is suppressed
   for it, materialize an actual read of the frame field.  */
1436 x
= get_local_debug_decl (info
, t
, field
);
1437 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1438 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1443 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1445 x
= init_tmp_var (info
, x
, &wi
->gsi
);
/* ADDR_EXPR handling: walk the operand with val_only cleared, tracking
   wi->changed to know whether anything was rewritten.  */
1453 save_val_only
= wi
->val_only
;
1454 wi
->val_only
= false;
1456 wi
->changed
= false;
1457 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1458 wi
->val_only
= save_val_only
;
1460 /* If we converted anything ... */
1465 /* Then the frame decl is now addressable. */
1466 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
/* recompute_tree_invariant_for_addr_expr is run with
   current_function_decl temporarily switched to info->context.  */
1468 save_context
= current_function_decl
;
1469 current_function_decl
= info
->context
;
1470 recompute_tree_invariant_for_addr_expr (t
);
1471 current_function_decl
= save_context
;
1473 /* If we are in a context where we only accept values, then
1474 compute the address into a temporary. */
1476 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1485 case ARRAY_RANGE_REF
:
1487 /* Go down this entire nest and just look at the final prefix and
1488 anything that describes the references. Otherwise, we lose track
1489 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1490 save_val_only
= wi
->val_only
;
1491 wi
->val_only
= true;
1493 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1495 if (TREE_CODE (t
) == COMPONENT_REF
)
1496 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1498 else if (TREE_CODE (t
) == ARRAY_REF
1499 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1501 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1503 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1505 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
/* Finally walk the innermost prefix with val_only cleared.  */
1509 wi
->val_only
= false;
1510 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1511 wi
->val_only
= save_val_only
;
1515 save_val_only
= wi
->val_only
;
1516 wi
->val_only
= true;
1518 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1520 /* We need to re-fold the MEM_REF as component references as
1521 part of a ADDR_EXPR address are not allowed. But we cannot
1522 fold here, as the chain record type is not yet finalized. */
1523 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1524 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1525 pointer_set_insert (info
->mem_refs
, tp
);
1526 wi
->val_only
= save_val_only
;
1529 case VIEW_CONVERT_EXPR
:
1530 /* Just request to look at the subtrees, leaving val_only and lhs
1531 untouched. This might actually be for !val_only + lhs, in which
1532 case we don't want to force a replacement by a temporary. */
1537 if (!IS_TYPE_OR_DECL_P (t
))
1540 wi
->val_only
= true;
1549 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1550 struct walk_stmt_info
*);
1552 /* Helper for convert_local_reference. Convert all the references in
1553 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  First pass over the clause chain rewrites decl
   operands to local debug decls (recording them in a new suppression
   bitmap) and converts expression operands; after installing the new
   suppress_expansion bitmap, a second pass walks the GIMPLE sequences
   attached to REDUCTION and LASTPRIVATE clauses.  */
1556 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1558 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1559 bool need_frame
= false, need_stmts
= false;
1562 bitmap new_suppress
;
1564 new_suppress
= BITMAP_GGC_ALLOC ();
1565 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1567 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1569 switch (OMP_CLAUSE_CODE (clause
))
1571 case OMP_CLAUSE_REDUCTION
:
1572 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1574 goto do_decl_clause
;
1576 case OMP_CLAUSE_LASTPRIVATE
:
1577 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1579 goto do_decl_clause
;
1581 case OMP_CLAUSE_PRIVATE
:
1582 case OMP_CLAUSE_FIRSTPRIVATE
:
1583 case OMP_CLAUSE_COPYPRIVATE
:
1584 case OMP_CLAUSE_SHARED
:
/* do_decl_clause: static/external decls are left alone; local decls
   that inner functions referenced (and that don't live behind a frame
   pointer) get a debug decl and suppressed expansion.  */
1586 decl
= OMP_CLAUSE_DECL (clause
);
1587 if (TREE_CODE (decl
) == VAR_DECL
1588 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1590 if (decl_function_context (decl
) == info
->context
1591 && !use_pointer_in_frame (decl
))
1593 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1596 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1597 OMP_CLAUSE_DECL (clause
)
1598 = get_local_debug_decl (info
, decl
, field
);
1604 case OMP_CLAUSE_SCHEDULE
:
1605 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1608 case OMP_CLAUSE_FINAL
:
1610 case OMP_CLAUSE_NUM_THREADS
:
1611 wi
->val_only
= true;
1613 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0), &dummy
,
/* Clauses with no operands to convert.  */
1617 case OMP_CLAUSE_NOWAIT
:
1618 case OMP_CLAUSE_ORDERED
:
1619 case OMP_CLAUSE_DEFAULT
:
1620 case OMP_CLAUSE_COPYIN
:
1621 case OMP_CLAUSE_COLLAPSE
:
1622 case OMP_CLAUSE_UNTIED
:
1623 case OMP_CLAUSE_MERGEABLE
:
1631 info
->suppress_expansion
= new_suppress
;
/* Second pass: walk the statement sequences hanging off clauses,
   temporarily adjusting the reduction placeholder's DECL_CONTEXT.  */
1634 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1635 switch (OMP_CLAUSE_CODE (clause
))
1637 case OMP_CLAUSE_REDUCTION
:
1638 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1641 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1642 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1644 walk_body (convert_local_reference_stmt
,
1645 convert_local_reference_op
, info
,
1646 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1647 walk_body (convert_local_reference_stmt
,
1648 convert_local_reference_op
, info
,
1649 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1650 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1655 case OMP_CLAUSE_LASTPRIVATE
:
1656 walk_body (convert_local_reference_stmt
,
1657 convert_local_reference_op
, info
,
1658 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1669 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1670 and PARM_DECLs that were referenced by inner nested functions.
1671 The rewrite will be a structure reference to the local frame variable. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Statement-level twin of convert_nonlocal_reference_stmt
   for the "local" pass: each OMP construct saves suppress_expansion,
   converts its clauses, walks the body with the local callbacks, and
   restores; GIMPLE_ASSIGN clobbers of frame-resident decls are dropped.  */
1674 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1675 struct walk_stmt_info
*wi
)
1677 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1678 tree save_local_var_chain
;
1679 bitmap save_suppress
;
1680 gimple stmt
= gsi_stmt (*gsi
);
1682 switch (gimple_code (stmt
))
/* Parallel/task: clause conversion may require sharing the frame decl
   itself with the outlined region.  */
1684 case GIMPLE_OMP_PARALLEL
:
1685 case GIMPLE_OMP_TASK
:
1686 save_suppress
= info
->suppress_expansion
;
1687 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1691 (void) get_frame_type (info
);
1692 c
= build_omp_clause (gimple_location (stmt
),
1694 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
1695 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1696 gimple_omp_taskreg_set_clauses (stmt
, c
);
1699 save_local_var_chain
= info
->new_local_var_chain
;
1700 info
->new_local_var_chain
= NULL
;
1702 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
1703 gimple_omp_body_ptr (stmt
));
1705 if (info
->new_local_var_chain
)
1706 declare_vars (info
->new_local_var_chain
,
1707 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
1708 info
->new_local_var_chain
= save_local_var_chain
;
1709 info
->suppress_expansion
= save_suppress
;
1712 case GIMPLE_OMP_FOR
:
1713 save_suppress
= info
->suppress_expansion
;
1714 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1715 walk_gimple_omp_for (stmt
, convert_local_reference_stmt
,
1716 convert_local_reference_op
, info
);
1717 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1718 info
, gimple_omp_body_ptr (stmt
));
1719 info
->suppress_expansion
= save_suppress
;
1722 case GIMPLE_OMP_SECTIONS
:
1723 save_suppress
= info
->suppress_expansion
;
1724 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1725 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1726 info
, gimple_omp_body_ptr (stmt
));
1727 info
->suppress_expansion
= save_suppress
;
1730 case GIMPLE_OMP_SINGLE
:
1731 save_suppress
= info
->suppress_expansion
;
1732 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1733 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1734 info
, gimple_omp_body_ptr (stmt
));
1735 info
->suppress_expansion
= save_suppress
;
1738 case GIMPLE_OMP_TARGET
:
1739 save_suppress
= info
->suppress_expansion
;
1740 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
1741 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1742 info
, gimple_omp_body_ptr (stmt
));
1743 info
->suppress_expansion
= save_suppress
;
1746 case GIMPLE_OMP_TEAMS
:
1747 save_suppress
= info
->suppress_expansion
;
1748 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
1749 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1750 info
, gimple_omp_body_ptr (stmt
));
1751 info
->suppress_expansion
= save_suppress
;
1754 case GIMPLE_OMP_SECTION
:
1755 case GIMPLE_OMP_MASTER
:
1756 case GIMPLE_OMP_TASKGROUP
:
1757 case GIMPLE_OMP_ORDERED
:
1758 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1759 info
, gimple_omp_body_ptr (stmt
));
1763 wi
->val_only
= true;
1765 *handled_ops_p
= false;
/* Clobbers of decls moved into the frame are meaningless afterwards;
   replace them with nops.  */
1769 if (gimple_clobber_p (stmt
))
1771 tree lhs
= gimple_assign_lhs (stmt
);
1772 if (!use_pointer_in_frame (lhs
)
1773 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
1775 gsi_replace (gsi
, gimple_build_nop (), true);
1779 *handled_ops_p
= false;
1783 /* For every other statement that we are not interested in
1784 handling here, let the walker traverse the operands. */
1785 *handled_ops_p
= false;
1789 /* Indicate that we have handled all the operands ourselves. */
1790 *handled_ops_p
= true;
1795 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1796 that reference labels from outer functions. The rewrite will be a
1797 call to __builtin_nonlocal_goto. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Non-goto statements, computed gotos, and gotos to
   labels of the current function are passed through unhandled; only a
   goto to an outer function's label is rewritten.  */
1800 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1801 struct walk_stmt_info
*wi
)
1803 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
1804 tree label
, new_label
, target_context
, x
, field
;
1807 gimple stmt
= gsi_stmt (*gsi
);
1809 if (gimple_code (stmt
) != GIMPLE_GOTO
)
1811 *handled_ops_p
= false;
1815 label
= gimple_goto_dest (stmt
);
1816 if (TREE_CODE (label
) != LABEL_DECL
)
1818 *handled_ops_p
= false;
1822 target_context
= decl_function_context (label
);
1823 if (target_context
== info
->context
)
1825 *handled_ops_p
= false;
/* Find the nesting_info of the function that owns the label.  */
1829 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
1832 /* The original user label may also be use for a normal goto, therefore
1833 we must create a new label that will actually receive the abnormal
1834 control transfer. This new label will be marked LABEL_NONLOCAL; this
1835 mark will trigger proper behavior in the cfg, as well as cause the
1836 (hairy target-specific) non-local goto receiver code to be generated
1837 when we expand rtl. Enter this association into var_map so that we
1838 can insert the new label into the IL during a second pass. */
1839 slot
= pointer_map_insert (i
->var_map
, label
);
1842 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
1843 DECL_NONLOCAL (new_label
) = 1;
1847 new_label
= (tree
) *slot
;
1849 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1850 field
= get_nl_goto_field (i
);
1851 x
= get_frame_field (info
, target_context
, field
, gsi
);
1852 x
= build_addr (x
, target_context
);
1853 x
= gsi_gimplify_val (info
, x
, gsi
);
1854 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
1855 2, build_addr (new_label
, target_context
), x
);
1856 gsi_replace (gsi
, call
, false);
1858 /* We have handled all of STMT's operands, no need to keep going. */
1859 *handled_ops_p
= true;
1864 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1865 are referenced via nonlocal goto from a nested function. The rewrite
1866 will involve installing a newly generated DECL_NONLOCAL label, and
1867 (potentially) a branch around the rtl gunk that is assumed to be
1868 attached to such a label. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Labels with no var_map entry (i.e. never the target
   of a rewritten nonlocal goto) are passed through unhandled.  */
1871 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1872 struct walk_stmt_info
*wi
)
1874 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1875 tree label
, new_label
;
1876 gimple_stmt_iterator tmp_gsi
;
1878 gimple stmt
= gsi_stmt (*gsi
);
1880 if (gimple_code (stmt
) != GIMPLE_LABEL
)
1882 *handled_ops_p
= false;
1886 label
= gimple_label_label (stmt
);
1888 slot
= pointer_map_contains (info
->var_map
, label
);
1891 *handled_ops_p
= false;
1895 /* If there's any possibility that the previous statement falls through,
1896 then we must branch around the new non-local label. */
1898 gsi_prev (&tmp_gsi
);
1899 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
1901 gimple stmt
= gimple_build_goto (label
);
1902 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
/* Install the DECL_NONLOCAL receiver label recorded during the first pass
   immediately before the user label.  */
1905 new_label
= (tree
) *slot
;
1906 stmt
= gimple_build_label (new_label
);
1907 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
1909 *handled_ops_p
= true;
1914 /* Called via walk_function+walk_stmt, rewrite all references to addresses
1915 of nested functions that require the use of trampolines. The rewrite
1916 will involve a reference a trampoline generated for the occasion. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  For an ADDR_EXPR of a nested FUNCTION_DECL that uses
   a static chain (and isn't flagged TREE_NO_TRAMPOLINE), the address is
   replaced by the sequence shown below.  */
1919 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1921 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1922 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
1923 tree t
= *tp
, decl
, target_context
, x
, builtin
;
1927 switch (TREE_CODE (t
))
1931 T.1 = &CHAIN->tramp;
1932 T.2 = __builtin_adjust_trampoline (T.1);
1933 T.3 = (func_type)T.2;
1936 decl
= TREE_OPERAND (t
, 0);
1937 if (TREE_CODE (decl
) != FUNCTION_DECL
)
1940 /* Only need to process nested functions. */
1941 target_context
= decl_function_context (decl
);
1942 if (!target_context
)
1945 /* If the nested function doesn't use a static chain, then
1946 it doesn't need a trampoline. */
1947 if (!DECL_STATIC_CHAIN (decl
))
1950 /* If we don't want a trampoline, then don't build one. */
1951 if (TREE_NO_TRAMPOLINE (t
))
1954 /* Lookup the immediate parent of the callee, as that's where
1955 we need to insert the trampoline. */
1956 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
1958 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
1960 /* Compute the address of the field holding the trampoline. */
1961 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1962 x
= build_addr (x
, target_context
);
1963 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
1965 /* Do machine-specific ugliness. Normally this will involve
1966 computing extra alignment, but it can really be anything. */
1967 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
1968 call
= gimple_build_call (builtin
, 1, x
);
1969 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
1971 /* Cast back to the proper function type. */
1972 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
1973 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1979 if (!IS_TYPE_OR_DECL_P (t
))
1988 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1989 to addresses of nested functions that require the use of
1990 trampolines. The rewrite will involve a reference a trampoline
1991 generated for the occasion. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Statement-level driver for convert_tramp_reference_op:
   calls only have their argument operands walked, and parallel/task
   bodies get a fresh new_local_var_chain that is declared into the body
   afterwards.  */
1994 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1995 struct walk_stmt_info
*wi
)
1997 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1998 gimple stmt
= gsi_stmt (*gsi
);
2000 switch (gimple_code (stmt
))
2004 /* Only walk call arguments, lest we generate trampolines for
2006 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2007 for (i
= 0; i
< nargs
; i
++)
2008 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2013 case GIMPLE_OMP_PARALLEL
:
2014 case GIMPLE_OMP_TASK
:
2016 tree save_local_var_chain
;
2017 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2018 save_local_var_chain
= info
->new_local_var_chain
;
2019 info
->new_local_var_chain
= NULL
;
2020 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2021 info
, gimple_omp_body_ptr (stmt
));
2022 if (info
->new_local_var_chain
)
2023 declare_vars (info
->new_local_var_chain
,
2024 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2026 info
->new_local_var_chain
= save_local_var_chain
;
2031 *handled_ops_p
= false;
2036 *handled_ops_p
= true;
2042 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2043 that reference nested functions to make sure that the static chain
2044 is set up properly for the call. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Direct calls to chain-using nested functions get
   their static chain set; parallel/task regions additionally get
   FIRSTPRIVATE CHAIN / SHARED FRAME clauses added when the walked body
   turned out to need them (tracked via info->static_chain_added).  */
2047 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2048 struct walk_stmt_info
*wi
)
2050 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2051 tree decl
, target_context
;
2052 char save_static_chain_added
;
2054 gimple stmt
= gsi_stmt (*gsi
);
2056 switch (gimple_code (stmt
))
2059 if (gimple_call_chain (stmt
))
2061 decl
= gimple_call_fndecl (stmt
);
2064 target_context
= decl_function_context (decl
);
2065 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2067 gimple_call_set_chain (stmt
, get_static_chain (info
, target_context
,
/* Record whether the chain came from this frame (bit 0) or an outer
   one (bit 1).  */
2069 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2073 case GIMPLE_OMP_PARALLEL
:
2074 case GIMPLE_OMP_TASK
:
2075 save_static_chain_added
= info
->static_chain_added
;
2076 info
->static_chain_added
= 0;
2077 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2078 for (i
= 0; i
< 2; i
++)
2081 if ((info
->static_chain_added
& (1 << i
)) == 0)
2083 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2084 /* Don't add CHAIN.* or FRAME.* twice. */
2085 for (c
= gimple_omp_taskreg_clauses (stmt
);
2087 c
= OMP_CLAUSE_CHAIN (c
))
2088 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2089 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2090 && OMP_CLAUSE_DECL (c
) == decl
)
2094 c
= build_omp_clause (gimple_location (stmt
),
2095 i
? OMP_CLAUSE_FIRSTPRIVATE
2096 : OMP_CLAUSE_SHARED
);
2097 OMP_CLAUSE_DECL (c
) = decl
;
2098 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2099 gimple_omp_taskreg_set_clauses (stmt
, c
);
2102 info
->static_chain_added
|= save_static_chain_added
;
2105 case GIMPLE_OMP_FOR
:
2106 walk_body (convert_gimple_call
, NULL
, info
,
2107 gimple_omp_for_pre_body_ptr (stmt
));
/* All remaining OMP constructs: just recurse into the body.  */
2109 case GIMPLE_OMP_SECTIONS
:
2110 case GIMPLE_OMP_SECTION
:
2111 case GIMPLE_OMP_SINGLE
:
2112 case GIMPLE_OMP_TARGET
:
2113 case GIMPLE_OMP_TEAMS
:
2114 case GIMPLE_OMP_MASTER
:
2115 case GIMPLE_OMP_TASKGROUP
:
2116 case GIMPLE_OMP_ORDERED
:
2117 case GIMPLE_OMP_CRITICAL
:
2118 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2122 /* Keep looking for other operands. */
2123 *handled_ops_p
= false;
2127 *handled_ops_p
= true;
2131 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2132 call expressions. At the same time, determine if a nested function
2133 actually uses its static chain; if not, remember that. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Iterates to a fixed point: the trampoline/call
   conversions can themselves create new static-chain uses, so the walk
   repeats until the number of chain-using functions stops changing.  */
2136 convert_all_function_calls (struct nesting_info
*root
)
2138 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2139 struct nesting_info
*n
;
2141 /* First, optimistically clear static_chain for all decls that haven't
2142 used the static chain already for variable access. */
2143 FOR_EACH_NEST_INFO (n
, root
)
2145 tree decl
= n
->context
;
2146 if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2148 DECL_STATIC_CHAIN (decl
) = 0;
2149 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2150 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2151 lang_hooks
.decl_printable_name (decl
, 2));
2154 DECL_STATIC_CHAIN (decl
) = 1;
2155 chain_count
+= DECL_STATIC_CHAIN (decl
);
2158 /* Walk the functions and perform transformations. Note that these
2159 transformations can induce new uses of the static chain, which in turn
2160 require re-examining all users of the decl. */
2161 /* ??? It would make sense to try to use the call graph to speed this up,
2162 but the call graph hasn't really been built yet. Even if it did, we
2163 would still need to iterate in this loop since address-of references
2164 wouldn't show up in the callgraph anyway. */
2168 old_chain_count
= chain_count
;
2172 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2173 fputc ('\n', dump_file
);
2175 FOR_EACH_NEST_INFO (n
, root
)
2177 tree decl
= n
->context
;
2178 walk_function (convert_tramp_reference_stmt
,
2179 convert_tramp_reference_op
, n
);
2180 walk_function (convert_gimple_call
, NULL
, n
);
2181 chain_count
+= DECL_STATIC_CHAIN (decl
);
2184 while (chain_count
!= old_chain_count
);
2186 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2187 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
/* Copy-body callback data extended with the nesting tree root so
   nesting_copy_decl can consult root->var_map.  NOTE(review):
   fragmented extract — braces and the embedded copy_body_data member
   line are elided here; text preserved byte-for-byte.  */
2191 struct nesting_copy_body_data
2194 struct nesting_info
*root
;
2197 /* A helper subroutine for debug_var_chain type remapping. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  copy_decl hook: decls already present in the root's
   var_map return their mapped replacement; TYPE_DECLs with an original
   type are copied with that type remapped; var/parm/result decls are
   copied unchanged (remaining cases elided from this view).  */
2200 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2202 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2203 void **slot
= pointer_map_contains (nid
->root
->var_map
, decl
);
2206 return (tree
) *slot
;
2208 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2210 tree new_decl
= copy_decl_no_change (decl
, id
);
2211 DECL_ORIGINAL_TYPE (new_decl
)
2212 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2216 if (TREE_CODE (decl
) == VAR_DECL
2217 || TREE_CODE (decl
) == PARM_DECL
2218 || TREE_CODE (decl
) == RESULT_DECL
)
2221 return copy_decl_no_change (decl
, id
);
2224 /* A helper function for remap_vla_decls. See if *TP contains
2225 some remapped variables. */
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  walk_tree callback: returns the mapped tree when
   *TP has an entry in DATA's (a nesting_info's) var_map, which makes
   the walk stop with a non-NULL result.  */
2228 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2230 struct nesting_info
*root
= (struct nesting_info
*) data
;
2237 slot
= pointer_map_contains (root
->var_map
, t
);
2240 return (tree
) *slot
;
2245 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
/* NOTE(review): fragmented extract; text preserved byte-for-byte, only
   comments added.  Recurses through subblocks; the first loop scans
   for a VLA value-expr (INDIRECT_REF of a VAR_DECL with a variably
   modified type) that mentions remapped variables, and only when one
   is found does the second loop set up a nesting_copy_body_data and
   remap each such decl's type and DECL_VALUE_EXPR.  */
2249 remap_vla_decls (tree block
, struct nesting_info
*root
)
2251 tree var
, subblock
, val
, type
;
2252 struct nesting_copy_body_data id
;
2254 for (subblock
= BLOCK_SUBBLOCKS (block
);
2256 subblock
= BLOCK_CHAIN (subblock
))
2257 remap_vla_decls (subblock
, root
);
/* First pass: detect whether any VLA decl in this block needs work.  */
2259 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
2260 if (TREE_CODE (var
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (var
))
2262 val
= DECL_VALUE_EXPR (var
);
2263 type
= TREE_TYPE (var
);
2265 if (!(TREE_CODE (val
) == INDIRECT_REF
2266 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2267 && variably_modified_type_p (type
, NULL
)))
2270 if (pointer_map_contains (root
->var_map
, TREE_OPERAND (val
, 0))
2271 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2275 if (var
== NULL_TREE
)
/* Something to remap: set up the copy-body callback data.  */
2278 memset (&id
, 0, sizeof (id
));
2279 id
.cb
.copy_decl
= nesting_copy_decl
;
2280 id
.cb
.decl_map
= pointer_map_create ();
2283 for (; var
; var
= DECL_CHAIN (var
))
2284 if (TREE_CODE (var
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (var
))
2286 struct nesting_info
*i
;
2290 val
= DECL_VALUE_EXPR (var
);
2291 type
= TREE_TYPE (var
);
2293 if (!(TREE_CODE (val
) == INDIRECT_REF
2294 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2295 && variably_modified_type_p (type
, NULL
)))
2298 slot
= pointer_map_contains (root
->var_map
, TREE_OPERAND (val
, 0));
2299 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
/* Locate the nesting_info whose context declared VAR.  */
2302 context
= decl_function_context (var
);
2303 for (i
= root
; i
; i
= i
->outer
)
2304 if (i
->context
== context
)
2310 /* Fully expand value expressions. This avoids having debug variables
2311 only referenced from them and that can be swept during GC. */
2314 tree t
= (tree
) *slot
;
2315 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
2316 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
2319 id
.cb
.src_fn
= i
->context
;
2320 id
.cb
.dst_fn
= i
->context
;
2321 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2323 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
/* Walk matching pointer levels of the old and new type in parallel so
   a shared TYPE_NAME can itself be remapped.  */
2324 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2326 newt
= TREE_TYPE (newt
);
2327 type
= TREE_TYPE (type
);
2329 if (TYPE_NAME (newt
)
2330 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2331 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2333 && TYPE_NAME (newt
) == TYPE_NAME (type
))
2334 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2336 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
2337 if (val
!= DECL_VALUE_EXPR (var
))
2338 SET_DECL_VALUE_EXPR (var
, val
);
2341 pointer_map_destroy (id
.cb
.decl_map
);
2344 /* Fold the MEM_REF *E. */
2346 fold_mem_refs (const void *e
, void *data ATTRIBUTE_UNUSED
)
2348 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
2349 *ref_p
= fold (*ref_p
);
2353 /* Do "everything else" to clean up or complete state collected by the
2354 various walking passes -- lay out the types and decls, generate code
2355 to initialize the frame decl, store critical expressions in the
2356 struct function for rtl to find. */
2359 finalize_nesting_tree_1 (struct nesting_info
*root
)
2361 gimple_seq stmt_list
;
2363 tree context
= root
->context
;
2364 struct function
*sf
;
2368 /* If we created a non-local frame type or decl, we need to lay them
2369 out at this time. */
2370 if (root
->frame_type
)
2372 /* In some cases the frame type will trigger the -Wpadded warning.
2373 This is not helpful; suppress it. */
2374 int save_warn_padded
= warn_padded
;
2378 layout_type (root
->frame_type
);
2379 warn_padded
= save_warn_padded
;
2380 layout_decl (root
->frame_decl
, 0);
2382 /* Remove root->frame_decl from root->new_local_var_chain, so
2383 that we can declare it also in the lexical blocks, which
2384 helps ensure virtual regs that end up appearing in its RTL
2385 expression get substituted in instantiate_virtual_regs(). */
2386 for (adjust
= &root
->new_local_var_chain
;
2387 *adjust
!= root
->frame_decl
;
2388 adjust
= &DECL_CHAIN (*adjust
))
2389 gcc_assert (DECL_CHAIN (*adjust
));
2390 *adjust
= DECL_CHAIN (*adjust
);
2392 DECL_CHAIN (root
->frame_decl
) = NULL_TREE
;
2393 declare_vars (root
->frame_decl
,
2394 gimple_seq_first_stmt (gimple_body (context
)), true);
2397 /* If any parameters were referenced non-locally, then we need to
2398 insert a copy. Likewise, if any variables were referenced by
2399 pointer, we need to initialize the address. */
2400 if (root
->any_parm_remapped
)
2403 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
2407 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
2411 if (use_pointer_in_frame (p
))
2412 x
= build_addr (p
, context
);
2416 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2417 root
->frame_decl
, field
, NULL_TREE
);
2418 stmt
= gimple_build_assign (y
, x
);
2419 gimple_seq_add_stmt (&stmt_list
, stmt
);
2420 /* If the assignment is from a non-register the stmt is
2421 not valid gimple. Make it so by using a temporary instead. */
2422 if (!is_gimple_reg (x
)
2423 && is_gimple_reg_type (TREE_TYPE (x
)))
2425 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
2426 x
= init_tmp_var (root
, x
, &gsi
);
2427 gimple_assign_set_rhs1 (stmt
, x
);
2432 /* If a chain_field was created, then it needs to be initialized
2434 if (root
->chain_field
)
2436 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
2437 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
2438 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
2439 gimple_seq_add_stmt (&stmt_list
, stmt
);
2442 /* If trampolines were created, then we need to initialize them. */
2443 if (root
->any_tramp_created
)
2445 struct nesting_info
*i
;
2446 for (i
= root
->inner
; i
; i
= i
->next
)
2448 tree arg1
, arg2
, arg3
, x
, field
;
2450 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
2454 gcc_assert (DECL_STATIC_CHAIN (i
->context
));
2455 arg3
= build_addr (root
->frame_decl
, context
);
2457 arg2
= build_addr (i
->context
, context
);
2459 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2460 root
->frame_decl
, field
, NULL_TREE
);
2461 arg1
= build_addr (x
, context
);
2463 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
2464 stmt
= gimple_build_call (x
, 3, arg1
, arg2
, arg3
);
2465 gimple_seq_add_stmt (&stmt_list
, stmt
);
2469 /* If we created initialization statements, insert them. */
2473 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
2474 bind
= gimple_seq_first_stmt (gimple_body (context
));
2475 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
2476 gimple_bind_set_body (bind
, stmt_list
);
2479 /* If a chain_decl was created, then it needs to be registered with
2480 struct function so that it gets initialized from the static chain
2481 register at the beginning of the function. */
2482 sf
= DECL_STRUCT_FUNCTION (root
->context
);
2483 sf
->static_chain_decl
= root
->chain_decl
;
2485 /* Similarly for the non-local goto save area. */
2486 if (root
->nl_goto_field
)
2488 sf
->nonlocal_goto_save_area
2489 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
2490 sf
->has_nonlocal_label
= 1;
2493 /* Make sure all new local variables get inserted into the
2494 proper BIND_EXPR. */
2495 if (root
->new_local_var_chain
)
2496 declare_vars (root
->new_local_var_chain
,
2497 gimple_seq_first_stmt (gimple_body (root
->context
)),
2500 if (root
->debug_var_chain
)
2505 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
2507 for (debug_var
= root
->debug_var_chain
; debug_var
;
2508 debug_var
= DECL_CHAIN (debug_var
))
2509 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
2512 /* If there are any debug decls with variable length types,
2513 remap those types using other debug_var_chain variables. */
2516 struct nesting_copy_body_data id
;
2518 memset (&id
, 0, sizeof (id
));
2519 id
.cb
.copy_decl
= nesting_copy_decl
;
2520 id
.cb
.decl_map
= pointer_map_create ();
2523 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
2524 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
2526 tree type
= TREE_TYPE (debug_var
);
2527 tree newt
, t
= type
;
2528 struct nesting_info
*i
;
2530 for (i
= root
; i
; i
= i
->outer
)
2531 if (variably_modified_type_p (type
, i
->context
))
2537 id
.cb
.src_fn
= i
->context
;
2538 id
.cb
.dst_fn
= i
->context
;
2539 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2541 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
2542 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2544 newt
= TREE_TYPE (newt
);
2547 if (TYPE_NAME (newt
)
2548 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2549 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2551 && TYPE_NAME (newt
) == TYPE_NAME (t
))
2552 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2555 pointer_map_destroy (id
.cb
.decl_map
);
2558 scope
= gimple_seq_first_stmt (gimple_body (root
->context
));
2559 if (gimple_bind_block (scope
))
2560 declare_vars (root
->debug_var_chain
, scope
, true);
2562 BLOCK_VARS (DECL_INITIAL (root
->context
))
2563 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
2564 root
->debug_var_chain
);
2567 /* Fold the rewritten MEM_REF trees. */
2568 pointer_set_traverse (root
->mem_refs
, fold_mem_refs
, NULL
);
2570 /* Dump the translated tree function. */
2573 fputs ("\n\n", dump_file
);
2574 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
2579 finalize_nesting_tree (struct nesting_info
*root
)
2581 struct nesting_info
*n
;
2582 FOR_EACH_NEST_INFO (n
, root
)
2583 finalize_nesting_tree_1 (n
);
2586 /* Unnest the nodes and pass them to cgraph. */
2589 unnest_nesting_tree_1 (struct nesting_info
*root
)
2591 struct cgraph_node
*node
= cgraph_get_node (root
->context
);
2593 /* For nested functions update the cgraph to reflect unnesting.
2594 We also delay finalizing of these functions up to this point. */
2597 cgraph_unnest_node (node
);
2598 cgraph_finalize_function (root
->context
, true);
2603 unnest_nesting_tree (struct nesting_info
*root
)
2605 struct nesting_info
*n
;
2606 FOR_EACH_NEST_INFO (n
, root
)
2607 unnest_nesting_tree_1 (n
);
2610 /* Free the data structures allocated during this pass. */
2613 free_nesting_tree (struct nesting_info
*root
)
2615 struct nesting_info
*node
, *next
;
2617 node
= iter_nestinfo_start (root
);
2620 next
= iter_nestinfo_next (node
);
2621 pointer_map_destroy (node
->var_map
);
2622 pointer_map_destroy (node
->field_map
);
2623 pointer_set_destroy (node
->mem_refs
);
2630 /* Gimplify a function and all its nested functions. */
2632 gimplify_all_functions (struct cgraph_node
*root
)
2634 struct cgraph_node
*iter
;
2635 if (!gimple_body (root
->decl
))
2636 gimplify_function_tree (root
->decl
);
2637 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
2638 gimplify_all_functions (iter
);
2641 /* Main entry point for this pass. Process FNDECL and all of its nested
2642 subroutines and turn them into something less tightly bound. */
2645 lower_nested_functions (tree fndecl
)
2647 struct cgraph_node
*cgn
;
2648 struct nesting_info
*root
;
2650 /* If there are no nested functions, there's nothing to do. */
2651 cgn
= cgraph_get_node (fndecl
);
2655 gimplify_all_functions (cgn
);
2657 dump_file
= dump_begin (TDI_nested
, &dump_flags
);
2659 fprintf (dump_file
, "\n;; Function %s\n\n",
2660 lang_hooks
.decl_printable_name (fndecl
, 2));
2662 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
2663 root
= create_nesting_tree (cgn
);
2665 walk_all_functions (convert_nonlocal_reference_stmt
,
2666 convert_nonlocal_reference_op
,
2668 walk_all_functions (convert_local_reference_stmt
,
2669 convert_local_reference_op
,
2671 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
2672 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
2674 convert_all_function_calls (root
);
2675 finalize_nesting_tree (root
);
2676 unnest_nesting_tree (root
);
2678 free_nesting_tree (root
);
2679 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
2683 dump_end (TDI_nested
, dump_file
);
2688 #include "gt-tree-nested.h"