1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "internal-fn.h"
36 #include "gimple-iterator.h"
37 #include "gimple-walk.h"
38 #include "tree-iterator.h"
42 #include "insn-config.h"
50 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
51 #include "langhooks.h"
52 #include "gimple-low.h"
53 #include "gomp-constants.h"
56 /* The object of this pass is to lower the representation of a set of nested
57 functions in order to expose all of the gory details of the various
58 nonlocal references. We want to do this sooner rather than later, in
59 order to give us more freedom in emitting all of the functions in question.
61 Back in olden times, when gcc was young, we developed an insanely
62 complicated scheme whereby variables which were referenced nonlocally
63 were forced to live in the stack of the declaring function, and then
64 the nested functions magically discovered where these variables were
65 placed. In order for this scheme to function properly, it required
66 that the outer function be partially expanded, then we switch to
67 compiling the inner function, and once done with those we switch back
68 to compiling the outer function. Such delicate ordering requirements
69 makes it difficult to do whole translation unit optimizations
70 involving such functions.
72 The implementation here is much more direct. Everything that can be
73 referenced by an inner function is a member of an explicitly created
74 structure herein called the "nonlocal frame struct". The incoming
75 static chain for a nested function is a pointer to this struct in
76 the parent. In this way, we settle on known offsets from a known
77 base, and so are decoupled from the logic that places objects in the
78 function's stack frame. More importantly, we don't have to wait for
79 that to happen -- since the compilation of the inner function is no
80 longer tied to a real stack frame, the nonlocal frame struct can be
81 allocated anywhere. Which means that the outer function is now
84 Theory of operation here is very simple. Iterate over all the
85 statements in all the functions (depth first) several times,
86 allocating structures and fields on demand. In general we want to
87 examine inner functions first, so that we can avoid making changes
88 to outer functions which are unnecessary.
90 The order of the passes matters a bit, in that later passes will be
91 skipped if it is discovered that the functions don't actually interact
92 at all. That is, they're nested in the lexical sense but could have
93 been written as independent functions without change. */
98 struct nesting_info
*outer
;
99 struct nesting_info
*inner
;
100 struct nesting_info
*next
;
102 hash_map
<tree
, tree
> *field_map
;
103 hash_map
<tree
, tree
> *var_map
;
104 hash_set
<tree
*> *mem_refs
;
105 bitmap suppress_expansion
;
108 tree new_local_var_chain
;
109 tree debug_var_chain
;
116 bool any_parm_remapped
;
117 bool any_tramp_created
;
118 char static_chain_added
;
122 /* Iterate over the nesting tree, starting with ROOT, depth first. */
124 static inline struct nesting_info
*
125 iter_nestinfo_start (struct nesting_info
*root
)
132 static inline struct nesting_info
*
133 iter_nestinfo_next (struct nesting_info
*node
)
136 return iter_nestinfo_start (node
->next
);
140 #define FOR_EACH_NEST_INFO(I, ROOT) \
141 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
143 /* Obstack used for the bitmaps in the struct above. */
144 static struct bitmap_obstack nesting_info_bitmap_obstack
;
147 /* We're working in so many different function contexts simultaneously,
148 that create_tmp_var is dangerous. Prevent mishap. */
149 #define create_tmp_var cant_use_create_tmp_var_here_dummy
151 /* Like create_tmp_var, except record the variable for registration at
152 the given nesting level. */
155 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
159 /* If the type is of variable size or a type which must be created by the
160 frontend, something is wrong. Note that we explicitly allow
161 incomplete types here, since we create them ourselves here. */
162 gcc_assert (!TREE_ADDRESSABLE (type
));
163 gcc_assert (!TYPE_SIZE_UNIT (type
)
164 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
166 tmp_var
= create_tmp_var_raw (type
, prefix
);
167 DECL_CONTEXT (tmp_var
) = info
->context
;
168 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
169 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
170 if (TREE_CODE (type
) == COMPLEX_TYPE
171 || TREE_CODE (type
) == VECTOR_TYPE
)
172 DECL_GIMPLE_REG_P (tmp_var
) = 1;
174 info
->new_local_var_chain
= tmp_var
;
179 /* Take the address of EXP to be used within function CONTEXT.
180 Mark it for addressability as necessary. */
183 build_addr (tree exp
, tree context
)
189 while (handled_component_p (base
))
190 base
= TREE_OPERAND (base
, 0);
193 TREE_ADDRESSABLE (base
) = 1;
195 /* Building the ADDR_EXPR will compute a set of properties for
196 that ADDR_EXPR. Those properties are unfortunately context
197 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
199 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
200 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
201 way the properties are for the ADDR_EXPR are computed properly. */
202 save_context
= current_function_decl
;
203 current_function_decl
= context
;
204 retval
= build_fold_addr_expr (exp
);
205 current_function_decl
= save_context
;
209 /* Insert FIELD into TYPE, sorted by alignment requirements. */
212 insert_field_into_struct (tree type
, tree field
)
216 DECL_CONTEXT (field
) = type
;
218 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
219 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
222 DECL_CHAIN (field
) = *p
;
225 /* Set correct alignment for frame struct type. */
226 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
227 TYPE_ALIGN (type
) = DECL_ALIGN (field
);
230 /* Build or return the RECORD_TYPE that describes the frame state that is
231 shared between INFO->CONTEXT and its nested functions. This record will
232 not be complete until finalize_nesting_tree; up until that point we'll
233 be adding fields as necessary.
235 We also build the DECL that represents this frame in the function. */
238 get_frame_type (struct nesting_info
*info
)
240 tree type
= info
->frame_type
;
245 type
= make_node (RECORD_TYPE
);
247 name
= concat ("FRAME.",
248 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
250 TYPE_NAME (type
) = get_identifier (name
);
253 info
->frame_type
= type
;
254 info
->frame_decl
= create_tmp_var_for (info
, type
, "FRAME");
255 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
257 /* ??? Always make it addressable for now, since it is meant to
258 be pointed to by the static chain pointer. This pessimizes
259 when it turns out that no static chains are needed because
260 the nested functions referencing non-local variables are not
261 reachable, but the true pessimization is to create the non-
262 local frame structure in the first place. */
263 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
268 /* Return true if DECL should be referenced by pointer in the non-local
272 use_pointer_in_frame (tree decl
)
274 if (TREE_CODE (decl
) == PARM_DECL
)
276 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
277 sized decls, and inefficient to copy large aggregates. Don't bother
278 moving anything but scalar variables. */
279 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
283 /* Variable sized types make things "interesting" in the frame. */
284 return DECL_SIZE (decl
) == NULL
|| !TREE_CONSTANT (DECL_SIZE (decl
));
288 /* Given DECL, a non-locally accessed variable, find or create a field
289 in the non-local frame structure for the given nesting context. */
292 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
293 enum insert_option insert
)
295 if (insert
== NO_INSERT
)
297 tree
*slot
= info
->field_map
->get (decl
);
298 return slot
? *slot
: NULL_TREE
;
301 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
304 tree field
= make_node (FIELD_DECL
);
305 DECL_NAME (field
) = DECL_NAME (decl
);
307 if (use_pointer_in_frame (decl
))
309 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
310 DECL_ALIGN (field
) = TYPE_ALIGN (TREE_TYPE (field
));
311 DECL_NONADDRESSABLE_P (field
) = 1;
315 TREE_TYPE (field
) = TREE_TYPE (decl
);
316 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
317 DECL_ALIGN (field
) = DECL_ALIGN (decl
);
318 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
319 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
320 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
321 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
324 insert_field_into_struct (get_frame_type (info
), field
);
327 if (TREE_CODE (decl
) == PARM_DECL
)
328 info
->any_parm_remapped
= true;
334 /* Build or return the variable that holds the static chain within
335 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
338 get_chain_decl (struct nesting_info
*info
)
340 tree decl
= info
->chain_decl
;
346 type
= get_frame_type (info
->outer
);
347 type
= build_pointer_type (type
);
349 /* Note that this variable is *not* entered into any BIND_EXPR;
350 the construction of this variable is handled specially in
351 expand_function_start and initialize_inlined_parameters.
352 Note also that it's represented as a parameter. This is more
353 close to the truth, since the initial value does come from
355 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
356 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
357 DECL_ARTIFICIAL (decl
) = 1;
358 DECL_IGNORED_P (decl
) = 1;
359 TREE_USED (decl
) = 1;
360 DECL_CONTEXT (decl
) = info
->context
;
361 DECL_ARG_TYPE (decl
) = type
;
363 /* Tell tree-inline.c that we never write to this variable, so
364 it can copy-prop the replacement value immediately. */
365 TREE_READONLY (decl
) = 1;
367 info
->chain_decl
= decl
;
370 && (dump_flags
& TDF_DETAILS
)
371 && !DECL_STATIC_CHAIN (info
->context
))
372 fprintf (dump_file
, "Setting static-chain for %s\n",
373 lang_hooks
.decl_printable_name (info
->context
, 2));
375 DECL_STATIC_CHAIN (info
->context
) = 1;
380 /* Build or return the field within the non-local frame state that holds
381 the static chain for INFO->CONTEXT. This is the way to walk back up
382 multiple nesting levels. */
385 get_chain_field (struct nesting_info
*info
)
387 tree field
= info
->chain_field
;
391 tree type
= build_pointer_type (get_frame_type (info
->outer
));
393 field
= make_node (FIELD_DECL
);
394 DECL_NAME (field
) = get_identifier ("__chain");
395 TREE_TYPE (field
) = type
;
396 DECL_ALIGN (field
) = TYPE_ALIGN (type
);
397 DECL_NONADDRESSABLE_P (field
) = 1;
399 insert_field_into_struct (get_frame_type (info
), field
);
401 info
->chain_field
= field
;
404 && (dump_flags
& TDF_DETAILS
)
405 && !DECL_STATIC_CHAIN (info
->context
))
406 fprintf (dump_file
, "Setting static-chain for %s\n",
407 lang_hooks
.decl_printable_name (info
->context
, 2));
409 DECL_STATIC_CHAIN (info
->context
) = 1;
414 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
417 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
422 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
423 gimple_call_set_lhs (call
, t
);
424 if (! gsi_end_p (*gsi
))
425 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
426 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
432 /* Copy EXP into a temporary. Allocate the temporary in the context of
433 INFO and insert the initialization statement before GSI. */
436 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
441 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
442 stmt
= gimple_build_assign (t
, exp
);
443 if (! gsi_end_p (*gsi
))
444 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
445 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
451 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
454 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
455 gimple_stmt_iterator
*gsi
)
457 if (is_gimple_val (exp
))
460 return init_tmp_var (info
, exp
, gsi
);
463 /* Similarly, but copy from the temporary and insert the statement
464 after the iterator. */
467 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
472 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
473 stmt
= gimple_build_assign (exp
, t
);
474 if (! gsi_end_p (*gsi
))
475 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
476 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
481 /* Build or return the type used to represent a nested function trampoline. */
483 static GTY(()) tree trampoline_type
;
486 get_trampoline_type (struct nesting_info
*info
)
488 unsigned align
, size
;
492 return trampoline_type
;
494 align
= TRAMPOLINE_ALIGNMENT
;
495 size
= TRAMPOLINE_SIZE
;
497 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
498 then allocate extra space so that we can do dynamic alignment. */
499 if (align
> STACK_BOUNDARY
)
501 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
502 align
= STACK_BOUNDARY
;
505 t
= build_index_type (size_int (size
- 1));
506 t
= build_array_type (char_type_node
, t
);
507 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
508 FIELD_DECL
, get_identifier ("__data"), t
);
509 DECL_ALIGN (t
) = align
;
510 DECL_USER_ALIGN (t
) = 1;
512 trampoline_type
= make_node (RECORD_TYPE
);
513 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
514 TYPE_FIELDS (trampoline_type
) = t
;
515 layout_type (trampoline_type
);
516 DECL_CONTEXT (t
) = trampoline_type
;
518 return trampoline_type
;
521 /* Given DECL, a nested function, find or create a field in the non-local
522 frame structure for a trampoline for this function. */
525 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
526 enum insert_option insert
)
528 if (insert
== NO_INSERT
)
530 tree
*slot
= info
->var_map
->get (decl
);
531 return slot
? *slot
: NULL_TREE
;
534 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
537 tree field
= make_node (FIELD_DECL
);
538 DECL_NAME (field
) = DECL_NAME (decl
);
539 TREE_TYPE (field
) = get_trampoline_type (info
);
540 TREE_ADDRESSABLE (field
) = 1;
542 insert_field_into_struct (get_frame_type (info
), field
);
545 info
->any_tramp_created
= true;
551 /* Build or return the field within the non-local frame state that holds
552 the non-local goto "jmp_buf". The buffer itself is maintained by the
553 rtl middle-end as dynamic stack space is allocated. */
556 get_nl_goto_field (struct nesting_info
*info
)
558 tree field
= info
->nl_goto_field
;
564 /* For __builtin_nonlocal_goto, we need N words. The first is the
565 frame pointer, the rest is for the target's stack pointer save
566 area. The number of words is controlled by STACK_SAVEAREA_MODE;
567 not the best interface, but it'll do for now. */
568 if (Pmode
== ptr_mode
)
569 type
= ptr_type_node
;
571 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
573 size
= GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
574 size
= size
/ GET_MODE_SIZE (Pmode
);
577 type
= build_array_type
578 (type
, build_index_type (size_int (size
)));
580 field
= make_node (FIELD_DECL
);
581 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
582 TREE_TYPE (field
) = type
;
583 DECL_ALIGN (field
) = TYPE_ALIGN (type
);
584 TREE_ADDRESSABLE (field
) = 1;
586 insert_field_into_struct (get_frame_type (info
), field
);
588 info
->nl_goto_field
= field
;
594 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
597 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
598 struct nesting_info
*info
, gimple_seq
*pseq
)
600 struct walk_stmt_info wi
;
602 memset (&wi
, 0, sizeof (wi
));
605 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
609 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
612 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
613 struct nesting_info
*info
)
615 gimple_seq body
= gimple_body (info
->context
);
616 walk_body (callback_stmt
, callback_op
, info
, &body
);
617 gimple_set_body (info
->context
, body
);
620 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
623 walk_gimple_omp_for (gomp_for
*for_stmt
,
624 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
625 struct nesting_info
*info
)
627 struct walk_stmt_info wi
;
632 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
635 memset (&wi
, 0, sizeof (wi
));
637 wi
.gsi
= gsi_last (seq
);
639 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
642 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
646 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
651 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
654 t
= gimple_omp_for_incr (for_stmt
, i
);
655 gcc_assert (BINARY_CLASS_P (t
));
657 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
660 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
663 seq
= gsi_seq (wi
.gsi
);
664 if (!gimple_seq_empty_p (seq
))
666 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
667 annotate_all_with_location (seq
, gimple_location (for_stmt
));
668 gimple_seq_add_seq (&pre_body
, seq
);
669 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
673 /* Similarly for ROOT and all functions nested underneath, depth first. */
676 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
677 struct nesting_info
*root
)
679 struct nesting_info
*n
;
680 FOR_EACH_NEST_INFO (n
, root
)
681 walk_function (callback_stmt
, callback_op
, n
);
685 /* We have to check for a fairly pathological case. The operands of function
686 nested function are to be interpreted in the context of the enclosing
687 function. So if any are variably-sized, they will get remapped when the
688 enclosing function is inlined. But that remapping would also have to be
689 done in the types of the PARM_DECLs of the nested function, meaning the
690 argument types of that function will disagree with the arguments in the
691 calls to that function. So we'd either have to make a copy of the nested
692 function corresponding to each time the enclosing function was inlined or
693 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
694 function. The former is not practical. The latter would still require
695 detecting this case to know when to add the conversions. So, for now at
696 least, we don't inline such an enclosing function.
698 We have to do that check recursively, so here return indicating whether
699 FNDECL has such a nested function. ORIG_FN is the function we were
700 trying to inline to use for checking whether any argument is variably
701 modified by anything in it.
703 It would be better to do this in tree-inline.c so that we could give
704 the appropriate warning for why a function can't be inlined, but that's
705 too late since the nesting structure has already been flattened and
706 adding a flag just to record this fact seems a waste of a flag. */
709 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
711 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
714 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
716 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
717 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
720 if (check_for_nested_with_variably_modified (cgn
->decl
,
728 /* Construct our local datastructure describing the function nesting
729 tree rooted by CGN. */
731 static struct nesting_info
*
732 create_nesting_tree (struct cgraph_node
*cgn
)
734 struct nesting_info
*info
= XCNEW (struct nesting_info
);
735 info
->field_map
= new hash_map
<tree
, tree
>;
736 info
->var_map
= new hash_map
<tree
, tree
>;
737 info
->mem_refs
= new hash_set
<tree
*>;
738 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
739 info
->context
= cgn
->decl
;
741 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
743 struct nesting_info
*sub
= create_nesting_tree (cgn
);
745 sub
->next
= info
->inner
;
749 /* See discussion at check_for_nested_with_variably_modified for a
750 discussion of why this has to be here. */
751 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
752 DECL_UNINLINABLE (info
->context
) = true;
757 /* Return an expression computing the static chain for TARGET_CONTEXT
758 from INFO->CONTEXT. Insert any necessary computations before TSI. */
761 get_static_chain (struct nesting_info
*info
, tree target_context
,
762 gimple_stmt_iterator
*gsi
)
764 struct nesting_info
*i
;
767 if (info
->context
== target_context
)
769 x
= build_addr (info
->frame_decl
, target_context
);
770 info
->static_chain_added
|= 1;
774 x
= get_chain_decl (info
);
775 info
->static_chain_added
|= 2;
777 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
779 tree field
= get_chain_field (i
);
781 x
= build_simple_mem_ref (x
);
782 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
783 x
= init_tmp_var (info
, x
, gsi
);
791 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
792 frame as seen from INFO->CONTEXT. Insert any necessary computations
796 get_frame_field (struct nesting_info
*info
, tree target_context
,
797 tree field
, gimple_stmt_iterator
*gsi
)
799 struct nesting_info
*i
;
802 if (info
->context
== target_context
)
804 /* Make sure frame_decl gets created. */
805 (void) get_frame_type (info
);
806 x
= info
->frame_decl
;
807 info
->static_chain_added
|= 1;
811 x
= get_chain_decl (info
);
812 info
->static_chain_added
|= 2;
814 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
816 tree field
= get_chain_field (i
);
818 x
= build_simple_mem_ref (x
);
819 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
820 x
= init_tmp_var (info
, x
, gsi
);
823 x
= build_simple_mem_ref (x
);
826 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
830 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
832 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
833 in the nested function with DECL_VALUE_EXPR set to reference the true
834 variable in the parent function. This is used both for debug info
835 and in OMP lowering. */
838 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
841 struct nesting_info
*i
;
842 tree x
, field
, new_decl
;
844 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
849 target_context
= decl_function_context (decl
);
851 /* A copy of the code in get_frame_field, but without the temporaries. */
852 if (info
->context
== target_context
)
854 /* Make sure frame_decl gets created. */
855 (void) get_frame_type (info
);
856 x
= info
->frame_decl
;
858 info
->static_chain_added
|= 1;
862 x
= get_chain_decl (info
);
863 info
->static_chain_added
|= 2;
864 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
866 field
= get_chain_field (i
);
867 x
= build_simple_mem_ref (x
);
868 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
870 x
= build_simple_mem_ref (x
);
873 field
= lookup_field_for_decl (i
, decl
, INSERT
);
874 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
875 if (use_pointer_in_frame (decl
))
876 x
= build_simple_mem_ref (x
);
878 /* ??? We should be remapping types as well, surely. */
879 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
880 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
881 DECL_CONTEXT (new_decl
) = info
->context
;
882 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
883 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
884 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
885 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
886 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
887 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
888 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
889 if ((TREE_CODE (decl
) == PARM_DECL
890 || TREE_CODE (decl
) == RESULT_DECL
891 || TREE_CODE (decl
) == VAR_DECL
)
892 && DECL_BY_REFERENCE (decl
))
893 DECL_BY_REFERENCE (new_decl
) = 1;
895 SET_DECL_VALUE_EXPR (new_decl
, x
);
896 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
899 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
900 info
->debug_var_chain
= new_decl
;
903 && info
->context
!= target_context
904 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
905 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
911 /* Callback for walk_gimple_stmt, rewrite all references to VAR
912 and PARM_DECLs that belong to outer functions.
914 The rewrite will involve some number of structure accesses back up
915 the static chain. E.g. for a variable FOO up one nesting level it'll
916 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
917 indirections apply to decls for which use_pointer_in_frame is true. */
920 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
922 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
923 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
927 switch (TREE_CODE (t
))
930 /* Non-automatic variables are never processed. */
931 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
936 if (decl_function_context (t
) != info
->context
)
941 x
= get_nonlocal_debug_decl (info
, t
);
942 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
944 tree target_context
= decl_function_context (t
);
945 struct nesting_info
*i
;
946 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
948 x
= lookup_field_for_decl (i
, t
, INSERT
);
949 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
950 if (use_pointer_in_frame (t
))
952 x
= init_tmp_var (info
, x
, &wi
->gsi
);
953 x
= build_simple_mem_ref (x
);
960 x
= save_tmp_var (info
, x
, &wi
->gsi
);
962 x
= init_tmp_var (info
, x
, &wi
->gsi
);
970 /* We're taking the address of a label from a parent function, but
971 this is not itself a non-local goto. Mark the label such that it
972 will not be deleted, much as we would with a label address in
974 if (decl_function_context (t
) != info
->context
)
975 FORCED_LABEL (t
) = 1;
980 bool save_val_only
= wi
->val_only
;
982 wi
->val_only
= false;
985 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
992 /* If we changed anything, we might no longer be directly
993 referencing a decl. */
994 save_context
= current_function_decl
;
995 current_function_decl
= info
->context
;
996 recompute_tree_invariant_for_addr_expr (t
);
997 current_function_decl
= save_context
;
999 /* If the callback converted the address argument in a context
1000 where we only accept variables (and min_invariant, presumably),
1001 then compute the address into a temporary. */
1003 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1013 case ARRAY_RANGE_REF
:
1015 /* Go down this entire nest and just look at the final prefix and
1016 anything that describes the references. Otherwise, we lose track
1017 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1018 wi
->val_only
= true;
1020 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1022 if (TREE_CODE (t
) == COMPONENT_REF
)
1023 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1025 else if (TREE_CODE (t
) == ARRAY_REF
1026 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1028 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1030 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1032 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1036 wi
->val_only
= false;
1037 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1040 case VIEW_CONVERT_EXPR
:
1041 /* Just request to look at the subtrees, leaving val_only and lhs
1042 untouched. This might actually be for !val_only + lhs, in which
1043 case we don't want to force a replacement by a temporary. */
1048 if (!IS_TYPE_OR_DECL_P (t
))
1051 wi
->val_only
= true;
1060 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1061 struct walk_stmt_info
*);
1063 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1064 and PARM_DECLs that belong to outer functions. */
1067 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1069 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1070 bool need_chain
= false, need_stmts
= false;
1073 bitmap new_suppress
;
1075 new_suppress
= BITMAP_GGC_ALLOC ();
1076 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1078 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1080 switch (OMP_CLAUSE_CODE (clause
))
1082 case OMP_CLAUSE_REDUCTION
:
1083 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1085 goto do_decl_clause
;
1087 case OMP_CLAUSE_LASTPRIVATE
:
1088 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1090 goto do_decl_clause
;
1092 case OMP_CLAUSE_LINEAR
:
1093 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1095 wi
->val_only
= true;
1097 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1099 goto do_decl_clause
;
1101 case OMP_CLAUSE_PRIVATE
:
1102 case OMP_CLAUSE_FIRSTPRIVATE
:
1103 case OMP_CLAUSE_COPYPRIVATE
:
1104 case OMP_CLAUSE_SHARED
:
1105 case OMP_CLAUSE_TO_DECLARE
:
1106 case OMP_CLAUSE_LINK
:
1107 case OMP_CLAUSE_USE_DEVICE_PTR
:
1108 case OMP_CLAUSE_IS_DEVICE_PTR
:
1110 decl
= OMP_CLAUSE_DECL (clause
);
1111 if (TREE_CODE (decl
) == VAR_DECL
1112 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1114 if (decl_function_context (decl
) != info
->context
)
1116 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1117 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1118 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1123 case OMP_CLAUSE_SCHEDULE
:
1124 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1127 case OMP_CLAUSE_FINAL
:
1129 case OMP_CLAUSE_NUM_THREADS
:
1130 case OMP_CLAUSE_DEPEND
:
1131 case OMP_CLAUSE_DEVICE
:
1132 case OMP_CLAUSE_NUM_TEAMS
:
1133 case OMP_CLAUSE_THREAD_LIMIT
:
1134 case OMP_CLAUSE_SAFELEN
:
1135 case OMP_CLAUSE_SIMDLEN
:
1136 case OMP_CLAUSE_PRIORITY
:
1137 case OMP_CLAUSE_GRAINSIZE
:
1138 case OMP_CLAUSE_NUM_TASKS
:
1139 case OMP_CLAUSE_HINT
:
1140 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1141 wi
->val_only
= true;
1143 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1147 case OMP_CLAUSE_DIST_SCHEDULE
:
1148 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1150 wi
->val_only
= true;
1152 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1157 case OMP_CLAUSE_MAP
:
1159 case OMP_CLAUSE_FROM
:
1160 if (OMP_CLAUSE_SIZE (clause
))
1162 wi
->val_only
= true;
1164 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1167 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1168 goto do_decl_clause
;
1169 wi
->val_only
= true;
1171 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1175 case OMP_CLAUSE_ALIGNED
:
1176 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1178 wi
->val_only
= true;
1180 convert_nonlocal_reference_op
1181 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1183 /* Like do_decl_clause, but don't add any suppression. */
1184 decl
= OMP_CLAUSE_DECL (clause
);
1185 if (TREE_CODE (decl
) == VAR_DECL
1186 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1188 if (decl_function_context (decl
) != info
->context
)
1190 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1191 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1196 case OMP_CLAUSE_NOWAIT
:
1197 case OMP_CLAUSE_ORDERED
:
1198 case OMP_CLAUSE_DEFAULT
:
1199 case OMP_CLAUSE_COPYIN
:
1200 case OMP_CLAUSE_COLLAPSE
:
1201 case OMP_CLAUSE_UNTIED
:
1202 case OMP_CLAUSE_MERGEABLE
:
1203 case OMP_CLAUSE_PROC_BIND
:
1204 case OMP_CLAUSE_NOGROUP
:
1205 case OMP_CLAUSE_THREADS
:
1206 case OMP_CLAUSE_SIMD
:
1207 case OMP_CLAUSE_DEFAULTMAP
:
1215 info
->suppress_expansion
= new_suppress
;
1218 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1219 switch (OMP_CLAUSE_CODE (clause
))
1221 case OMP_CLAUSE_REDUCTION
:
1222 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1225 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1226 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1228 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1229 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1231 walk_body (convert_nonlocal_reference_stmt
,
1232 convert_nonlocal_reference_op
, info
,
1233 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1234 walk_body (convert_nonlocal_reference_stmt
,
1235 convert_nonlocal_reference_op
, info
,
1236 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1237 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1239 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1240 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1245 case OMP_CLAUSE_LASTPRIVATE
:
1246 walk_body (convert_nonlocal_reference_stmt
,
1247 convert_nonlocal_reference_op
, info
,
1248 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1251 case OMP_CLAUSE_LINEAR
:
1252 walk_body (convert_nonlocal_reference_stmt
,
1253 convert_nonlocal_reference_op
, info
,
1254 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1264 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
/* Create nonlocal debug decls for any VAR_DECL/PARM_DECL array bounds
   reached through TYPE that were declared in an outer function
   (decl_function_context != INFO->context).  Recurses into array
   element types.
   NOTE(review): this chunk is a corrupted extraction -- each original
   source line is split across several physical lines, the leading
   numbers are original line numbers, and some statements (braces,
   declarations of `domain' and `t', a `return') are missing.  Restore
   from the pristine file before compiling.  */
1267 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
/* Strip unnamed pointer wrappers first.  */
1269 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1270 type
= TREE_TYPE (type
);
/* Prefer the original type behind a typedef.  */
1272 if (TYPE_NAME (type
)
1273 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1274 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1275 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
/* Peel pointer/vector/function/method wrappers down to the base type.  */
1277 while (POINTER_TYPE_P (type
)
1278 || TREE_CODE (type
) == VECTOR_TYPE
1279 || TREE_CODE (type
) == FUNCTION_TYPE
1280 || TREE_CODE (type
) == METHOD_TYPE
)
1281 type
= TREE_TYPE (type
);
1283 if (TREE_CODE (type
) == ARRAY_TYPE
)
/* Recurse into the element type, then examine the domain bounds.  */
1287 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1288 domain
= TYPE_DOMAIN (type
);
/* Lower bound declared in an outer function: give it a debug decl.  */
1291 t
= TYPE_MIN_VALUE (domain
);
1292 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1293 && decl_function_context (t
) != info
->context
)
1294 get_nonlocal_debug_decl (info
, t
);
/* Likewise for the upper bound.  */
1295 t
= TYPE_MAX_VALUE (domain
);
1296 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1297 && decl_function_context (t
) != info
->context
)
1298 get_nonlocal_debug_decl (info
, t
);
1303 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
/* Walk BLOCK_VARS of BLOCK; for each VAR_DECL that has a variably
   modified type, carries a value expr, and was declared in a function
   other than INFO->context, note its nonlocal VLA bound types via
   note_nonlocal_vla_type.
   NOTE(review): corrupted extraction -- original lines are split and
   some surrounding statements are missing; numbers are original line
   numbers.  */
1307 note_nonlocal_block_vlas (struct nesting_info
*info
, tree block
)
1311 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
1312 if (TREE_CODE (var
) == VAR_DECL
1313 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
1314 && DECL_HAS_VALUE_EXPR_P (var
)
1315 && decl_function_context (var
) != info
->context
)
1316 note_nonlocal_vla_type (info
, TREE_TYPE (var
));
1319 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1320 PARM_DECLs that belong to outer functions. This handles statements
1321 that are not handled via the standard recursion done in
1322 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1323 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1324 operands of STMT have been handled by this function. */
/* Statement-level walker callback (see the comment block above this
   function in the file): rewrites references to VAR_DECLs/PARM_DECLs
   belonging to outer functions for statements not handled by the
   standard walk_gimple_stmt recursion.  Sets *HANDLED_OPS_P true when
   operands have been fully handled here.
   NOTE(review): corrupted extraction -- original lines are split and
   many statements (case labels such as GIMPLE_GOTO/GIMPLE_BIND,
   braces, `break's, `return's) are missing.  Restore from the
   pristine file before compiling.  */
1327 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1328 struct walk_stmt_info
*wi
)
1330 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1331 tree save_local_var_chain
;
1332 bitmap save_suppress
;
1333 gimple
*stmt
= gsi_stmt (*gsi
);
1335 switch (gimple_code (stmt
))
/* (GIMPLE_GOTO case -- label dropped by extraction.)  */
1338 /* Don't walk non-local gotos for now. */
1339 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1341 wi
->val_only
= true;
1343 *handled_ops_p
= true;
/* Parallel/task regions: convert clauses; if conversion requires the
   static chain, add a firstprivate clause for the chain decl.  */
1348 case GIMPLE_OMP_PARALLEL
:
1349 case GIMPLE_OMP_TASK
:
1350 save_suppress
= info
->suppress_expansion
;
1351 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1355 decl
= get_chain_decl (info
);
1356 c
= build_omp_clause (gimple_location (stmt
),
1357 OMP_CLAUSE_FIRSTPRIVATE
);
1358 OMP_CLAUSE_DECL (c
) = decl
;
1359 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1360 gimple_omp_taskreg_set_clauses (stmt
, c
);
/* Walk the region body with a fresh local-var chain, then declare any
   new locals at the head of the body and restore the saved chain.  */
1363 save_local_var_chain
= info
->new_local_var_chain
;
1364 info
->new_local_var_chain
= NULL
;
1366 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1367 info
, gimple_omp_body_ptr (stmt
));
1369 if (info
->new_local_var_chain
)
1370 declare_vars (info
->new_local_var_chain
,
1371 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1373 info
->new_local_var_chain
= save_local_var_chain
;
1374 info
->suppress_expansion
= save_suppress
;
/* OMP for: clauses, the for header (via walk_gimple_omp_for), body.  */
1377 case GIMPLE_OMP_FOR
:
1378 save_suppress
= info
->suppress_expansion
;
1379 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1380 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1381 convert_nonlocal_reference_stmt
,
1382 convert_nonlocal_reference_op
, info
);
1383 walk_body (convert_nonlocal_reference_stmt
,
1384 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1385 info
->suppress_expansion
= save_suppress
;
1388 case GIMPLE_OMP_SECTIONS
:
1389 save_suppress
= info
->suppress_expansion
;
1390 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1391 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1392 info
, gimple_omp_body_ptr (stmt
));
1393 info
->suppress_expansion
= save_suppress
;
1396 case GIMPLE_OMP_SINGLE
:
1397 save_suppress
= info
->suppress_expansion
;
1398 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1399 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1400 info
, gimple_omp_body_ptr (stmt
));
1401 info
->suppress_expansion
= save_suppress
;
/* Target regions: non-offloaded ones only need clause/body walks;
   offloaded ones additionally map the static chain with GOMP_MAP_TO.  */
1404 case GIMPLE_OMP_TARGET
:
1405 if (!is_gimple_omp_offloaded (stmt
))
1407 save_suppress
= info
->suppress_expansion
;
1408 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1410 info
->suppress_expansion
= save_suppress
;
1411 walk_body (convert_nonlocal_reference_stmt
,
1412 convert_nonlocal_reference_op
, info
,
1413 gimple_omp_body_ptr (stmt
));
1416 save_suppress
= info
->suppress_expansion
;
1417 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1421 decl
= get_chain_decl (info
);
1422 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1423 OMP_CLAUSE_DECL (c
) = decl
;
1424 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1425 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1426 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1427 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1430 save_local_var_chain
= info
->new_local_var_chain
;
1431 info
->new_local_var_chain
= NULL
;
1433 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1434 info
, gimple_omp_body_ptr (stmt
));
1436 if (info
->new_local_var_chain
)
1437 declare_vars (info
->new_local_var_chain
,
1438 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1440 info
->new_local_var_chain
= save_local_var_chain
;
1441 info
->suppress_expansion
= save_suppress
;
1444 case GIMPLE_OMP_TEAMS
:
1445 save_suppress
= info
->suppress_expansion
;
1446 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
1447 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1448 info
, gimple_omp_body_ptr (stmt
));
1449 info
->suppress_expansion
= save_suppress
;
/* Constructs with a body but no clauses to convert.  */
1452 case GIMPLE_OMP_SECTION
:
1453 case GIMPLE_OMP_MASTER
:
1454 case GIMPLE_OMP_TASKGROUP
:
1455 case GIMPLE_OMP_ORDERED
:
1456 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1457 info
, gimple_omp_body_ptr (stmt
));
/* (GIMPLE_BIND case -- label dropped by extraction.)  At -O0, note
   nonlocal VLA types for debug info; also rewrite static/external
   decls referenced from Fortran NAMELIST_DECLs.  */
1462 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1463 if (!optimize
&& gimple_bind_block (bind_stmt
))
1464 note_nonlocal_block_vlas (info
, gimple_bind_block (bind_stmt
));
1466 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1467 if (TREE_CODE (var
) == NAMELIST_DECL
)
1469 /* Adjust decls mentioned in NAMELIST_DECL. */
1470 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1474 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1476 if (TREE_CODE (decl
) == VAR_DECL
1477 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1479 if (decl_function_context (decl
) != info
->context
)
1480 CONSTRUCTOR_ELT (decls
, i
)->value
1481 = get_nonlocal_debug_decl (info
, decl
);
1485 *handled_ops_p
= false;
1489 wi
->val_only
= true;
1491 *handled_ops_p
= false;
1495 /* For every other statement that we are not interested in
1496 handling here, let the walker traverse the operands. */
1497 *handled_ops_p
= false;
1501 /* We have handled all of STMT operands, no need to traverse the operands. */
1502 *handled_ops_p
= true;
1507 /* A subroutine of convert_local_reference. Create a local variable
1508 in the parent function with DECL_VALUE_EXPR set to reference the
1509 field in FRAME. This is used both for debug info and in OMP
/* Create a local variable in the parent function whose DECL_VALUE_EXPR
   is a COMPONENT_REF of FIELD within INFO->frame_decl, mirroring DECL.
   Used for debug info and OMP clause rewriting (see the comment block
   above this function in the file).  The new decl is cached in
   INFO->var_map and chained onto INFO->debug_var_chain.
   NOTE(review): corrupted extraction -- original lines are split and
   some statements (the slot-hit early return, braces, the final
   `return new_decl') are missing; numbers are original line numbers.  */
1513 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
/* Cache lookup/insert keyed on the original DECL.  */
1517 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1521 /* Make sure frame_decl gets created. */
1522 (void) get_frame_type (info
);
/* Build FRAME.FIELD as the value expression.  */
1523 x
= info
->frame_decl
;
1524 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1526 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1527 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1528 DECL_CONTEXT (new_decl
) = info
->context
;
/* Copy the flags debug/codegen care about from the original decl.  */
1529 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1530 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1531 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1532 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1533 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1534 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1535 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1536 if ((TREE_CODE (decl
) == PARM_DECL
1537 || TREE_CODE (decl
) == RESULT_DECL
1538 || TREE_CODE (decl
) == VAR_DECL
)
1539 && DECL_BY_REFERENCE (decl
))
1540 DECL_BY_REFERENCE (new_decl
) = 1;
1542 SET_DECL_VALUE_EXPR (new_decl
, x
);
1543 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
/* Chain onto the list of debug decls to be declared later.  */
1546 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1547 info
->debug_var_chain
= new_decl
;
1549 /* Do not emit debug info twice. */
1550 DECL_IGNORED_P (decl
) = 1;
1556 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1557 and PARM_DECLs that were referenced by inner nested functions.
1558 The rewrite will be a structure reference to the local frame variable. */
1560 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
/* Operand-level walker callback (see the comment block above this
   function in the file): rewrites references to VAR/PARM_DECLs of the
   current function that inner nested functions referenced, turning
   them into frame-field accesses (or local debug decls when expansion
   is suppressed).
   NOTE(review): corrupted extraction -- original lines are split and
   many statements (case labels such as VAR_DECL/ADDR_EXPR/MEM_REF,
   braces, `break's, `return NULL') are missing.  Restore from the
   pristine file before compiling.  */
1563 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1565 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1566 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1567 tree t
= *tp
, field
, x
;
1571 switch (TREE_CODE (t
))
1574 /* Non-automatic variables are never processed. */
1575 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1580 if (decl_function_context (t
) == info
->context
)
1582 /* If we copied a pointer to the frame, then the original decl
1583 is used unchanged in the parent function. */
1584 if (use_pointer_in_frame (t
))
1587 /* No need to transform anything if no child references the
1589 field
= lookup_field_for_decl (info
, t
, NO_INSERT
)
;
/* Suppressed decls get the debug decl; otherwise a frame-field load,
   materialized as a temporary (save_tmp_var on LHS, init_tmp_var on
   RHS).  */
1594 x
= get_local_debug_decl (info
, t
, field
);
1595 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1596 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1601 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1603 x
= init_tmp_var (info
, x
, &wi
->gsi
);
/* (ADDR_EXPR case -- label dropped by extraction.)  Walk the operand
   with val_only cleared; if anything changed, the frame becomes
   addressable and the ADDR_EXPR invariants must be recomputed in the
   context of INFO->context.  */
1611 save_val_only
= wi
->val_only
;
1612 wi
->val_only
= false;
1614 wi
->changed
= false;
1615 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1616 wi
->val_only
= save_val_only
;
1618 /* If we converted anything ... */
1623 /* Then the frame decl is now addressable. */
1624 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1626 save_context
= current_function_decl
;
1627 current_function_decl
= info
->context
;
1628 recompute_tree_invariant_for_addr_expr (t
);
1629 current_function_decl
= save_context
;
1631 /* If we are in a context where we only accept values, then
1632 compute the address into a temporary. */
1634 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1643 case ARRAY_RANGE_REF
:
1645 /* Go down this entire nest and just look at the final prefix and
1646 anything that describes the references. Otherwise, we lose track
1647 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1648 save_val_only
= wi
->val_only
;
1649 wi
->val_only
= true;
1651 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1653 if (TREE_CODE (t
) == COMPONENT_REF
)
1654 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1656 else if (TREE_CODE (t
) == ARRAY_REF
1657 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1659 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1661 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1663 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
/* Finally walk the innermost base with val_only cleared.  */
1667 wi
->val_only
= false;
1668 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1669 wi
->val_only
= save_val_only
;
/* (MEM_REF case -- label dropped by extraction.)  */
1673 save_val_only
= wi
->val_only
;
1674 wi
->val_only
= true;
1676 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1678 /* We need to re-fold the MEM_REF as component references as
1679 part of a ADDR_EXPR address are not allowed. But we cannot
1680 fold here, as the chain record type is not yet finalized. */
1681 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1682 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1683 info
->mem_refs
->add (tp
);
1684 wi
->val_only
= save_val_only
;
1687 case VIEW_CONVERT_EXPR
:
1688 /* Just request to look at the subtrees, leaving val_only and lhs
1689 untouched. This might actually be for !val_only + lhs, in which
1690 case we don't want to force a replacement by a temporary. */
1695 if (!IS_TYPE_OR_DECL_P (t
))
1698 wi
->val_only
= true;
1707 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1708 struct walk_stmt_info
*);
1710 /* Helper for convert_local_reference. Convert all the references in
1711 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
/* Convert all references in the OMP clause chain at *PCLAUSES for the
   local-reference pass (see the comment block above this function in
   the file).  Two passes over the chain: the first rewrites clause
   decls/operands and collects suppression bits, the second walks the
   GIMPLE sub-sequences of REDUCTION/LASTPRIVATE/LINEAR clauses.
   NOTE(review): corrupted extraction -- original lines are split and
   many statements (the `clause'/`decl'/`dummy' declarations, braces,
   `break's, the do_decl_clause label, the final `return need_frame')
   are missing.  Restore from the pristine file before compiling.  */
1714 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1716 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1717 bool need_frame
= false, need_stmts
= false;
1720 bitmap new_suppress
;
/* Start from the current suppression set; bits added here take effect
   only after the first pass completes.  */
1722 new_suppress
= BITMAP_GGC_ALLOC ();
1723 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1725 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1727 switch (OMP_CLAUSE_CODE (clause
))
1729 case OMP_CLAUSE_REDUCTION
:
1730 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1732 goto do_decl_clause
;
1734 case OMP_CLAUSE_LASTPRIVATE
:
1735 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1737 goto do_decl_clause
;
1739 case OMP_CLAUSE_LINEAR
:
1740 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1742 wi
->val_only
= true;
1744 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
1746 goto do_decl_clause
;
/* Decl-carrying clauses: rewrite the decl to its local debug decl and
   suppress its expansion elsewhere.  */
1748 case OMP_CLAUSE_PRIVATE
:
1749 case OMP_CLAUSE_FIRSTPRIVATE
:
1750 case OMP_CLAUSE_COPYPRIVATE
:
1751 case OMP_CLAUSE_SHARED
:
1752 case OMP_CLAUSE_TO_DECLARE
:
1753 case OMP_CLAUSE_LINK
:
1754 case OMP_CLAUSE_USE_DEVICE_PTR
:
1755 case OMP_CLAUSE_IS_DEVICE_PTR
:
1757 decl
= OMP_CLAUSE_DECL (clause
);
1758 if (TREE_CODE (decl
) == VAR_DECL
1759 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1761 if (decl_function_context (decl
) == info
->context
1762 && !use_pointer_in_frame (decl
))
1764 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1767 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1768 OMP_CLAUSE_DECL (clause
)
1769 = get_local_debug_decl (info
, decl
, field
);
1775 case OMP_CLAUSE_SCHEDULE
:
1776 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1779 case OMP_CLAUSE_FINAL
:
/* Expression-operand clauses: convert operand 0 as a value.  */
1781 case OMP_CLAUSE_NUM_THREADS
:
1782 case OMP_CLAUSE_DEPEND
:
1783 case OMP_CLAUSE_DEVICE
:
1784 case OMP_CLAUSE_NUM_TEAMS
:
1785 case OMP_CLAUSE_THREAD_LIMIT
:
1786 case OMP_CLAUSE_SAFELEN
:
1787 case OMP_CLAUSE_SIMDLEN
:
1788 case OMP_CLAUSE_PRIORITY
:
1789 case OMP_CLAUSE_GRAINSIZE
:
1790 case OMP_CLAUSE_NUM_TASKS
:
1791 case OMP_CLAUSE_HINT
:
1792 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1793 wi
->val_only
= true;
1795 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0), &dummy
,
1799 case OMP_CLAUSE_DIST_SCHEDULE
:
1800 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1802 wi
->val_only
= true;
1804 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
/* MAP/TO/FROM: convert the size expr, then the decl (or walk a
   non-decl address expression).  */
1809 case OMP_CLAUSE_MAP
:
1811 case OMP_CLAUSE_FROM
:
1812 if (OMP_CLAUSE_SIZE (clause
))
1814 wi
->val_only
= true;
1816 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
1819 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1820 goto do_decl_clause
;
1821 wi
->val_only
= true;
1823 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
1827 case OMP_CLAUSE_ALIGNED
:
1828 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1830 wi
->val_only
= true;
1832 convert_local_reference_op
1833 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1835 /* Like do_decl_clause, but don't add any suppression. */
1836 decl
= OMP_CLAUSE_DECL (clause
);
1837 if (TREE_CODE (decl
) == VAR_DECL
1838 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1840 if (decl_function_context (decl
) == info
->context
1841 && !use_pointer_in_frame (decl
))
1843 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1846 OMP_CLAUSE_DECL (clause
)
1847 = get_local_debug_decl (info
, decl
, field
);
/* Flag-only clauses: nothing to convert.  */
1853 case OMP_CLAUSE_NOWAIT
:
1854 case OMP_CLAUSE_ORDERED
:
1855 case OMP_CLAUSE_DEFAULT
:
1856 case OMP_CLAUSE_COPYIN
:
1857 case OMP_CLAUSE_COLLAPSE
:
1858 case OMP_CLAUSE_UNTIED
:
1859 case OMP_CLAUSE_MERGEABLE
:
1860 case OMP_CLAUSE_PROC_BIND
:
1861 case OMP_CLAUSE_NOGROUP
:
1862 case OMP_CLAUSE_THREADS
:
1863 case OMP_CLAUSE_SIMD
:
1864 case OMP_CLAUSE_DEFAULTMAP
:
/* Install the augmented suppression set before the second pass.  */
1872 info
->suppress_expansion
= new_suppress
;
/* Second pass: walk GIMPLE sub-sequences attached to clauses.  */
1875 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1876 switch (OMP_CLAUSE_CODE (clause
))
1878 case OMP_CLAUSE_REDUCTION
:
1879 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
/* Temporarily retarget the placeholder contexts while walking the
   init/merge sequences, then restore them.  */
1882 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1883 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1885 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1886 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1888 walk_body (convert_local_reference_stmt
,
1889 convert_local_reference_op
, info
,
1890 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1891 walk_body (convert_local_reference_stmt
,
1892 convert_local_reference_op
, info
,
1893 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1894 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1896 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1897 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1902 case OMP_CLAUSE_LASTPRIVATE
:
1903 walk_body (convert_local_reference_stmt
,
1904 convert_local_reference_op
, info
,
1905 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1908 case OMP_CLAUSE_LINEAR
:
1909 walk_body (convert_local_reference_stmt
,
1910 convert_local_reference_op
, info
,
1911 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1922 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1923 and PARM_DECLs that were referenced by inner nested functions.
1924 The rewrite will be a structure reference to the local frame variable. */
/* Statement-level companion of convert_local_reference_op (see the
   comment block above this function in the file): rewrites references
   to this function's own VAR/PARM_DECLs that nested functions used,
   as frame-structure references, for statements the standard walker
   cannot handle.
   NOTE(review): corrupted extraction -- original lines are split and
   many statements (case labels such as GIMPLE_COND/GIMPLE_ASSIGN/
   GIMPLE_BIND, braces, `break's, `return's) are missing.  Restore
   from the pristine file before compiling.  */
1927 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1928 struct walk_stmt_info
*wi
)
1930 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1931 tree save_local_var_chain
;
1932 bitmap save_suppress
;
1933 gimple
*stmt
= gsi_stmt (*gsi
);
1935 switch (gimple_code (stmt
))
/* Parallel/task regions: if clause conversion needs the frame, share
   the frame decl into the region via a clause.  */
1937 case GIMPLE_OMP_PARALLEL
:
1938 case GIMPLE_OMP_TASK
:
1939 save_suppress
= info
->suppress_expansion
;
1940 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1944 (void) get_frame_type (info
);
1945 c
= build_omp_clause (gimple_location (stmt
),
1947 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
1948 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1949 gimple_omp_taskreg_set_clauses (stmt
, c
);
1952 save_local_var_chain
= info
->new_local_var_chain
;
1953 info
->new_local_var_chain
= NULL
;
1955 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
1956 gimple_omp_body_ptr (stmt
));
1958 if (info
->new_local_var_chain
)
1959 declare_vars (info
->new_local_var_chain
,
1960 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
1961 info
->new_local_var_chain
= save_local_var_chain
;
1962 info
->suppress_expansion
= save_suppress
;
1965 case GIMPLE_OMP_FOR
:
1966 save_suppress
= info
->suppress_expansion
;
1967 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1968 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1969 convert_local_reference_stmt
,
1970 convert_local_reference_op
, info
);
1971 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1972 info
, gimple_omp_body_ptr (stmt
));
1973 info
->suppress_expansion
= save_suppress
;
1976 case GIMPLE_OMP_SECTIONS
:
1977 save_suppress
= info
->suppress_expansion
;
1978 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1979 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1980 info
, gimple_omp_body_ptr (stmt
));
1981 info
->suppress_expansion
= save_suppress
;
1984 case GIMPLE_OMP_SINGLE
:
1985 save_suppress
= info
->suppress_expansion
;
1986 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1987 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1988 info
, gimple_omp_body_ptr (stmt
));
1989 info
->suppress_expansion
= save_suppress
;
/* Target regions: offloaded ones map the whole frame GOMP_MAP_TOFROM
   when the clauses need it.  */
1992 case GIMPLE_OMP_TARGET
:
1993 if (!is_gimple_omp_offloaded (stmt
))
1995 save_suppress
= info
->suppress_expansion
;
1996 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
1997 info
->suppress_expansion
= save_suppress
;
1998 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
1999 info
, gimple_omp_body_ptr (stmt
));
2002 save_suppress
= info
->suppress_expansion
;
2003 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2006 (void) get_frame_type (info
);
2007 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2008 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2009 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2010 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2011 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2012 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2015 save_local_var_chain
= info
->new_local_var_chain
;
2016 info
->new_local_var_chain
= NULL
;
2018 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2019 gimple_omp_body_ptr (stmt
));
2021 if (info
->new_local_var_chain
)
2022 declare_vars (info
->new_local_var_chain
,
2023 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2024 info
->new_local_var_chain
= save_local_var_chain
;
2025 info
->suppress_expansion
= save_suppress
;
2028 case GIMPLE_OMP_TEAMS
:
2029 save_suppress
= info
->suppress_expansion
;
2030 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2031 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2032 info
, gimple_omp_body_ptr (stmt
));
2033 info
->suppress_expansion
= save_suppress
;
/* Constructs with a body but no clauses to convert.  */
2036 case GIMPLE_OMP_SECTION
:
2037 case GIMPLE_OMP_MASTER
:
2038 case GIMPLE_OMP_TASKGROUP
:
2039 case GIMPLE_OMP_ORDERED
:
2040 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2041 info
, gimple_omp_body_ptr (stmt
));
2045 wi
->val_only
= true;
2047 *handled_ops_p
= false;
/* (GIMPLE_ASSIGN case -- label dropped by extraction.)  A clobber of
   a decl that lives in the frame is dropped entirely.  */
2051 if (gimple_clobber_p (stmt
))
2053 tree lhs
= gimple_assign_lhs (stmt
);
2054 if (!use_pointer_in_frame (lhs
)
2055 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2057 gsi_replace (gsi
, gimple_build_nop (), true);
2061 *handled_ops_p
= false;
/* (GIMPLE_BIND case -- label dropped by extraction.)  Rewrite decls
   mentioned by Fortran NAMELIST_DECLs to their local debug decls.  */
2065 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2067 var
= DECL_CHAIN (var
))
2068 if (TREE_CODE (var
) == NAMELIST_DECL
)
2070 /* Adjust decls mentioned in NAMELIST_DECL. */
2071 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2075 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2077 if (TREE_CODE (decl
) == VAR_DECL
2078 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2080 if (decl_function_context (decl
) == info
->context
2081 && !use_pointer_in_frame (decl
))
2083 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2086 CONSTRUCTOR_ELT (decls
, i
)->value
2087 = get_local_debug_decl (info
, decl
, field
);
2093 *handled_ops_p
= false;
2097 /* For every other statement that we are not interested in
2098 handling here, let the walker traverse the operands. */
2099 *handled_ops_p
= false;
2103 /* Indicate that we have handled all the operands ourselves. */
2104 *handled_ops_p
= true;
2109 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2110 that reference labels from outer functions. The rewrite will be a
2111 call to __builtin_nonlocal_goto. */
/* Rewrite a GIMPLE_GOTO whose label belongs to an outer function into
   a call to __builtin_nonlocal_goto (see the comment block above this
   function in the file).  Non-goto statements and local gotos are
   left for the normal walker.
   NOTE(review): corrupted extraction -- original lines are split and
   some statements (the `call' declaration, braces, `return's, slot
   caching) are missing.  Restore from the pristine file before
   compiling.  */
2114 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2115 struct walk_stmt_info
*wi
)
2117 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2118 tree label
, new_label
, target_context
, x
, field
;
2120 gimple
*stmt
= gsi_stmt (*gsi
);
/* Only GIMPLE_GOTO to a LABEL_DECL of an outer context is handled.  */
2122 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2124 *handled_ops_p
= false;
2128 label
= gimple_goto_dest (stmt
);
2129 if (TREE_CODE (label
) != LABEL_DECL
)
2131 *handled_ops_p
= false;
2135 target_context
= decl_function_context (label
);
2136 if (target_context
== info
->context
)
2138 *handled_ops_p
= false;
/* Find the nesting_info of the function that declares the label.  */
2142 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2145 /* The original user label may also be used for a normal goto, therefore
2146 we must create a new label that will actually receive the abnormal
2147 control transfer. This new label will be marked LABEL_NONLOCAL; this
2148 mark will trigger proper behavior in the cfg, as well as cause the
2149 (hairy target-specific) non-local goto receiver code to be generated
2150 when we expand rtl. Enter this association into var_map so that we
2151 can insert the new label into the IL during a second pass. */
2152 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2155 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2156 DECL_NONLOCAL (new_label
) = 1;
2162 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2163 field
= get_nl_goto_field (i
);
2164 x
= get_frame_field (info
, target_context
, field
, gsi
);
2165 x
= build_addr (x
, target_context
);
2166 x
= gsi_gimplify_val (info
, x
, gsi
);
2167 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2168 2, build_addr (new_label
, target_context
), x
);
2169 gsi_replace (gsi
, call
, false);
2171 /* We have handled all of STMT's operands, no need to keep going. */
2172 *handled_ops_p
= true;
2177 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2178 are referenced via nonlocal goto from a nested function. The rewrite
2179 will involve installing a newly generated DECL_NONLOCAL label, and
2180 (potentially) a branch around the rtl gunk that is assumed to be
2181 attached to such a label. */
/* Install the DECL_NONLOCAL receiver label created by
   convert_nl_goto_reference in front of the original user label (see
   the comment block above this function in the file), branching around
   it when the previous statement may fall through.
   NOTE(review): corrupted extraction -- original lines are split and
   some statements (null checks on `stmt' and `slot', braces,
   `return's) are missing.  Restore from the pristine file before
   compiling.  */
2184 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2185 struct walk_stmt_info
*wi
)
2187 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2188 tree label
, new_label
;
2189 gimple_stmt_iterator tmp_gsi
;
/* Only GIMPLE_LABEL statements are of interest.  */
2190 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2194 *handled_ops_p
= false;
2198 label
= gimple_label_label (stmt
);
/* The second-pass association recorded by convert_nl_goto_reference.  */
2200 tree
*slot
= info
->var_map
->get (label
);
2203 *handled_ops_p
= false;
2207 /* If there's any possibility that the previous statement falls through,
2208 then we must branch around the new non-local label. */
2210 gsi_prev (&tmp_gsi
);
2211 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2213 gimple
*stmt
= gimple_build_goto (label
);
2214 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2217 new_label
= (tree
) *slot
;
2218 stmt
= gimple_build_label (new_label
);
2219 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2221 *handled_ops_p
= true;
2226 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2227 of nested functions that require the use of trampolines. The rewrite
2228 will involve a reference to a trampoline generated for the occasion. */
/* Operand walker: rewrite the address of a nested function that needs
   a static chain into a trampoline reference (see the comment block
   above this function in the file).  The generated GIMPLE is:
   T.1 = &CHAIN->tramp; T.2 = __builtin_adjust_trampoline (T.1);
   T.3 = (func_type) T.2.
   NOTE(review): corrupted extraction -- original lines are split and
   some statements (the ADDR_EXPR case label, the `call' declaration,
   braces, `break's, `return NULL') are missing.  Restore from the
   pristine file before compiling.  */
2231 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2233 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2234 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2235 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2239 switch (TREE_CODE (t
))
/* (ADDR_EXPR case -- label dropped by extraction.)  */
2243 T.1 = &CHAIN->tramp;
2244 T.2 = __builtin_adjust_trampoline (T.1);
2245 T.3 = (func_type)T.2;
2248 decl
= TREE_OPERAND (t
, 0);
2249 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2252 /* Only need to process nested functions. */
2253 target_context
= decl_function_context (decl
);
2254 if (!target_context
)
2257 /* If the nested function doesn't use a static chain, then
2258 it doesn't need a trampoline. */
2259 if (!DECL_STATIC_CHAIN (decl
))
2262 /* If we don't want a trampoline, then don't build one. */
2263 if (TREE_NO_TRAMPOLINE (t
))
2266 /* Lookup the immediate parent of the callee, as that's where
2267 we need to insert the trampoline. */
2268 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2270 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2272 /* Compute the address of the field holding the trampoline. */
2273 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2274 x
= build_addr (x
, target_context
);
2275 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2277 /* Do machine-specific ugliness. Normally this will involve
2278 computing extra alignment, but it can really be anything. */
2279 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2280 call
= gimple_build_call (builtin
, 1, x
);
2281 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2283 /* Cast back to the proper function type. */
2284 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2285 x
= init_tmp_var (info
, x
, &wi
->gsi
);
/* Default: descend into expressions but not types/decls.  */
2291 if (!IS_TYPE_OR_DECL_P (t
))
2300 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2301 to addresses of nested functions that require the use of
2302 trampolines. The rewrite will involve a reference to a trampoline
2303 generated for the occasion. */
2306 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2307 struct walk_stmt_info
*wi
)
2309 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2310 gimple
*stmt
= gsi_stmt (*gsi
);
2312 switch (gimple_code (stmt
))
2316 /* Only walk call arguments, lest we generate trampolines for
2318 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2319 for (i
= 0; i
< nargs
; i
++)
2320 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2325 case GIMPLE_OMP_TARGET
:
2326 if (!is_gimple_omp_offloaded (stmt
))
2328 *handled_ops_p
= false;
2332 case GIMPLE_OMP_PARALLEL
:
2333 case GIMPLE_OMP_TASK
:
2335 tree save_local_var_chain
= info
->new_local_var_chain
;
2336 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2337 info
->new_local_var_chain
= NULL
;
2338 char save_static_chain_added
= info
->static_chain_added
;
2339 info
->static_chain_added
= 0;
2340 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2341 info
, gimple_omp_body_ptr (stmt
));
2342 if (info
->new_local_var_chain
)
2343 declare_vars (info
->new_local_var_chain
,
2344 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2346 for (int i
= 0; i
< 2; i
++)
2349 if ((info
->static_chain_added
& (1 << i
)) == 0)
2351 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2352 /* Don't add CHAIN.* or FRAME.* twice. */
2353 for (c
= gimple_omp_taskreg_clauses (stmt
);
2355 c
= OMP_CLAUSE_CHAIN (c
))
2356 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2357 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2358 && OMP_CLAUSE_DECL (c
) == decl
)
2360 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2362 c
= build_omp_clause (gimple_location (stmt
),
2363 i
? OMP_CLAUSE_FIRSTPRIVATE
2364 : OMP_CLAUSE_SHARED
);
2365 OMP_CLAUSE_DECL (c
) = decl
;
2366 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2367 gimple_omp_taskreg_set_clauses (stmt
, c
);
2371 c
= build_omp_clause (gimple_location (stmt
),
2373 OMP_CLAUSE_DECL (c
) = decl
;
2374 OMP_CLAUSE_SET_MAP_KIND (c
,
2375 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2376 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2377 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2378 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2382 info
->new_local_var_chain
= save_local_var_chain
;
2383 info
->static_chain_added
|= save_static_chain_added
;
2388 *handled_ops_p
= false;
2392 *handled_ops_p
= true;
2398 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2399 that reference nested functions to make sure that the static chain
2400 is set up properly for the call. */
/* For direct calls to nested functions this installs the static-chain
   argument via gimple_call_set_chain; for OMP constructs it recurses
   into the body and adds CHAIN.*/FRAME.* data-sharing or map clauses
   as needed, mirroring convert_tramp_reference_stmt.
   NOTE(review): braces, breaks, and some declarations (e.g. `i', `c')
   are missing from this excerpt — verify against the full file.  */
2403 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2404 struct walk_stmt_info
*wi
)
2406 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2407 tree decl
, target_context
;
2408 char save_static_chain_added
;
2410 gimple
*stmt
= gsi_stmt (*gsi
);
2412 switch (gimple_code (stmt
))
/* A call that already carries a static chain is left alone
   (presumably the early-exit here is in the omitted lines).  */
2415 if (gimple_call_chain (stmt
))
2417 decl
= gimple_call_fndecl (stmt
);
2420 target_context
= decl_function_context (decl
);
2421 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2423 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2424 get_static_chain (info
, target_context
,
/* Record whether we used FRAME.* (bit 0, same-context callee) or
   CHAIN.* (bit 1, outer-context callee).  */
2426 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2430 case GIMPLE_OMP_PARALLEL
:
2431 case GIMPLE_OMP_TASK
:
2432 save_static_chain_added
= info
->static_chain_added
;
2433 info
->static_chain_added
= 0;
2434 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2435 for (i
= 0; i
< 2; i
++)
2438 if ((info
->static_chain_added
& (1 << i
)) == 0)
2440 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2441 /* Don't add CHAIN.* or FRAME.* twice. */
2442 for (c
= gimple_omp_taskreg_clauses (stmt
);
2444 c
= OMP_CLAUSE_CHAIN (c
))
2445 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2446 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2447 && OMP_CLAUSE_DECL (c
) == decl
)
/* Not present yet: chain decl becomes FIRSTPRIVATE, frame decl SHARED.  */
2451 c
= build_omp_clause (gimple_location (stmt
),
2452 i
? OMP_CLAUSE_FIRSTPRIVATE
2453 : OMP_CLAUSE_SHARED
);
2454 OMP_CLAUSE_DECL (c
) = decl
;
2455 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2456 gimple_omp_taskreg_set_clauses (stmt
, c
);
2459 info
->static_chain_added
|= save_static_chain_added
;
2462 case GIMPLE_OMP_TARGET
:
/* Non-offloaded target: just walk the body, no clause surgery.  */
2463 if (!is_gimple_omp_offloaded (stmt
))
2465 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
/* Offloaded target: same save/walk/restore dance, but missing decls
   are added as MAP clauses (chain: GOMP_MAP_TO, frame: GOMP_MAP_TOFROM).  */
2468 save_static_chain_added
= info
->static_chain_added
;
2469 info
->static_chain_added
= 0;
2470 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2471 for (i
= 0; i
< 2; i
++)
2474 if ((info
->static_chain_added
& (1 << i
)) == 0)
2476 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2477 /* Don't add CHAIN.* or FRAME.* twice. */
2478 for (c
= gimple_omp_target_clauses (stmt
);
2480 c
= OMP_CLAUSE_CHAIN (c
))
2481 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2482 && OMP_CLAUSE_DECL (c
) == decl
)
2486 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2487 OMP_CLAUSE_DECL (c
) = decl
;
2488 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2489 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2490 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2491 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2495 info
->static_chain_added
|= save_static_chain_added
;
2498 case GIMPLE_OMP_FOR
:
/* The pre-body of an OMP for also contains statements to convert.  */
2499 walk_body (convert_gimple_call
, NULL
, info
,
2500 gimple_omp_for_pre_body_ptr (stmt
));
2502 case GIMPLE_OMP_SECTIONS
:
2503 case GIMPLE_OMP_SECTION
:
2504 case GIMPLE_OMP_SINGLE
:
2505 case GIMPLE_OMP_TEAMS
:
2506 case GIMPLE_OMP_MASTER
:
2507 case GIMPLE_OMP_TASKGROUP
:
2508 case GIMPLE_OMP_ORDERED
:
2509 case GIMPLE_OMP_CRITICAL
:
2510 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2514 /* Keep looking for other operands. */
2515 *handled_ops_p
= false;
2519 *handled_ops_p
= true;
2523 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2524 call expressions. At the same time, determine if a nested function
2525 actually uses its static chain; if not, remember that. */
/* Iterates to a fixed point: rewriting trampoline references and calls
   can create new static-chain uses, which requires another pass over
   every function in the nesting tree.
   NOTE(review): the conditions guarding get_frame_type/get_chain_decl
   (the !optimize branch) and the do/while header are omitted from this
   excerpt — verify against the full file.  */
2528 convert_all_function_calls (struct nesting_info
*root
)
2530 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2531 struct nesting_info
*n
;
2533 /* First, optimistically clear static_chain for all decls that haven't
2534 used the static chain already for variable access. But always create
2535 it if not optimizing. This makes it possible to reconstruct the static
2536 nesting tree at run time and thus to resolve up-level references from
2537 within the debugger. */
2538 FOR_EACH_NEST_INFO (n
, root
)
2540 tree decl
= n
->context
;
2544 (void) get_frame_type (n
);
2546 (void) get_chain_decl (n
);
/* Outermost function, or one with no chain decl/field so far: guess
   that it does not need a static chain.  */
2548 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2550 DECL_STATIC_CHAIN (decl
) = 0;
2551 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2552 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2553 lang_hooks
.decl_printable_name (decl
, 2));
2556 DECL_STATIC_CHAIN (decl
) = 1;
2557 chain_count
+= DECL_STATIC_CHAIN (decl
);
2560 /* Walk the functions and perform transformations. Note that these
2561 transformations can induce new uses of the static chain, which in turn
2562 require re-examining all users of the decl. */
2563 /* ??? It would make sense to try to use the call graph to speed this up,
2564 but the call graph hasn't really been built yet. Even if it did, we
2565 would still need to iterate in this loop since address-of references
2566 wouldn't show up in the callgraph anyway. */
2570 old_chain_count
= chain_count
;
2574 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2575 fputc ('\n', dump_file
);
2577 FOR_EACH_NEST_INFO (n
, root
)
2579 tree decl
= n
->context
;
2580 walk_function (convert_tramp_reference_stmt
,
2581 convert_tramp_reference_op
, n
);
2582 walk_function (convert_gimple_call
, NULL
, n
);
/* Recount chain users; loop again if the count changed.  */
2583 chain_count
+= DECL_STATIC_CHAIN (decl
);
2586 while (chain_count
!= old_chain_count
);
2588 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2589 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
/* copy_body_data extended with the nesting_info whose var_map drives
   decl remapping in nesting_copy_decl (the embedded copy_body_data
   member, presumably named `cb', is omitted from this excerpt).  */
2593 struct nesting_copy_body_data
2596 struct nesting_info
*root
;
2599 /* A helper subroutine for debug_var_chain type remapping. */
/* copy_decl hook for nesting_copy_body_data: return the replacement
   already recorded in root->var_map if there is one; otherwise copy
   the decl, remapping DECL_ORIGINAL_TYPE for typedefs.  VAR/PARM/
   RESULT decls get special handling in the omitted branch.
   NOTE(review): the null-slot check before the first return and the
   returns of the later branches are missing from this excerpt.  */
2602 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2604 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2605 tree
*slot
= nid
->root
->var_map
->get (decl
);
2608 return (tree
) *slot
;
2610 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2612 tree new_decl
= copy_decl_no_change (decl
, id
);
2613 DECL_ORIGINAL_TYPE (new_decl
)
2614 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2618 if (TREE_CODE (decl
) == VAR_DECL
2619 || TREE_CODE (decl
) == PARM_DECL
2620 || TREE_CODE (decl
) == RESULT_DECL
)
2623 return copy_decl_no_change (decl
, id
);
2626 /* A helper function for remap_vla_decls. See if *TP contains
2627 some remapped variables. */
/* walk_tree predicate: DATA is the nesting_info whose var_map is
   consulted.  NOTE(review): the extraction of `t' from *TP, the
   DECL_P guard, and the return values are omitted from this excerpt.  */
2630 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2632 struct nesting_info
*root
= (struct nesting_info
*) data
;
2638 tree
*slot
= root
->var_map
->get (t
);
2646 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
/* ... referenced from them (tail of the comment is cut off).  Walks
   BLOCK recursively; for each VAR_DECL whose DECL_VALUE_EXPR is an
   INDIRECT_REF of a variable and whose type is variably modified,
   remaps the type and value expression through root->var_map using a
   nesting_copy_body_data.  The first loop only probes whether any
   such variable exists; the second does the actual remapping.
   NOTE(review): braces, `continue's, and some declarations (context,
   newt) are omitted from this excerpt — verify in the full file.  */
2650 remap_vla_decls (tree block
, struct nesting_info
*root
)
2652 tree var
, subblock
, val
, type
;
2653 struct nesting_copy_body_data id
;
/* Depth-first recursion into nested lexical blocks.  */
2655 for (subblock
= BLOCK_SUBBLOCKS (block
);
2657 subblock
= BLOCK_CHAIN (subblock
))
2658 remap_vla_decls (subblock
, root
);
/* Probe pass: find the first variable that needs remapping.  */
2660 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
2661 if (TREE_CODE (var
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (var
))
2663 val
= DECL_VALUE_EXPR (var
);
2664 type
= TREE_TYPE (var
);
2666 if (!(TREE_CODE (val
) == INDIRECT_REF
2667 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2668 && variably_modified_type_p (type
, NULL
)))
2671 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
2672 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
/* Nothing to remap in this block.  */
2676 if (var
== NULL_TREE
)
/* Set up the copy_body_data used for remapping.  */
2679 memset (&id
, 0, sizeof (id
));
2680 id
.cb
.copy_decl
= nesting_copy_decl
;
2681 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
/* Remap pass: resume at the first affected variable.  */
2684 for (; var
; var
= DECL_CHAIN (var
))
2685 if (TREE_CODE (var
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (var
))
2687 struct nesting_info
*i
;
2690 val
= DECL_VALUE_EXPR (var
);
2691 type
= TREE_TYPE (var
);
2693 if (!(TREE_CODE (val
) == INDIRECT_REF
2694 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2695 && variably_modified_type_p (type
, NULL
)))
2698 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
2699 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
/* Find the nesting level that declared VAR.  */
2702 context
= decl_function_context (var
);
2703 for (i
= root
; i
; i
= i
->outer
)
2704 if (i
->context
== context
)
2710 /* Fully expand value expressions. This avoids having debug variables
2711 only referenced from them and that can be swept during GC. */
2714 tree t
= (tree
) *slot
;
2715 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
2716 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
2719 id
.cb
.src_fn
= i
->context
;
2720 id
.cb
.dst_fn
= i
->context
;
2721 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2723 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
/* Walk through anonymous pointer layers in lock-step with the old
   type so the TYPE_NAME comparison below lines up.  */
2724 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2726 newt
= TREE_TYPE (newt
);
2727 type
= TREE_TYPE (type
);
2729 if (TYPE_NAME (newt
)
2730 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2731 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2733 && TYPE_NAME (newt
) == TYPE_NAME (type
))
2734 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2736 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
2737 if (val
!= DECL_VALUE_EXPR (var
))
2738 SET_DECL_VALUE_EXPR (var
, val
);
2741 delete id
.cb
.decl_map
;
2744 /* Fold the MEM_REF *E. */
/* hash_set::traverse callback used by finalize_nesting_tree_1; folds
   the MEM_REF in place (CONST_CAST2 strips the const the traversal
   API imposes).  NOTE(review): the return statement is omitted from
   this excerpt.  */
2746 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
2748 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
2749 *ref_p
= fold (*ref_p
);
2753 /* Do "everything else" to clean up or complete state collected by the
2754 various walking passes -- lay out the types and decls, generate code
2755 to initialize the frame decl, store critical expressions in the
2756 struct function for rtl to find. */
/* NOTE(review): many lines (braces, some declarations such as `stmt',
   `x', `p', `field', `adjust', `bind', `scope', `debug_var', and the
   pragma suppressing -Wpadded) are omitted from this excerpt —
   verify against the full file.  */
2759 finalize_nesting_tree_1 (struct nesting_info
*root
)
2761 gimple_seq stmt_list
;
2763 tree context
= root
->context
;
2764 struct function
*sf
;
2768 /* If we created a non-local frame type or decl, we need to lay them
2769 out at this time. */
2770 if (root
->frame_type
)
2772 /* In some cases the frame type will trigger the -Wpadded warning.
2773 This is not helpful; suppress it. */
2774 int save_warn_padded
= warn_padded
;
2778 layout_type (root
->frame_type
);
2779 warn_padded
= save_warn_padded
;
2780 layout_decl (root
->frame_decl
, 0);
2782 /* Remove root->frame_decl from root->new_local_var_chain, so
2783 that we can declare it also in the lexical blocks, which
2784 helps ensure virtual regs that end up appearing in its RTL
2785 expression get substituted in instantiate_virtual_regs(). */
2786 for (adjust
= &root
->new_local_var_chain
;
2787 *adjust
!= root
->frame_decl
;
2788 adjust
= &DECL_CHAIN (*adjust
))
2789 gcc_assert (DECL_CHAIN (*adjust
));
2790 *adjust
= DECL_CHAIN (*adjust
);
2792 DECL_CHAIN (root
->frame_decl
) = NULL_TREE
;
2793 declare_vars (root
->frame_decl
,
2794 gimple_seq_first_stmt (gimple_body (context
)), true);
2797 /* If any parameters were referenced non-locally, then we need to
2798 insert a copy. Likewise, if any variables were referenced by
2799 pointer, we need to initialize the address. */
2800 if (root
->any_parm_remapped
)
2803 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
2807 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
/* A by-pointer field stores &p; otherwise (in the omitted else
   branch) presumably the parameter value itself.  */
2811 if (use_pointer_in_frame (p
))
2812 x
= build_addr (p
, context
);
2816 /* If the assignment is from a non-register the stmt is
2817 not valid gimple. Make it so by using a temporary instead. */
2818 if (!is_gimple_reg (x
)
2819 && is_gimple_reg_type (TREE_TYPE (x
)))
2821 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
2822 x
= init_tmp_var (root
, x
, &gsi
);
/* FRAME.field = x  */
2825 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2826 root
->frame_decl
, field
, NULL_TREE
);
2827 stmt
= gimple_build_assign (y
, x
);
2828 gimple_seq_add_stmt (&stmt_list
, stmt
);
2832 /* If a chain_field was created, then it needs to be initialized
2834 if (root
->chain_field
)
2836 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
2837 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
2838 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
2839 gimple_seq_add_stmt (&stmt_list
, stmt
);
2842 /* If trampolines were created, then we need to initialize them. */
2843 if (root
->any_tramp_created
)
2845 struct nesting_info
*i
;
2846 for (i
= root
->inner
; i
; i
= i
->next
)
2848 tree arg1
, arg2
, arg3
, x
, field
;
2850 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
/* __builtin_init_trampoline (&FRAME.tramp_field, &nested_fn, &FRAME)  */
2854 gcc_assert (DECL_STATIC_CHAIN (i
->context
));
2855 arg3
= build_addr (root
->frame_decl
, context
);
2857 arg2
= build_addr (i
->context
, context
);
2859 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2860 root
->frame_decl
, field
, NULL_TREE
);
2861 arg1
= build_addr (x
, context
);
2863 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
2864 stmt
= gimple_build_call (x
, 3, arg1
, arg2
, arg3
);
2865 gimple_seq_add_stmt (&stmt_list
, stmt
);
2869 /* If we created initialization statements, insert them. */
2873 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
2874 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
2875 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
2876 gimple_bind_set_body (bind
, stmt_list
);
2879 /* If a chain_decl was created, then it needs to be registered with
2880 struct function so that it gets initialized from the static chain
2881 register at the beginning of the function. */
2882 sf
= DECL_STRUCT_FUNCTION (root
->context
);
2883 sf
->static_chain_decl
= root
->chain_decl
;
2885 /* Similarly for the non-local goto save area. */
2886 if (root
->nl_goto_field
)
2888 sf
->nonlocal_goto_save_area
2889 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
2890 sf
->has_nonlocal_label
= 1;
2893 /* Make sure all new local variables get inserted into the
2894 proper BIND_EXPR. */
2895 if (root
->new_local_var_chain
)
2896 declare_vars (root
->new_local_var_chain
,
2897 gimple_seq_first_stmt (gimple_body (root
->context
)),
2900 if (root
->debug_var_chain
)
2905 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
/* Check whether any debug variable has a variably-modified type.  */
2907 for (debug_var
= root
->debug_var_chain
; debug_var
;
2908 debug_var
= DECL_CHAIN (debug_var
))
2909 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
2912 /* If there are any debug decls with variable length types,
2913 remap those types using other debug_var_chain variables. */
2916 struct nesting_copy_body_data id
;
2918 memset (&id
, 0, sizeof (id
));
2919 id
.cb
.copy_decl
= nesting_copy_decl
;
2920 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
2923 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
2924 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
2926 tree type
= TREE_TYPE (debug_var
);
2927 tree newt
, t
= type
;
2928 struct nesting_info
*i
;
/* Find the innermost level whose context the type depends on.  */
2930 for (i
= root
; i
; i
= i
->outer
)
2931 if (variably_modified_type_p (type
, i
->context
))
2937 id
.cb
.src_fn
= i
->context
;
2938 id
.cb
.dst_fn
= i
->context
;
2939 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2941 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
/* Skip anonymous pointer layers, as in remap_vla_decls.  */
2942 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2944 newt
= TREE_TYPE (newt
);
2947 if (TYPE_NAME (newt
)
2948 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2949 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2951 && TYPE_NAME (newt
) == TYPE_NAME (t
))
2952 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2955 delete id
.cb
.decl_map
;
/* Attach debug vars to the outermost bind's block if it has one,
   otherwise chain them onto the function's BLOCK_VARS.  */
2958 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
2959 if (gimple_bind_block (scope
))
2960 declare_vars (root
->debug_var_chain
, scope
, true);
2962 BLOCK_VARS (DECL_INITIAL (root
->context
))
2963 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
2964 root
->debug_var_chain
);
2967 /* Fold the rewritten MEM_REF trees. */
2968 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
2970 /* Dump the translated tree function. */
2973 fputs ("\n\n", dump_file
);
2974 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
/* Run finalize_nesting_tree_1 on every function in the nesting tree
   rooted at ROOT.  */
2979 finalize_nesting_tree (struct nesting_info
*root
)
2981 struct nesting_info
*n
;
2982 FOR_EACH_NEST_INFO (n
, root
)
2983 finalize_nesting_tree_1 (n
)
;
2986 /* Unnest the nodes and pass them to cgraph. */
/* NOTE(review): the node->origin check/unnest call between looking up
   the cgraph node and finalizing is omitted from this excerpt.  */
2989 unnest_nesting_tree_1 (struct nesting_info
*root
)
2991 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
2993 /* For nested functions update the cgraph to reflect unnesting.
2994 We also delay finalizing of these functions up to this point. */
2998 cgraph_node::finalize_function (root
->context
, true);
/* Apply unnest_nesting_tree_1 to every function in the nesting tree
   rooted at ROOT.  */
3003 unnest_nesting_tree (struct nesting_info
*root
)
3005 struct nesting_info
*n
;
3006 FOR_EACH_NEST_INFO (n
, root
)
3007 unnest_nesting_tree_1 (n
)
;
3010 /* Free the data structures allocated during this pass. */
/* Walks the nesting tree in iteration order, deleting each node's
   hash maps.  NOTE(review): the loop structure (presumably a do/while
   over `node' until NULL) and the free of the node itself are omitted
   from this excerpt.  */
3013 free_nesting_tree (struct nesting_info
*root
)
3015 struct nesting_info
*node
, *next
;
3017 node
= iter_nestinfo_start (root
);
3020 next
= iter_nestinfo_next (node
);
3021 delete node
->var_map
;
3022 delete node
->field_map
;
3023 delete node
->mem_refs
;
3030 /* Gimplify a function and all its nested functions. */
/* Recurses over cgraph `nested' siblings; only gimplifies a decl that
   does not already have a GIMPLE body.  */
3032 gimplify_all_functions (struct cgraph_node
*root
)
3034 struct cgraph_node
*iter
;
3035 if (!gimple_body (root
->decl
))
3036 gimplify_function_tree (root
->decl
);
3037 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
3038 gimplify_all_functions (iter
)
;
3041 /* Main entry point for this pass. Process FNDECL and all of its nested
3042 subroutines and turn them into something less tightly bound. */
/* Pipeline: gimplify everything, build the nesting tree, rewrite
   nonlocal and local references, non-local gotos, trampolines and
   calls, then finalize, unnest, and free.
   NOTE(review): the early return when FNDECL has no nested functions
   and the dump_file guards are omitted from this excerpt.  */
3045 lower_nested_functions (tree fndecl
)
3047 struct cgraph_node
*cgn
;
3048 struct nesting_info
*root
;
3050 /* If there are no nested functions, there's nothing to do. */
3051 cgn
= cgraph_node::get (fndecl
);
3055 gimplify_all_functions (cgn
);
3057 dump_file
= dump_begin (TDI_nested
, &dump_flags
);
3059 fprintf (dump_file
, "\n;; Function %s\n\n",
3060 lang_hooks
.decl_printable_name (fndecl
, 2));
3062 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3063 root
= create_nesting_tree (cgn
);
/* First rewrite up-level (nonlocal) references, then references to
   locals captured by inner functions.  */
3065 walk_all_functions (convert_nonlocal_reference_stmt
,
3066 convert_nonlocal_reference_op
,
3068 walk_all_functions (convert_local_reference_stmt
,
3069 convert_local_reference_op
,
3071 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3072 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
3074 convert_all_function_calls (root
);
3075 finalize_nesting_tree (root
);
3076 unnest_nesting_tree (root
);
/* Release pass-local memory.  */
3078 free_nesting_tree (root
);
3079 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
3083 dump_end (TDI_nested
, dump_file
);
3088 #include "gt-tree-nested.h"