/* Nested function decomposition for GIMPLE.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
30 #include "stringpool.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
35 #include "tree-inline.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
/* The object of this pass is to lower the representation of a set of nested
   functions in order to expose all of the gory details of the various
   nonlocal references.  We want to do this sooner rather than later, in
   order to give us more freedom in emitting all of the functions in
   question.

   Back in olden times, when gcc was young, we developed an insanely
   complicated scheme whereby variables which were referenced nonlocally
   were forced to live in the stack of the declaring function, and then
   the nested functions magically discovered where these variables were
   placed.  In order for this scheme to function properly, it required
   that the outer function be partially expanded, then we switch to
   compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   makes it difficult to do whole translation unit optimizations
   involving such functions.

   The implementation here is much more direct.  Everything that can be
   referenced by an inner function is a member of an explicitly created
   structure herein called the "nonlocal frame struct".  The incoming
   static chain for a nested function is a pointer to this struct in
   the parent.  In this way, we settle on known offsets from a known
   base, and so are decoupled from the logic that places objects in the
   function's stack frame.  More importantly, we don't have to wait for
   that to happen -- since the compilation of the inner function is no
   longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable.

   Theory of operation here is very simple.  Iterate over all the
   statements in all the functions (depth first) several times,
   allocating structures and fields on demand.  In general we want to
   examine inner functions first, so that we can avoid making changes
   to outer functions which are unnecessary.

   The order of the passes matters a bit, in that later passes will be
   skipped if it is discovered that the functions don't actually interact
   at all.  That is, they're nested in the lexical sense but could have
   been written as independent functions without change.  */
89 struct nesting_info
*outer
;
90 struct nesting_info
*inner
;
91 struct nesting_info
*next
;
93 hash_map
<tree
, tree
> *field_map
;
94 hash_map
<tree
, tree
> *var_map
;
95 hash_set
<tree
*> *mem_refs
;
96 bitmap suppress_expansion
;
99 tree new_local_var_chain
;
100 tree debug_var_chain
;
108 bool any_parm_remapped
;
109 bool any_tramp_created
;
110 bool any_descr_created
;
111 char static_chain_added
;
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
117 static inline struct nesting_info
*
118 iter_nestinfo_start (struct nesting_info
*root
)
125 static inline struct nesting_info
*
126 iter_nestinfo_next (struct nesting_info
*node
)
129 return iter_nestinfo_start (node
->next
);
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack
;
140 /* We're working in so many different function contexts simultaneously,
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
144 /* Like create_tmp_var, except record the variable for registration at
145 the given nesting level. */
148 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
152 /* If the type is of variable size or a type which must be created by the
153 frontend, something is wrong. Note that we explicitly allow
154 incomplete types here, since we create them ourselves here. */
155 gcc_assert (!TREE_ADDRESSABLE (type
));
156 gcc_assert (!TYPE_SIZE_UNIT (type
)
157 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
159 tmp_var
= create_tmp_var_raw (type
, prefix
);
160 DECL_CONTEXT (tmp_var
) = info
->context
;
161 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
162 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
163 if (TREE_CODE (type
) == COMPLEX_TYPE
164 || TREE_CODE (type
) == VECTOR_TYPE
)
165 DECL_GIMPLE_REG_P (tmp_var
) = 1;
167 info
->new_local_var_chain
= tmp_var
;
172 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
175 build_simple_mem_ref_notrap (tree ptr
)
177 tree t
= build_simple_mem_ref (ptr
);
178 TREE_THIS_NOTRAP (t
) = 1;
182 /* Take the address of EXP to be used within function CONTEXT.
183 Mark it for addressability as necessary. */
186 build_addr (tree exp
)
188 mark_addressable (exp
);
189 return build_fold_addr_expr (exp
);
192 /* Insert FIELD into TYPE, sorted by alignment requirements. */
195 insert_field_into_struct (tree type
, tree field
)
199 DECL_CONTEXT (field
) = type
;
201 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
202 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
205 DECL_CHAIN (field
) = *p
;
208 /* Set correct alignment for frame struct type. */
209 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
210 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
213 /* Build or return the RECORD_TYPE that describes the frame state that is
214 shared between INFO->CONTEXT and its nested functions. This record will
215 not be complete until finalize_nesting_tree; up until that point we'll
216 be adding fields as necessary.
218 We also build the DECL that represents this frame in the function. */
221 get_frame_type (struct nesting_info
*info
)
223 tree type
= info
->frame_type
;
228 type
= make_node (RECORD_TYPE
);
230 name
= concat ("FRAME.",
231 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
233 TYPE_NAME (type
) = get_identifier (name
);
236 info
->frame_type
= type
;
238 /* Do not put info->frame_decl on info->new_local_var_chain,
239 so that we can declare it in the lexical blocks, which
240 makes sure virtual regs that end up appearing in its RTL
241 expression get substituted in instantiate_virtual_regs. */
242 info
->frame_decl
= create_tmp_var_raw (type
, "FRAME");
243 DECL_CONTEXT (info
->frame_decl
) = info
->context
;
244 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
245 DECL_SEEN_IN_BIND_EXPR_P (info
->frame_decl
) = 1;
247 /* ??? Always make it addressable for now, since it is meant to
248 be pointed to by the static chain pointer. This pessimizes
249 when it turns out that no static chains are needed because
250 the nested functions referencing non-local variables are not
251 reachable, but the true pessimization is to create the non-
252 local frame structure in the first place. */
253 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
259 /* Return true if DECL should be referenced by pointer in the non-local frame
263 use_pointer_in_frame (tree decl
)
265 if (TREE_CODE (decl
) == PARM_DECL
)
267 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268 sized DECLs, and inefficient to copy large aggregates. Don't bother
269 moving anything but scalar parameters. */
270 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
274 /* Variable-sized DECLs can only come from OMP clauses at this point
275 since the gimplifier has already turned the regular variables into
276 pointers. Do the same as the gimplifier. */
277 return !DECL_SIZE (decl
) || TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
;
281 /* Given DECL, a non-locally accessed variable, find or create a field
282 in the non-local frame structure for the given nesting context. */
285 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
286 enum insert_option insert
)
288 gcc_checking_assert (decl_function_context (decl
) == info
->context
);
290 if (insert
== NO_INSERT
)
292 tree
*slot
= info
->field_map
->get (decl
);
293 return slot
? *slot
: NULL_TREE
;
296 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
299 tree type
= get_frame_type (info
);
300 tree field
= make_node (FIELD_DECL
);
301 DECL_NAME (field
) = DECL_NAME (decl
);
303 if (use_pointer_in_frame (decl
))
305 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
306 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
307 DECL_NONADDRESSABLE_P (field
) = 1;
311 TREE_TYPE (field
) = TREE_TYPE (decl
);
312 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
313 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
314 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
315 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
316 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
317 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
319 /* Declare the transformation and adjust the original DECL. For a
320 variable or for a parameter when not optimizing, we make it point
321 to the field in the frame directly. For a parameter, we don't do
322 it when optimizing because the variable tracking pass will already
324 if (VAR_P (decl
) || !optimize
)
327 = build3 (COMPONENT_REF
, TREE_TYPE (field
), info
->frame_decl
,
330 /* If the next declaration is a PARM_DECL pointing to the DECL,
331 we need to adjust its VALUE_EXPR directly, since chains of
332 VALUE_EXPRs run afoul of garbage collection. This occurs
333 in Ada for Out parameters that aren't copied in. */
334 tree next
= DECL_CHAIN (decl
);
336 && TREE_CODE (next
) == PARM_DECL
337 && DECL_HAS_VALUE_EXPR_P (next
)
338 && DECL_VALUE_EXPR (next
) == decl
)
339 SET_DECL_VALUE_EXPR (next
, x
);
341 SET_DECL_VALUE_EXPR (decl
, x
);
342 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
346 insert_field_into_struct (type
, field
);
349 if (TREE_CODE (decl
) == PARM_DECL
)
350 info
->any_parm_remapped
= true;
356 /* Build or return the variable that holds the static chain within
357 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
360 get_chain_decl (struct nesting_info
*info
)
362 tree decl
= info
->chain_decl
;
368 type
= get_frame_type (info
->outer
);
369 type
= build_pointer_type (type
);
371 /* Note that this variable is *not* entered into any BIND_EXPR;
372 the construction of this variable is handled specially in
373 expand_function_start and initialize_inlined_parameters.
374 Note also that it's represented as a parameter. This is more
375 close to the truth, since the initial value does come from
377 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
378 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
379 DECL_ARTIFICIAL (decl
) = 1;
380 DECL_IGNORED_P (decl
) = 1;
381 TREE_USED (decl
) = 1;
382 DECL_CONTEXT (decl
) = info
->context
;
383 DECL_ARG_TYPE (decl
) = type
;
385 /* Tell tree-inline.c that we never write to this variable, so
386 it can copy-prop the replacement value immediately. */
387 TREE_READONLY (decl
) = 1;
389 info
->chain_decl
= decl
;
392 && (dump_flags
& TDF_DETAILS
)
393 && !DECL_STATIC_CHAIN (info
->context
))
394 fprintf (dump_file
, "Setting static-chain for %s\n",
395 lang_hooks
.decl_printable_name (info
->context
, 2));
397 DECL_STATIC_CHAIN (info
->context
) = 1;
402 /* Build or return the field within the non-local frame state that holds
403 the static chain for INFO->CONTEXT. This is the way to walk back up
404 multiple nesting levels. */
407 get_chain_field (struct nesting_info
*info
)
409 tree field
= info
->chain_field
;
413 tree type
= build_pointer_type (get_frame_type (info
->outer
));
415 field
= make_node (FIELD_DECL
);
416 DECL_NAME (field
) = get_identifier ("__chain");
417 TREE_TYPE (field
) = type
;
418 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
419 DECL_NONADDRESSABLE_P (field
) = 1;
421 insert_field_into_struct (get_frame_type (info
), field
);
423 info
->chain_field
= field
;
426 && (dump_flags
& TDF_DETAILS
)
427 && !DECL_STATIC_CHAIN (info
->context
))
428 fprintf (dump_file
, "Setting static-chain for %s\n",
429 lang_hooks
.decl_printable_name (info
->context
, 2));
431 DECL_STATIC_CHAIN (info
->context
) = 1;
436 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
439 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
444 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
445 gimple_call_set_lhs (call
, t
);
446 if (! gsi_end_p (*gsi
))
447 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
448 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
454 /* Copy EXP into a temporary. Allocate the temporary in the context of
455 INFO and insert the initialization statement before GSI. */
458 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
463 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
464 stmt
= gimple_build_assign (t
, exp
);
465 if (! gsi_end_p (*gsi
))
466 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
467 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
473 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
476 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
477 gimple_stmt_iterator
*gsi
)
479 if (is_gimple_val (exp
))
482 return init_tmp_var (info
, exp
, gsi
);
485 /* Similarly, but copy from the temporary and insert the statement
486 after the iterator. */
489 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
494 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
495 stmt
= gimple_build_assign (exp
, t
);
496 if (! gsi_end_p (*gsi
))
497 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
498 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
503 /* Build or return the type used to represent a nested function trampoline. */
505 static GTY(()) tree trampoline_type
;
508 get_trampoline_type (struct nesting_info
*info
)
510 unsigned align
, size
;
514 return trampoline_type
;
516 align
= TRAMPOLINE_ALIGNMENT
;
517 size
= TRAMPOLINE_SIZE
;
519 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
520 then allocate extra space so that we can do dynamic alignment. */
521 if (align
> STACK_BOUNDARY
)
523 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
524 align
= STACK_BOUNDARY
;
527 t
= build_index_type (size_int (size
- 1));
528 t
= build_array_type (char_type_node
, t
);
529 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
530 FIELD_DECL
, get_identifier ("__data"), t
);
531 SET_DECL_ALIGN (t
, align
);
532 DECL_USER_ALIGN (t
) = 1;
534 trampoline_type
= make_node (RECORD_TYPE
);
535 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
536 TYPE_FIELDS (trampoline_type
) = t
;
537 layout_type (trampoline_type
);
538 DECL_CONTEXT (t
) = trampoline_type
;
540 return trampoline_type
;
543 /* Build or return the type used to represent a nested function descriptor. */
545 static GTY(()) tree descriptor_type
;
548 get_descriptor_type (struct nesting_info
*info
)
550 /* The base alignment is that of a function. */
551 const unsigned align
= FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
);
555 return descriptor_type
;
557 t
= build_index_type (integer_one_node
);
558 t
= build_array_type (ptr_type_node
, t
);
559 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
560 FIELD_DECL
, get_identifier ("__data"), t
);
561 SET_DECL_ALIGN (t
, MAX (TYPE_ALIGN (ptr_type_node
), align
));
562 DECL_USER_ALIGN (t
) = 1;
564 descriptor_type
= make_node (RECORD_TYPE
);
565 TYPE_NAME (descriptor_type
) = get_identifier ("__builtin_descriptor");
566 TYPE_FIELDS (descriptor_type
) = t
;
567 layout_type (descriptor_type
);
568 DECL_CONTEXT (t
) = descriptor_type
;
570 return descriptor_type
;
573 /* Given DECL, a nested function, find or create an element in the
574 var map for this function. */
577 lookup_element_for_decl (struct nesting_info
*info
, tree decl
,
578 enum insert_option insert
)
580 if (insert
== NO_INSERT
)
582 tree
*slot
= info
->var_map
->get (decl
);
583 return slot
? *slot
: NULL_TREE
;
586 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
588 *slot
= build_tree_list (NULL_TREE
, NULL_TREE
);
593 /* Given DECL, a nested function, create a field in the non-local
594 frame structure for this function. */
597 create_field_for_decl (struct nesting_info
*info
, tree decl
, tree type
)
599 tree field
= make_node (FIELD_DECL
);
600 DECL_NAME (field
) = DECL_NAME (decl
);
601 TREE_TYPE (field
) = type
;
602 TREE_ADDRESSABLE (field
) = 1;
603 insert_field_into_struct (get_frame_type (info
), field
);
607 /* Given DECL, a nested function, find or create a field in the non-local
608 frame structure for a trampoline for this function. */
611 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
612 enum insert_option insert
)
616 elt
= lookup_element_for_decl (info
, decl
, insert
);
620 field
= TREE_PURPOSE (elt
);
622 if (!field
&& insert
== INSERT
)
624 field
= create_field_for_decl (info
, decl
, get_trampoline_type (info
));
625 TREE_PURPOSE (elt
) = field
;
626 info
->any_tramp_created
= true;
632 /* Given DECL, a nested function, find or create a field in the non-local
633 frame structure for a descriptor for this function. */
636 lookup_descr_for_decl (struct nesting_info
*info
, tree decl
,
637 enum insert_option insert
)
641 elt
= lookup_element_for_decl (info
, decl
, insert
);
645 field
= TREE_VALUE (elt
);
647 if (!field
&& insert
== INSERT
)
649 field
= create_field_for_decl (info
, decl
, get_descriptor_type (info
));
650 TREE_VALUE (elt
) = field
;
651 info
->any_descr_created
= true;
657 /* Build or return the field within the non-local frame state that holds
658 the non-local goto "jmp_buf". The buffer itself is maintained by the
659 rtl middle-end as dynamic stack space is allocated. */
662 get_nl_goto_field (struct nesting_info
*info
)
664 tree field
= info
->nl_goto_field
;
670 /* For __builtin_nonlocal_goto, we need N words. The first is the
671 frame pointer, the rest is for the target's stack pointer save
672 area. The number of words is controlled by STACK_SAVEAREA_MODE;
673 not the best interface, but it'll do for now. */
674 if (Pmode
== ptr_mode
)
675 type
= ptr_type_node
;
677 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
680 = as_a
<scalar_int_mode
> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
681 size
= GET_MODE_SIZE (mode
);
682 size
= size
/ GET_MODE_SIZE (Pmode
);
685 type
= build_array_type
686 (type
, build_index_type (size_int (size
)));
688 field
= make_node (FIELD_DECL
);
689 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
690 TREE_TYPE (field
) = type
;
691 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
692 TREE_ADDRESSABLE (field
) = 1;
694 insert_field_into_struct (get_frame_type (info
), field
);
696 info
->nl_goto_field
= field
;
702 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
705 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
706 struct nesting_info
*info
, gimple_seq
*pseq
)
708 struct walk_stmt_info wi
;
710 memset (&wi
, 0, sizeof (wi
));
713 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
717 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
720 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
721 struct nesting_info
*info
)
723 gimple_seq body
= gimple_body (info
->context
);
724 walk_body (callback_stmt
, callback_op
, info
, &body
);
725 gimple_set_body (info
->context
, body
);
728 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
731 walk_gimple_omp_for (gomp_for
*for_stmt
,
732 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
733 struct nesting_info
*info
)
735 struct walk_stmt_info wi
;
740 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
743 memset (&wi
, 0, sizeof (wi
));
745 wi
.gsi
= gsi_last (seq
);
747 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
750 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
754 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
759 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
762 t
= gimple_omp_for_incr (for_stmt
, i
);
763 gcc_assert (BINARY_CLASS_P (t
));
765 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
768 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
771 seq
= gsi_seq (wi
.gsi
);
772 if (!gimple_seq_empty_p (seq
))
774 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
775 annotate_all_with_location (seq
, gimple_location (for_stmt
));
776 gimple_seq_add_seq (&pre_body
, seq
);
777 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
781 /* Similarly for ROOT and all functions nested underneath, depth first. */
784 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
785 struct nesting_info
*root
)
787 struct nesting_info
*n
;
788 FOR_EACH_NEST_INFO (n
, root
)
789 walk_function (callback_stmt
, callback_op
, n
);
793 /* We have to check for a fairly pathological case. The operands of function
794 nested function are to be interpreted in the context of the enclosing
795 function. So if any are variably-sized, they will get remapped when the
796 enclosing function is inlined. But that remapping would also have to be
797 done in the types of the PARM_DECLs of the nested function, meaning the
798 argument types of that function will disagree with the arguments in the
799 calls to that function. So we'd either have to make a copy of the nested
800 function corresponding to each time the enclosing function was inlined or
801 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
802 function. The former is not practical. The latter would still require
803 detecting this case to know when to add the conversions. So, for now at
804 least, we don't inline such an enclosing function.
806 We have to do that check recursively, so here return indicating whether
807 FNDECL has such a nested function. ORIG_FN is the function we were
808 trying to inline to use for checking whether any argument is variably
809 modified by anything in it.
811 It would be better to do this in tree-inline.c so that we could give
812 the appropriate warning for why a function can't be inlined, but that's
813 too late since the nesting structure has already been flattened and
814 adding a flag just to record this fact seems a waste of a flag. */
817 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
819 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
822 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
824 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
825 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
828 if (check_for_nested_with_variably_modified (cgn
->decl
,
836 /* Construct our local datastructure describing the function nesting
837 tree rooted by CGN. */
839 static struct nesting_info
*
840 create_nesting_tree (struct cgraph_node
*cgn
)
842 struct nesting_info
*info
= XCNEW (struct nesting_info
);
843 info
->field_map
= new hash_map
<tree
, tree
>;
844 info
->var_map
= new hash_map
<tree
, tree
>;
845 info
->mem_refs
= new hash_set
<tree
*>;
846 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
847 info
->context
= cgn
->decl
;
848 info
->thunk_p
= cgn
->thunk
.thunk_p
;
850 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
852 struct nesting_info
*sub
= create_nesting_tree (cgn
);
854 sub
->next
= info
->inner
;
858 /* See discussion at check_for_nested_with_variably_modified for a
859 discussion of why this has to be here. */
860 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
861 DECL_UNINLINABLE (info
->context
) = true;
866 /* Return an expression computing the static chain for TARGET_CONTEXT
867 from INFO->CONTEXT. Insert any necessary computations before TSI. */
870 get_static_chain (struct nesting_info
*info
, tree target_context
,
871 gimple_stmt_iterator
*gsi
)
873 struct nesting_info
*i
;
876 if (info
->context
== target_context
)
878 x
= build_addr (info
->frame_decl
);
879 info
->static_chain_added
|= 1;
883 x
= get_chain_decl (info
);
884 info
->static_chain_added
|= 2;
886 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
888 tree field
= get_chain_field (i
);
890 x
= build_simple_mem_ref_notrap (x
);
891 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
892 x
= init_tmp_var (info
, x
, gsi
);
900 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
901 frame as seen from INFO->CONTEXT. Insert any necessary computations
905 get_frame_field (struct nesting_info
*info
, tree target_context
,
906 tree field
, gimple_stmt_iterator
*gsi
)
908 struct nesting_info
*i
;
911 if (info
->context
== target_context
)
913 /* Make sure frame_decl gets created. */
914 (void) get_frame_type (info
);
915 x
= info
->frame_decl
;
916 info
->static_chain_added
|= 1;
920 x
= get_chain_decl (info
);
921 info
->static_chain_added
|= 2;
923 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
925 tree field
= get_chain_field (i
);
927 x
= build_simple_mem_ref_notrap (x
);
928 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
929 x
= init_tmp_var (info
, x
, gsi
);
932 x
= build_simple_mem_ref_notrap (x
);
935 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
939 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
941 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
942 in the nested function with DECL_VALUE_EXPR set to reference the true
943 variable in the parent function. This is used both for debug info
944 and in OMP lowering. */
947 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
950 struct nesting_info
*i
;
951 tree x
, field
, new_decl
;
953 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
958 target_context
= decl_function_context (decl
);
960 /* A copy of the code in get_frame_field, but without the temporaries. */
961 if (info
->context
== target_context
)
963 /* Make sure frame_decl gets created. */
964 (void) get_frame_type (info
);
965 x
= info
->frame_decl
;
967 info
->static_chain_added
|= 1;
971 x
= get_chain_decl (info
);
972 info
->static_chain_added
|= 2;
973 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
975 field
= get_chain_field (i
);
976 x
= build_simple_mem_ref_notrap (x
);
977 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
979 x
= build_simple_mem_ref_notrap (x
);
982 field
= lookup_field_for_decl (i
, decl
, INSERT
);
983 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
984 if (use_pointer_in_frame (decl
))
985 x
= build_simple_mem_ref_notrap (x
);
987 /* ??? We should be remapping types as well, surely. */
988 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
989 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
990 DECL_CONTEXT (new_decl
) = info
->context
;
991 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
992 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
993 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
994 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
995 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
996 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
997 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
998 if ((TREE_CODE (decl
) == PARM_DECL
999 || TREE_CODE (decl
) == RESULT_DECL
1001 && DECL_BY_REFERENCE (decl
))
1002 DECL_BY_REFERENCE (new_decl
) = 1;
1004 SET_DECL_VALUE_EXPR (new_decl
, x
);
1005 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1008 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1009 info
->debug_var_chain
= new_decl
;
1012 && info
->context
!= target_context
1013 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
1014 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
1020 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1021 and PARM_DECLs that belong to outer functions.
1023 The rewrite will involve some number of structure accesses back up
1024 the static chain. E.g. for a variable FOO up one nesting level it'll
1025 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1026 indirections apply to decls for which use_pointer_in_frame is true. */
1029 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1031 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1032 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1036 switch (TREE_CODE (t
))
1039 /* Non-automatic variables are never processed. */
1040 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1046 tree x
, target_context
= decl_function_context (t
);
1048 if (info
->context
== target_context
)
1053 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1054 x
= get_nonlocal_debug_decl (info
, t
);
1057 struct nesting_info
*i
= info
;
1058 while (i
&& i
->context
!= target_context
)
1060 /* If none of the outer contexts is the target context, this means
1061 that the VAR or PARM_DECL is referenced in a wrong context. */
1063 internal_error ("%s from %s referenced in %s",
1064 IDENTIFIER_POINTER (DECL_NAME (t
)),
1065 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
1066 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
1068 x
= lookup_field_for_decl (i
, t
, INSERT
);
1069 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1070 if (use_pointer_in_frame (t
))
1072 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1073 x
= build_simple_mem_ref_notrap (x
);
1080 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1082 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1090 /* We're taking the address of a label from a parent function, but
1091 this is not itself a non-local goto. Mark the label such that it
1092 will not be deleted, much as we would with a label address in
1094 if (decl_function_context (t
) != info
->context
)
1095 FORCED_LABEL (t
) = 1;
1100 bool save_val_only
= wi
->val_only
;
1102 wi
->val_only
= false;
1104 wi
->changed
= false;
1105 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
1106 wi
->val_only
= true;
1112 /* If we changed anything, we might no longer be directly
1113 referencing a decl. */
1114 save_context
= current_function_decl
;
1115 current_function_decl
= info
->context
;
1116 recompute_tree_invariant_for_addr_expr (t
);
1117 current_function_decl
= save_context
;
1119 /* If the callback converted the address argument in a context
1120 where we only accept variables (and min_invariant, presumably),
1121 then compute the address into a temporary. */
1123 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1133 case ARRAY_RANGE_REF
:
1135 /* Go down this entire nest and just look at the final prefix and
1136 anything that describes the references. Otherwise, we lose track
1137 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1138 wi
->val_only
= true;
1140 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1142 if (TREE_CODE (t
) == COMPONENT_REF
)
1143 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1145 else if (TREE_CODE (t
) == ARRAY_REF
1146 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1148 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1150 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1152 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1156 wi
->val_only
= false;
1157 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1160 case VIEW_CONVERT_EXPR
:
1161 /* Just request to look at the subtrees, leaving val_only and lhs
1162 untouched. This might actually be for !val_only + lhs, in which
1163 case we don't want to force a replacement by a temporary. */
1168 if (!IS_TYPE_OR_DECL_P (t
))
1171 wi
->val_only
= true;
1180 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1181 struct walk_stmt_info
*);
1183 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1184 and PARM_DECLs that belong to outer functions. */
1187 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1189 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1190 bool need_chain
= false, need_stmts
= false;
1193 bitmap new_suppress
;
1195 new_suppress
= BITMAP_GGC_ALLOC ();
1196 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1198 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1200 switch (OMP_CLAUSE_CODE (clause
))
1202 case OMP_CLAUSE_REDUCTION
:
1203 case OMP_CLAUSE_IN_REDUCTION
:
1204 case OMP_CLAUSE_TASK_REDUCTION
:
1205 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1207 goto do_decl_clause
;
1209 case OMP_CLAUSE_LASTPRIVATE
:
1210 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1212 goto do_decl_clause
;
1214 case OMP_CLAUSE_LINEAR
:
1215 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1217 wi
->val_only
= true;
1219 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1221 goto do_decl_clause
;
1223 case OMP_CLAUSE_PRIVATE
:
1224 case OMP_CLAUSE_FIRSTPRIVATE
:
1225 case OMP_CLAUSE_COPYPRIVATE
:
1226 case OMP_CLAUSE_SHARED
:
1227 case OMP_CLAUSE_TO_DECLARE
:
1228 case OMP_CLAUSE_LINK
:
1229 case OMP_CLAUSE_USE_DEVICE_PTR
:
1230 case OMP_CLAUSE_IS_DEVICE_PTR
:
1232 decl
= OMP_CLAUSE_DECL (clause
);
1234 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1236 if (decl_function_context (decl
) != info
->context
)
1238 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1239 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1240 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1241 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1242 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1247 case OMP_CLAUSE_SCHEDULE
:
1248 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1251 case OMP_CLAUSE_FINAL
:
1253 case OMP_CLAUSE_NUM_THREADS
:
1254 case OMP_CLAUSE_DEPEND
:
1255 case OMP_CLAUSE_DEVICE
:
1256 case OMP_CLAUSE_NUM_TEAMS
:
1257 case OMP_CLAUSE_THREAD_LIMIT
:
1258 case OMP_CLAUSE_SAFELEN
:
1259 case OMP_CLAUSE_SIMDLEN
:
1260 case OMP_CLAUSE_PRIORITY
:
1261 case OMP_CLAUSE_GRAINSIZE
:
1262 case OMP_CLAUSE_NUM_TASKS
:
1263 case OMP_CLAUSE_HINT
:
1264 case OMP_CLAUSE_NUM_GANGS
:
1265 case OMP_CLAUSE_NUM_WORKERS
:
1266 case OMP_CLAUSE_VECTOR_LENGTH
:
1267 case OMP_CLAUSE_GANG
:
1268 case OMP_CLAUSE_WORKER
:
1269 case OMP_CLAUSE_VECTOR
:
1270 case OMP_CLAUSE_ASYNC
:
1271 case OMP_CLAUSE_WAIT
:
1272 /* Several OpenACC clauses have optional arguments. Check if they
1274 if (OMP_CLAUSE_OPERAND (clause
, 0))
1276 wi
->val_only
= true;
1278 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1282 /* The gang clause accepts two arguments. */
1283 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1284 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1286 wi
->val_only
= true;
1288 convert_nonlocal_reference_op
1289 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1293 case OMP_CLAUSE_DIST_SCHEDULE
:
1294 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1296 wi
->val_only
= true;
1298 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1303 case OMP_CLAUSE_MAP
:
1305 case OMP_CLAUSE_FROM
:
1306 if (OMP_CLAUSE_SIZE (clause
))
1308 wi
->val_only
= true;
1310 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1313 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1314 goto do_decl_clause
;
1315 wi
->val_only
= true;
1317 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1321 case OMP_CLAUSE_ALIGNED
:
1322 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1324 wi
->val_only
= true;
1326 convert_nonlocal_reference_op
1327 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1330 case OMP_CLAUSE_NONTEMPORAL
:
1331 /* Like do_decl_clause, but don't add any suppression. */
1332 decl
= OMP_CLAUSE_DECL (clause
);
1334 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1336 if (decl_function_context (decl
) != info
->context
)
1338 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1343 case OMP_CLAUSE_NOWAIT
:
1344 case OMP_CLAUSE_ORDERED
:
1345 case OMP_CLAUSE_DEFAULT
:
1346 case OMP_CLAUSE_COPYIN
:
1347 case OMP_CLAUSE_COLLAPSE
:
1348 case OMP_CLAUSE_TILE
:
1349 case OMP_CLAUSE_UNTIED
:
1350 case OMP_CLAUSE_MERGEABLE
:
1351 case OMP_CLAUSE_PROC_BIND
:
1352 case OMP_CLAUSE_NOGROUP
:
1353 case OMP_CLAUSE_THREADS
:
1354 case OMP_CLAUSE_SIMD
:
1355 case OMP_CLAUSE_DEFAULTMAP
:
1356 case OMP_CLAUSE_ORDER
:
1357 case OMP_CLAUSE_SEQ
:
1358 case OMP_CLAUSE_INDEPENDENT
:
1359 case OMP_CLAUSE_AUTO
:
1360 case OMP_CLAUSE_IF_PRESENT
:
1361 case OMP_CLAUSE_FINALIZE
:
1362 case OMP_CLAUSE__CONDTEMP_
:
1363 case OMP_CLAUSE__SCANTEMP_
:
1366 /* The following clause belongs to the OpenACC cache directive, which
1367 is discarded during gimplification. */
1368 case OMP_CLAUSE__CACHE_
:
1369 /* The following clauses are only allowed in the OpenMP declare simd
1370 directive, so not seen here. */
1371 case OMP_CLAUSE_UNIFORM
:
1372 case OMP_CLAUSE_INBRANCH
:
1373 case OMP_CLAUSE_NOTINBRANCH
:
1374 /* The following clauses are only allowed on OpenMP cancel and
1375 cancellation point directives, which at this point have already
1376 been lowered into a function call. */
1377 case OMP_CLAUSE_FOR
:
1378 case OMP_CLAUSE_PARALLEL
:
1379 case OMP_CLAUSE_SECTIONS
:
1380 case OMP_CLAUSE_TASKGROUP
:
1381 /* The following clauses are only added during OMP lowering; nested
1382 function decomposition happens before that. */
1383 case OMP_CLAUSE__LOOPTEMP_
:
1384 case OMP_CLAUSE__REDUCTEMP_
:
1385 case OMP_CLAUSE__SIMDUID_
:
1386 case OMP_CLAUSE__GRIDDIM_
:
1387 case OMP_CLAUSE__SIMT_
:
1388 /* Anything else. */
1394 info
->suppress_expansion
= new_suppress
;
1397 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1398 switch (OMP_CLAUSE_CODE (clause
))
1400 case OMP_CLAUSE_REDUCTION
:
1401 case OMP_CLAUSE_IN_REDUCTION
:
1402 case OMP_CLAUSE_TASK_REDUCTION
:
1403 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1406 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1407 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1409 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1410 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1412 walk_body (convert_nonlocal_reference_stmt
,
1413 convert_nonlocal_reference_op
, info
,
1414 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1415 walk_body (convert_nonlocal_reference_stmt
,
1416 convert_nonlocal_reference_op
, info
,
1417 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1418 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1420 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1421 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1426 case OMP_CLAUSE_LASTPRIVATE
:
1427 walk_body (convert_nonlocal_reference_stmt
,
1428 convert_nonlocal_reference_op
, info
,
1429 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1432 case OMP_CLAUSE_LINEAR
:
1433 walk_body (convert_nonlocal_reference_stmt
,
1434 convert_nonlocal_reference_op
, info
,
1435 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1445 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1448 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1450 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1451 type
= TREE_TYPE (type
);
1453 if (TYPE_NAME (type
)
1454 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1455 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1456 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1458 while (POINTER_TYPE_P (type
)
1459 || TREE_CODE (type
) == VECTOR_TYPE
1460 || TREE_CODE (type
) == FUNCTION_TYPE
1461 || TREE_CODE (type
) == METHOD_TYPE
)
1462 type
= TREE_TYPE (type
);
1464 if (TREE_CODE (type
) == ARRAY_TYPE
)
1468 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1469 domain
= TYPE_DOMAIN (type
);
1472 t
= TYPE_MIN_VALUE (domain
);
1473 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1474 && decl_function_context (t
) != info
->context
)
1475 get_nonlocal_debug_decl (info
, t
);
1476 t
= TYPE_MAX_VALUE (domain
);
1477 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1478 && decl_function_context (t
) != info
->context
)
1479 get_nonlocal_debug_decl (info
, t
);
1484 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1485 PARM_DECLs that belong to outer functions. This handles statements
1486 that are not handled via the standard recursion done in
1487 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1488 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1489 operands of STMT have been handled by this function. */
1492 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1493 struct walk_stmt_info
*wi
)
1495 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1496 tree save_local_var_chain
;
1497 bitmap save_suppress
;
1498 gimple
*stmt
= gsi_stmt (*gsi
);
1500 switch (gimple_code (stmt
))
1503 /* Don't walk non-local gotos for now. */
1504 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1506 wi
->val_only
= true;
1508 *handled_ops_p
= false;
1513 case GIMPLE_OMP_TEAMS
:
1514 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
1516 save_suppress
= info
->suppress_expansion
;
1517 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
),
1519 walk_body (convert_nonlocal_reference_stmt
,
1520 convert_nonlocal_reference_op
, info
,
1521 gimple_omp_body_ptr (stmt
));
1522 info
->suppress_expansion
= save_suppress
;
1527 case GIMPLE_OMP_PARALLEL
:
1528 case GIMPLE_OMP_TASK
:
1529 save_suppress
= info
->suppress_expansion
;
1530 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1534 decl
= get_chain_decl (info
);
1535 c
= build_omp_clause (gimple_location (stmt
),
1536 OMP_CLAUSE_FIRSTPRIVATE
);
1537 OMP_CLAUSE_DECL (c
) = decl
;
1538 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1539 gimple_omp_taskreg_set_clauses (stmt
, c
);
1542 save_local_var_chain
= info
->new_local_var_chain
;
1543 info
->new_local_var_chain
= NULL
;
1545 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1546 info
, gimple_omp_body_ptr (stmt
));
1548 if (info
->new_local_var_chain
)
1549 declare_vars (info
->new_local_var_chain
,
1550 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1552 info
->new_local_var_chain
= save_local_var_chain
;
1553 info
->suppress_expansion
= save_suppress
;
1556 case GIMPLE_OMP_FOR
:
1557 save_suppress
= info
->suppress_expansion
;
1558 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1559 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1560 convert_nonlocal_reference_stmt
,
1561 convert_nonlocal_reference_op
, info
);
1562 walk_body (convert_nonlocal_reference_stmt
,
1563 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1564 info
->suppress_expansion
= save_suppress
;
1567 case GIMPLE_OMP_SECTIONS
:
1568 save_suppress
= info
->suppress_expansion
;
1569 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1570 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1571 info
, gimple_omp_body_ptr (stmt
));
1572 info
->suppress_expansion
= save_suppress
;
1575 case GIMPLE_OMP_SINGLE
:
1576 save_suppress
= info
->suppress_expansion
;
1577 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1578 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1579 info
, gimple_omp_body_ptr (stmt
));
1580 info
->suppress_expansion
= save_suppress
;
1583 case GIMPLE_OMP_TASKGROUP
:
1584 save_suppress
= info
->suppress_expansion
;
1585 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
1586 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1587 info
, gimple_omp_body_ptr (stmt
));
1588 info
->suppress_expansion
= save_suppress
;
1591 case GIMPLE_OMP_TARGET
:
1592 if (!is_gimple_omp_offloaded (stmt
))
1594 save_suppress
= info
->suppress_expansion
;
1595 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1597 info
->suppress_expansion
= save_suppress
;
1598 walk_body (convert_nonlocal_reference_stmt
,
1599 convert_nonlocal_reference_op
, info
,
1600 gimple_omp_body_ptr (stmt
));
1603 save_suppress
= info
->suppress_expansion
;
1604 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1608 decl
= get_chain_decl (info
);
1609 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1610 OMP_CLAUSE_DECL (c
) = decl
;
1611 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1612 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1613 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1614 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1617 save_local_var_chain
= info
->new_local_var_chain
;
1618 info
->new_local_var_chain
= NULL
;
1620 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1621 info
, gimple_omp_body_ptr (stmt
));
1623 if (info
->new_local_var_chain
)
1624 declare_vars (info
->new_local_var_chain
,
1625 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1627 info
->new_local_var_chain
= save_local_var_chain
;
1628 info
->suppress_expansion
= save_suppress
;
1631 case GIMPLE_OMP_SECTION
:
1632 case GIMPLE_OMP_MASTER
:
1633 case GIMPLE_OMP_ORDERED
:
1634 case GIMPLE_OMP_SCAN
:
1635 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1636 info
, gimple_omp_body_ptr (stmt
));
1641 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1643 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1644 if (TREE_CODE (var
) == NAMELIST_DECL
)
1646 /* Adjust decls mentioned in NAMELIST_DECL. */
1647 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1651 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1654 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1656 if (decl_function_context (decl
) != info
->context
)
1657 CONSTRUCTOR_ELT (decls
, i
)->value
1658 = get_nonlocal_debug_decl (info
, decl
);
1662 *handled_ops_p
= false;
1666 wi
->val_only
= true;
1668 *handled_ops_p
= false;
1672 if (gimple_clobber_p (stmt
))
1674 tree lhs
= gimple_assign_lhs (stmt
);
1676 && !(TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
1677 && decl_function_context (lhs
) != info
->context
)
1679 gsi_replace (gsi
, gimple_build_nop (), true);
1683 *handled_ops_p
= false;
1687 /* For every other statement that we are not interested in
1688 handling here, let the walker traverse the operands. */
1689 *handled_ops_p
= false;
1693 /* We have handled all of STMT operands, no need to traverse the operands. */
1694 *handled_ops_p
= true;
1699 /* A subroutine of convert_local_reference. Create a local variable
1700 in the parent function with DECL_VALUE_EXPR set to reference the
1701 field in FRAME. This is used both for debug info and in OMP
1705 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1709 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1713 /* Make sure frame_decl gets created. */
1714 (void) get_frame_type (info
);
1715 x
= info
->frame_decl
;
1716 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1718 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1719 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1720 DECL_CONTEXT (new_decl
) = info
->context
;
1721 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1722 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1723 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1724 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1725 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1726 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1727 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1728 if ((TREE_CODE (decl
) == PARM_DECL
1729 || TREE_CODE (decl
) == RESULT_DECL
1731 && DECL_BY_REFERENCE (decl
))
1732 DECL_BY_REFERENCE (new_decl
) = 1;
1734 SET_DECL_VALUE_EXPR (new_decl
, x
);
1735 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1738 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1739 info
->debug_var_chain
= new_decl
;
1741 /* Do not emit debug info twice. */
1742 DECL_IGNORED_P (decl
) = 1;
1748 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1749 and PARM_DECLs that were referenced by inner nested functions.
1750 The rewrite will be a structure reference to the local frame variable. */
1752 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1755 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1757 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1758 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1759 tree t
= *tp
, field
, x
;
1763 switch (TREE_CODE (t
))
1766 /* Non-automatic variables are never processed. */
1767 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1772 if (t
!= info
->frame_decl
&& decl_function_context (t
) == info
->context
)
1774 /* If we copied a pointer to the frame, then the original decl
1775 is used unchanged in the parent function. */
1776 if (use_pointer_in_frame (t
))
1779 /* No need to transform anything if no child references the
1781 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1786 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1787 x
= get_local_debug_decl (info
, t
, field
);
1789 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1794 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1796 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1804 save_val_only
= wi
->val_only
;
1805 wi
->val_only
= false;
1807 wi
->changed
= false;
1808 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1809 wi
->val_only
= save_val_only
;
1811 /* If we converted anything ... */
1816 /* Then the frame decl is now addressable. */
1817 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1819 save_context
= current_function_decl
;
1820 current_function_decl
= info
->context
;
1821 recompute_tree_invariant_for_addr_expr (t
);
1822 current_function_decl
= save_context
;
1824 /* If we are in a context where we only accept values, then
1825 compute the address into a temporary. */
1827 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1836 case ARRAY_RANGE_REF
:
1838 /* Go down this entire nest and just look at the final prefix and
1839 anything that describes the references. Otherwise, we lose track
1840 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1841 save_val_only
= wi
->val_only
;
1842 wi
->val_only
= true;
1844 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1846 if (TREE_CODE (t
) == COMPONENT_REF
)
1847 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1849 else if (TREE_CODE (t
) == ARRAY_REF
1850 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1852 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1854 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1856 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1860 wi
->val_only
= false;
1861 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1862 wi
->val_only
= save_val_only
;
1866 save_val_only
= wi
->val_only
;
1867 wi
->val_only
= true;
1869 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1871 /* We need to re-fold the MEM_REF as component references as
1872 part of a ADDR_EXPR address are not allowed. But we cannot
1873 fold here, as the chain record type is not yet finalized. */
1874 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1875 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1876 info
->mem_refs
->add (tp
);
1877 wi
->val_only
= save_val_only
;
1880 case VIEW_CONVERT_EXPR
:
1881 /* Just request to look at the subtrees, leaving val_only and lhs
1882 untouched. This might actually be for !val_only + lhs, in which
1883 case we don't want to force a replacement by a temporary. */
1888 if (!IS_TYPE_OR_DECL_P (t
))
1891 wi
->val_only
= true;
1900 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1901 struct walk_stmt_info
*);
1903 /* Helper for convert_local_reference. Convert all the references in
1904 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1907 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1909 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1910 bool need_frame
= false, need_stmts
= false;
1913 bitmap new_suppress
;
1915 new_suppress
= BITMAP_GGC_ALLOC ();
1916 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1918 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1920 switch (OMP_CLAUSE_CODE (clause
))
1922 case OMP_CLAUSE_REDUCTION
:
1923 case OMP_CLAUSE_IN_REDUCTION
:
1924 case OMP_CLAUSE_TASK_REDUCTION
:
1925 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1927 goto do_decl_clause
;
1929 case OMP_CLAUSE_LASTPRIVATE
:
1930 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1932 goto do_decl_clause
;
1934 case OMP_CLAUSE_LINEAR
:
1935 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1937 wi
->val_only
= true;
1939 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
1941 goto do_decl_clause
;
1943 case OMP_CLAUSE_PRIVATE
:
1944 case OMP_CLAUSE_FIRSTPRIVATE
:
1945 case OMP_CLAUSE_COPYPRIVATE
:
1946 case OMP_CLAUSE_SHARED
:
1947 case OMP_CLAUSE_TO_DECLARE
:
1948 case OMP_CLAUSE_LINK
:
1949 case OMP_CLAUSE_USE_DEVICE_PTR
:
1950 case OMP_CLAUSE_IS_DEVICE_PTR
:
1952 decl
= OMP_CLAUSE_DECL (clause
);
1954 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1956 if (decl_function_context (decl
) == info
->context
1957 && !use_pointer_in_frame (decl
))
1959 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1962 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1963 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1964 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1965 OMP_CLAUSE_DECL (clause
)
1966 = get_local_debug_decl (info
, decl
, field
);
1972 case OMP_CLAUSE_SCHEDULE
:
1973 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1976 case OMP_CLAUSE_FINAL
:
1978 case OMP_CLAUSE_NUM_THREADS
:
1979 case OMP_CLAUSE_DEPEND
:
1980 case OMP_CLAUSE_DEVICE
:
1981 case OMP_CLAUSE_NUM_TEAMS
:
1982 case OMP_CLAUSE_THREAD_LIMIT
:
1983 case OMP_CLAUSE_SAFELEN
:
1984 case OMP_CLAUSE_SIMDLEN
:
1985 case OMP_CLAUSE_PRIORITY
:
1986 case OMP_CLAUSE_GRAINSIZE
:
1987 case OMP_CLAUSE_NUM_TASKS
:
1988 case OMP_CLAUSE_HINT
:
1989 case OMP_CLAUSE_NUM_GANGS
:
1990 case OMP_CLAUSE_NUM_WORKERS
:
1991 case OMP_CLAUSE_VECTOR_LENGTH
:
1992 case OMP_CLAUSE_GANG
:
1993 case OMP_CLAUSE_WORKER
:
1994 case OMP_CLAUSE_VECTOR
:
1995 case OMP_CLAUSE_ASYNC
:
1996 case OMP_CLAUSE_WAIT
:
1997 /* Several OpenACC clauses have optional arguments. Check if they
1999 if (OMP_CLAUSE_OPERAND (clause
, 0))
2001 wi
->val_only
= true;
2003 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2007 /* The gang clause accepts two arguments. */
2008 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
2009 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
2011 wi
->val_only
= true;
2013 convert_nonlocal_reference_op
2014 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
2018 case OMP_CLAUSE_DIST_SCHEDULE
:
2019 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
2021 wi
->val_only
= true;
2023 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2028 case OMP_CLAUSE_MAP
:
2030 case OMP_CLAUSE_FROM
:
2031 if (OMP_CLAUSE_SIZE (clause
))
2033 wi
->val_only
= true;
2035 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
2038 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
2039 goto do_decl_clause
;
2040 wi
->val_only
= true;
2042 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
2046 case OMP_CLAUSE_ALIGNED
:
2047 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
2049 wi
->val_only
= true;
2051 convert_local_reference_op
2052 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
2055 case OMP_CLAUSE_NONTEMPORAL
:
2056 /* Like do_decl_clause, but don't add any suppression. */
2057 decl
= OMP_CLAUSE_DECL (clause
);
2059 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2061 if (decl_function_context (decl
) == info
->context
2062 && !use_pointer_in_frame (decl
))
2064 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2067 OMP_CLAUSE_DECL (clause
)
2068 = get_local_debug_decl (info
, decl
, field
);
2074 case OMP_CLAUSE_NOWAIT
:
2075 case OMP_CLAUSE_ORDERED
:
2076 case OMP_CLAUSE_DEFAULT
:
2077 case OMP_CLAUSE_COPYIN
:
2078 case OMP_CLAUSE_COLLAPSE
:
2079 case OMP_CLAUSE_TILE
:
2080 case OMP_CLAUSE_UNTIED
:
2081 case OMP_CLAUSE_MERGEABLE
:
2082 case OMP_CLAUSE_PROC_BIND
:
2083 case OMP_CLAUSE_NOGROUP
:
2084 case OMP_CLAUSE_THREADS
:
2085 case OMP_CLAUSE_SIMD
:
2086 case OMP_CLAUSE_DEFAULTMAP
:
2087 case OMP_CLAUSE_ORDER
:
2088 case OMP_CLAUSE_SEQ
:
2089 case OMP_CLAUSE_INDEPENDENT
:
2090 case OMP_CLAUSE_AUTO
:
2091 case OMP_CLAUSE_IF_PRESENT
:
2092 case OMP_CLAUSE_FINALIZE
:
2093 case OMP_CLAUSE__CONDTEMP_
:
2094 case OMP_CLAUSE__SCANTEMP_
:
2097 /* The following clause belongs to the OpenACC cache directive, which
2098 is discarded during gimplification. */
2099 case OMP_CLAUSE__CACHE_
:
2100 /* The following clauses are only allowed in the OpenMP declare simd
2101 directive, so not seen here. */
2102 case OMP_CLAUSE_UNIFORM
:
2103 case OMP_CLAUSE_INBRANCH
:
2104 case OMP_CLAUSE_NOTINBRANCH
:
2105 /* The following clauses are only allowed on OpenMP cancel and
2106 cancellation point directives, which at this point have already
2107 been lowered into a function call. */
2108 case OMP_CLAUSE_FOR
:
2109 case OMP_CLAUSE_PARALLEL
:
2110 case OMP_CLAUSE_SECTIONS
:
2111 case OMP_CLAUSE_TASKGROUP
:
2112 /* The following clauses are only added during OMP lowering; nested
2113 function decomposition happens before that. */
2114 case OMP_CLAUSE__LOOPTEMP_
:
2115 case OMP_CLAUSE__REDUCTEMP_
:
2116 case OMP_CLAUSE__SIMDUID_
:
2117 case OMP_CLAUSE__GRIDDIM_
:
2118 case OMP_CLAUSE__SIMT_
:
2119 /* Anything else. */
2125 info
->suppress_expansion
= new_suppress
;
2128 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2129 switch (OMP_CLAUSE_CODE (clause
))
2131 case OMP_CLAUSE_REDUCTION
:
2132 case OMP_CLAUSE_IN_REDUCTION
:
2133 case OMP_CLAUSE_TASK_REDUCTION
:
2134 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2137 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
2138 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2140 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2141 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2143 walk_body (convert_local_reference_stmt
,
2144 convert_local_reference_op
, info
,
2145 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
2146 walk_body (convert_local_reference_stmt
,
2147 convert_local_reference_op
, info
,
2148 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
2149 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2151 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2152 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2157 case OMP_CLAUSE_LASTPRIVATE
:
2158 walk_body (convert_local_reference_stmt
,
2159 convert_local_reference_op
, info
,
2160 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
2163 case OMP_CLAUSE_LINEAR
:
2164 walk_body (convert_local_reference_stmt
,
2165 convert_local_reference_op
, info
,
2166 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
2177 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2178 and PARM_DECLs that were referenced by inner nested functions.
2179 The rewrite will be a structure reference to the local frame variable. */
2182 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2183 struct walk_stmt_info
*wi
)
2185 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2186 tree save_local_var_chain
;
2187 bitmap save_suppress
;
2188 char save_static_chain_added
;
2189 bool frame_decl_added
;
2190 gimple
*stmt
= gsi_stmt (*gsi
);
2192 switch (gimple_code (stmt
))
2194 case GIMPLE_OMP_TEAMS
:
2195 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2197 save_suppress
= info
->suppress_expansion
;
2198 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2199 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2200 info
, gimple_omp_body_ptr (stmt
));
2201 info
->suppress_expansion
= save_suppress
;
2206 case GIMPLE_OMP_PARALLEL
:
2207 case GIMPLE_OMP_TASK
:
2208 save_suppress
= info
->suppress_expansion
;
2209 frame_decl_added
= false;
2210 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
2213 tree c
= build_omp_clause (gimple_location (stmt
),
2215 (void) get_frame_type (info
);
2216 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2217 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2218 gimple_omp_taskreg_set_clauses (stmt
, c
);
2219 info
->static_chain_added
|= 4;
2220 frame_decl_added
= true;
2223 save_local_var_chain
= info
->new_local_var_chain
;
2224 save_static_chain_added
= info
->static_chain_added
;
2225 info
->new_local_var_chain
= NULL
;
2226 info
->static_chain_added
= 0;
2228 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2229 gimple_omp_body_ptr (stmt
));
2231 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2233 tree c
= build_omp_clause (gimple_location (stmt
),
2235 (void) get_frame_type (info
);
2236 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2237 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2238 info
->static_chain_added
|= 4;
2239 gimple_omp_taskreg_set_clauses (stmt
, c
);
2241 if (info
->new_local_var_chain
)
2242 declare_vars (info
->new_local_var_chain
,
2243 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2244 info
->new_local_var_chain
= save_local_var_chain
;
2245 info
->suppress_expansion
= save_suppress
;
2246 info
->static_chain_added
|= save_static_chain_added
;
2249 case GIMPLE_OMP_FOR
:
2250 save_suppress
= info
->suppress_expansion
;
2251 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2252 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2253 convert_local_reference_stmt
,
2254 convert_local_reference_op
, info
);
2255 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2256 info
, gimple_omp_body_ptr (stmt
));
2257 info
->suppress_expansion
= save_suppress
;
2260 case GIMPLE_OMP_SECTIONS
:
2261 save_suppress
= info
->suppress_expansion
;
2262 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2263 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2264 info
, gimple_omp_body_ptr (stmt
));
2265 info
->suppress_expansion
= save_suppress
;
2268 case GIMPLE_OMP_SINGLE
:
2269 save_suppress
= info
->suppress_expansion
;
2270 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2271 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2272 info
, gimple_omp_body_ptr (stmt
));
2273 info
->suppress_expansion
= save_suppress
;
2276 case GIMPLE_OMP_TASKGROUP
:
2277 save_suppress
= info
->suppress_expansion
;
2278 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
2279 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2280 info
, gimple_omp_body_ptr (stmt
));
2281 info
->suppress_expansion
= save_suppress
;
2284 case GIMPLE_OMP_TARGET
:
2285 if (!is_gimple_omp_offloaded (stmt
))
2287 save_suppress
= info
->suppress_expansion
;
2288 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2289 info
->suppress_expansion
= save_suppress
;
2290 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2291 info
, gimple_omp_body_ptr (stmt
));
2294 save_suppress
= info
->suppress_expansion
;
2295 frame_decl_added
= false;
2296 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2298 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2299 (void) get_frame_type (info
);
2300 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2301 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2302 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2303 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2304 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2305 info
->static_chain_added
|= 4;
2306 frame_decl_added
= true;
2309 save_local_var_chain
= info
->new_local_var_chain
;
2310 save_static_chain_added
= info
->static_chain_added
;
2311 info
->new_local_var_chain
= NULL
;
2312 info
->static_chain_added
= 0;
2314 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2315 gimple_omp_body_ptr (stmt
));
2317 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2319 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2320 (void) get_frame_type (info
);
2321 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2322 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2323 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2324 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2325 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2326 info
->static_chain_added
|= 4;
2329 if (info
->new_local_var_chain
)
2330 declare_vars (info
->new_local_var_chain
,
2331 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2332 info
->new_local_var_chain
= save_local_var_chain
;
2333 info
->suppress_expansion
= save_suppress
;
2334 info
->static_chain_added
|= save_static_chain_added
;
2337 case GIMPLE_OMP_SECTION
:
2338 case GIMPLE_OMP_MASTER
:
2339 case GIMPLE_OMP_ORDERED
:
2340 case GIMPLE_OMP_SCAN
:
2341 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2342 info
, gimple_omp_body_ptr (stmt
));
2346 wi
->val_only
= true;
2348 *handled_ops_p
= false;
2352 if (gimple_clobber_p (stmt
))
2354 tree lhs
= gimple_assign_lhs (stmt
);
2356 && !use_pointer_in_frame (lhs
)
2357 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2359 gsi_replace (gsi
, gimple_build_nop (), true);
2363 *handled_ops_p
= false;
2367 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2369 var
= DECL_CHAIN (var
))
2370 if (TREE_CODE (var
) == NAMELIST_DECL
)
2372 /* Adjust decls mentioned in NAMELIST_DECL. */
2373 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2377 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2380 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2382 if (decl_function_context (decl
) == info
->context
2383 && !use_pointer_in_frame (decl
))
2385 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2388 CONSTRUCTOR_ELT (decls
, i
)->value
2389 = get_local_debug_decl (info
, decl
, field
);
2395 *handled_ops_p
= false;
2399 /* For every other statement that we are not interested in
2400 handling here, let the walker traverse the operands. */
2401 *handled_ops_p
= false;
2405 /* Indicate that we have handled all the operands ourselves. */
2406 *handled_ops_p
= true;
2411 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2412 that reference labels from outer functions. The rewrite will be a
2413 call to __builtin_nonlocal_goto. */
2416 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2417 struct walk_stmt_info
*wi
)
2419 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2420 tree label
, new_label
, target_context
, x
, field
;
2422 gimple
*stmt
= gsi_stmt (*gsi
);
2424 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2426 *handled_ops_p
= false;
2430 label
= gimple_goto_dest (stmt
);
2431 if (TREE_CODE (label
) != LABEL_DECL
)
2433 *handled_ops_p
= false;
2437 target_context
= decl_function_context (label
);
2438 if (target_context
== info
->context
)
2440 *handled_ops_p
= false;
2444 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2447 /* The original user label may also be use for a normal goto, therefore
2448 we must create a new label that will actually receive the abnormal
2449 control transfer. This new label will be marked LABEL_NONLOCAL; this
2450 mark will trigger proper behavior in the cfg, as well as cause the
2451 (hairy target-specific) non-local goto receiver code to be generated
2452 when we expand rtl. Enter this association into var_map so that we
2453 can insert the new label into the IL during a second pass. */
2454 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2457 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2458 DECL_NONLOCAL (new_label
) = 1;
2464 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2465 field
= get_nl_goto_field (i
);
2466 x
= get_frame_field (info
, target_context
, field
, gsi
);
2468 x
= gsi_gimplify_val (info
, x
, gsi
);
2469 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2470 2, build_addr (new_label
), x
);
2471 gsi_replace (gsi
, call
, false);
2473 /* We have handled all of STMT's operands, no need to keep going. */
2474 *handled_ops_p
= true;
2479 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2480 are referenced via nonlocal goto from a nested function. The rewrite
2481 will involve installing a newly generated DECL_NONLOCAL label, and
2482 (potentially) a branch around the rtl gunk that is assumed to be
2483 attached to such a label. */
2486 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2487 struct walk_stmt_info
*wi
)
2489 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2490 tree label
, new_label
;
2491 gimple_stmt_iterator tmp_gsi
;
2492 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2496 *handled_ops_p
= false;
2500 label
= gimple_label_label (stmt
);
2502 tree
*slot
= info
->var_map
->get (label
);
2505 *handled_ops_p
= false;
2509 /* If there's any possibility that the previous statement falls through,
2510 then we must branch around the new non-local label. */
2512 gsi_prev (&tmp_gsi
);
2513 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2515 gimple
*stmt
= gimple_build_goto (label
);
2516 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2519 new_label
= (tree
) *slot
;
2520 stmt
= gimple_build_label (new_label
);
2521 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2523 *handled_ops_p
= true;
2528 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2529 of nested functions that require the use of trampolines. The rewrite
2530 will involve a reference a trampoline generated for the occasion. */
2533 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2535 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2536 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2537 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2542 switch (TREE_CODE (t
))
2546 T.1 = &CHAIN->tramp;
2547 T.2 = __builtin_adjust_trampoline (T.1);
2548 T.3 = (func_type)T.2;
2551 decl
= TREE_OPERAND (t
, 0);
2552 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2555 /* Only need to process nested functions. */
2556 target_context
= decl_function_context (decl
);
2557 if (!target_context
)
2560 /* If the nested function doesn't use a static chain, then
2561 it doesn't need a trampoline. */
2562 if (!DECL_STATIC_CHAIN (decl
))
2565 /* If we don't want a trampoline, then don't build one. */
2566 if (TREE_NO_TRAMPOLINE (t
))
2569 /* Lookup the immediate parent of the callee, as that's where
2570 we need to insert the trampoline. */
2571 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2574 /* Decide whether to generate a descriptor or a trampoline. */
2575 descr
= FUNC_ADDR_BY_DESCRIPTOR (t
) && !flag_trampolines
;
2578 x
= lookup_descr_for_decl (i
, decl
, INSERT
);
2580 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2582 /* Compute the address of the field holding the trampoline. */
2583 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2585 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2587 /* Do machine-specific ugliness. Normally this will involve
2588 computing extra alignment, but it can really be anything. */
2590 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR
);
2592 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2593 call
= gimple_build_call (builtin
, 1, x
);
2594 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2596 /* Cast back to the proper function type. */
2597 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2598 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2604 if (!IS_TYPE_OR_DECL_P (t
))
2613 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2614 to addresses of nested functions that require the use of
2615 trampolines. The rewrite will involve a reference a trampoline
2616 generated for the occasion. */
2619 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2620 struct walk_stmt_info
*wi
)
2622 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2623 gimple
*stmt
= gsi_stmt (*gsi
);
2625 switch (gimple_code (stmt
))
2629 /* Only walk call arguments, lest we generate trampolines for
2631 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2632 for (i
= 0; i
< nargs
; i
++)
2633 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2638 case GIMPLE_OMP_TEAMS
:
2639 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2641 *handled_ops_p
= false;
2646 case GIMPLE_OMP_TARGET
:
2647 if (!is_gimple_omp_offloaded (stmt
))
2649 *handled_ops_p
= false;
2653 case GIMPLE_OMP_PARALLEL
:
2654 case GIMPLE_OMP_TASK
:
2657 tree save_local_var_chain
= info
->new_local_var_chain
;
2658 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2659 info
->new_local_var_chain
= NULL
;
2660 char save_static_chain_added
= info
->static_chain_added
;
2661 info
->static_chain_added
= 0;
2662 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2663 info
, gimple_omp_body_ptr (stmt
));
2664 if (info
->new_local_var_chain
)
2665 declare_vars (info
->new_local_var_chain
,
2666 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2668 for (int i
= 0; i
< 2; i
++)
2671 if ((info
->static_chain_added
& (1 << i
)) == 0)
2673 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2674 /* Don't add CHAIN.* or FRAME.* twice. */
2675 for (c
= gimple_omp_taskreg_clauses (stmt
);
2677 c
= OMP_CLAUSE_CHAIN (c
))
2678 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2679 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2680 && OMP_CLAUSE_DECL (c
) == decl
)
2682 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2684 c
= build_omp_clause (gimple_location (stmt
),
2685 i
? OMP_CLAUSE_FIRSTPRIVATE
2686 : OMP_CLAUSE_SHARED
);
2687 OMP_CLAUSE_DECL (c
) = decl
;
2688 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2689 gimple_omp_taskreg_set_clauses (stmt
, c
);
2693 c
= build_omp_clause (gimple_location (stmt
),
2695 OMP_CLAUSE_DECL (c
) = decl
;
2696 OMP_CLAUSE_SET_MAP_KIND (c
,
2697 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2698 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2699 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2700 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2704 info
->new_local_var_chain
= save_local_var_chain
;
2705 info
->static_chain_added
|= save_static_chain_added
;
2710 *handled_ops_p
= false;
2714 *handled_ops_p
= true;
2720 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2721 that reference nested functions to make sure that the static chain
2722 is set up properly for the call. */
2725 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2726 struct walk_stmt_info
*wi
)
2728 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2729 tree decl
, target_context
;
2730 char save_static_chain_added
;
2732 gimple
*stmt
= gsi_stmt (*gsi
);
2734 switch (gimple_code (stmt
))
2737 if (gimple_call_chain (stmt
))
2739 decl
= gimple_call_fndecl (stmt
);
2742 target_context
= decl_function_context (decl
);
2743 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2745 struct nesting_info
*i
= info
;
2746 while (i
&& i
->context
!= target_context
)
2748 /* If none of the outer contexts is the target context, this means
2749 that the function is called in a wrong context. */
2751 internal_error ("%s from %s called in %s",
2752 IDENTIFIER_POINTER (DECL_NAME (decl
)),
2753 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
2754 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
2756 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2757 get_static_chain (info
, target_context
,
2759 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2763 case GIMPLE_OMP_TEAMS
:
2764 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2766 walk_body (convert_gimple_call
, NULL
, info
,
2767 gimple_omp_body_ptr (stmt
));
2772 case GIMPLE_OMP_PARALLEL
:
2773 case GIMPLE_OMP_TASK
:
2774 save_static_chain_added
= info
->static_chain_added
;
2775 info
->static_chain_added
= 0;
2776 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2777 for (i
= 0; i
< 2; i
++)
2780 if ((info
->static_chain_added
& (1 << i
)) == 0)
2782 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2783 /* Don't add CHAIN.* or FRAME.* twice. */
2784 for (c
= gimple_omp_taskreg_clauses (stmt
);
2786 c
= OMP_CLAUSE_CHAIN (c
))
2787 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2788 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2789 && OMP_CLAUSE_DECL (c
) == decl
)
2793 c
= build_omp_clause (gimple_location (stmt
),
2794 i
? OMP_CLAUSE_FIRSTPRIVATE
2795 : OMP_CLAUSE_SHARED
);
2796 OMP_CLAUSE_DECL (c
) = decl
;
2797 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2798 gimple_omp_taskreg_set_clauses (stmt
, c
);
2801 info
->static_chain_added
|= save_static_chain_added
;
2804 case GIMPLE_OMP_TARGET
:
2805 if (!is_gimple_omp_offloaded (stmt
))
2807 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2810 save_static_chain_added
= info
->static_chain_added
;
2811 info
->static_chain_added
= 0;
2812 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2813 for (i
= 0; i
< 2; i
++)
2816 if ((info
->static_chain_added
& (1 << i
)) == 0)
2818 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2819 /* Don't add CHAIN.* or FRAME.* twice. */
2820 for (c
= gimple_omp_target_clauses (stmt
);
2822 c
= OMP_CLAUSE_CHAIN (c
))
2823 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2824 && OMP_CLAUSE_DECL (c
) == decl
)
2828 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2829 OMP_CLAUSE_DECL (c
) = decl
;
2830 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2831 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2832 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2833 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2837 info
->static_chain_added
|= save_static_chain_added
;
2840 case GIMPLE_OMP_FOR
:
2841 walk_body (convert_gimple_call
, NULL
, info
,
2842 gimple_omp_for_pre_body_ptr (stmt
));
2844 case GIMPLE_OMP_SECTIONS
:
2845 case GIMPLE_OMP_SECTION
:
2846 case GIMPLE_OMP_SINGLE
:
2847 case GIMPLE_OMP_MASTER
:
2848 case GIMPLE_OMP_TASKGROUP
:
2849 case GIMPLE_OMP_ORDERED
:
2850 case GIMPLE_OMP_SCAN
:
2851 case GIMPLE_OMP_CRITICAL
:
2852 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2856 /* Keep looking for other operands. */
2857 *handled_ops_p
= false;
2861 *handled_ops_p
= true;
2865 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2866 call expressions. At the same time, determine if a nested function
2867 actually uses its static chain; if not, remember that. */
2870 convert_all_function_calls (struct nesting_info
*root
)
2872 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2873 struct nesting_info
*n
;
2875 /* First, optimistically clear static_chain for all decls that haven't
2876 used the static chain already for variable access. But always create
2877 it if not optimizing. This makes it possible to reconstruct the static
2878 nesting tree at run time and thus to resolve up-level references from
2879 within the debugger. */
2880 FOR_EACH_NEST_INFO (n
, root
)
2884 tree decl
= n
->context
;
2888 (void) get_frame_type (n
);
2890 (void) get_chain_decl (n
);
2892 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2894 DECL_STATIC_CHAIN (decl
) = 0;
2895 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2896 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2897 lang_hooks
.decl_printable_name (decl
, 2));
2900 DECL_STATIC_CHAIN (decl
) = 1;
2901 chain_count
+= DECL_STATIC_CHAIN (decl
);
2904 FOR_EACH_NEST_INFO (n
, root
)
2907 tree decl
= n
->context
;
2908 tree alias
= cgraph_node::get (decl
)->thunk
.alias
;
2909 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
2912 /* Walk the functions and perform transformations. Note that these
2913 transformations can induce new uses of the static chain, which in turn
2914 require re-examining all users of the decl. */
2915 /* ??? It would make sense to try to use the call graph to speed this up,
2916 but the call graph hasn't really been built yet. Even if it did, we
2917 would still need to iterate in this loop since address-of references
2918 wouldn't show up in the callgraph anyway. */
2922 old_chain_count
= chain_count
;
2926 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2927 fputc ('\n', dump_file
);
2929 FOR_EACH_NEST_INFO (n
, root
)
2933 tree decl
= n
->context
;
2934 walk_function (convert_tramp_reference_stmt
,
2935 convert_tramp_reference_op
, n
);
2936 walk_function (convert_gimple_call
, NULL
, n
);
2937 chain_count
+= DECL_STATIC_CHAIN (decl
);
2940 FOR_EACH_NEST_INFO (n
, root
)
2943 tree decl
= n
->context
;
2944 tree alias
= cgraph_node::get (decl
)->thunk
.alias
;
2945 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
2948 while (chain_count
!= old_chain_count
);
2950 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2951 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
2955 struct nesting_copy_body_data
2958 struct nesting_info
*root
;
2961 /* A helper subroutine for debug_var_chain type remapping. */
2964 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2966 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2967 tree
*slot
= nid
->root
->var_map
->get (decl
);
2970 return (tree
) *slot
;
2972 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2974 tree new_decl
= copy_decl_no_change (decl
, id
);
2975 DECL_ORIGINAL_TYPE (new_decl
)
2976 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2981 || TREE_CODE (decl
) == PARM_DECL
2982 || TREE_CODE (decl
) == RESULT_DECL
)
2985 return copy_decl_no_change (decl
, id
);
2988 /* A helper function for remap_vla_decls. See if *TP contains
2989 some remapped variables. */
2992 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2994 struct nesting_info
*root
= (struct nesting_info
*) data
;
3000 tree
*slot
= root
->var_map
->get (t
);
3008 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3012 remap_vla_decls (tree block
, struct nesting_info
*root
)
3014 tree var
, subblock
, val
, type
;
3015 struct nesting_copy_body_data id
;
3017 for (subblock
= BLOCK_SUBBLOCKS (block
);
3019 subblock
= BLOCK_CHAIN (subblock
))
3020 remap_vla_decls (subblock
, root
);
3022 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3023 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3025 val
= DECL_VALUE_EXPR (var
);
3026 type
= TREE_TYPE (var
);
3028 if (!(TREE_CODE (val
) == INDIRECT_REF
3029 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
3030 && variably_modified_type_p (type
, NULL
)))
3033 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
3034 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3038 if (var
== NULL_TREE
)
3041 memset (&id
, 0, sizeof (id
));
3042 id
.cb
.copy_decl
= nesting_copy_decl
;
3043 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3046 for (; var
; var
= DECL_CHAIN (var
))
3047 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3049 struct nesting_info
*i
;
3052 val
= DECL_VALUE_EXPR (var
);
3053 type
= TREE_TYPE (var
);
3055 if (!(TREE_CODE (val
) == INDIRECT_REF
3056 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
3057 && variably_modified_type_p (type
, NULL
)))
3060 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
3061 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3064 context
= decl_function_context (var
);
3065 for (i
= root
; i
; i
= i
->outer
)
3066 if (i
->context
== context
)
3072 /* Fully expand value expressions. This avoids having debug variables
3073 only referenced from them and that can be swept during GC. */
3076 tree t
= (tree
) *slot
;
3077 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
3078 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
3081 id
.cb
.src_fn
= i
->context
;
3082 id
.cb
.dst_fn
= i
->context
;
3083 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3085 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
3086 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3088 newt
= TREE_TYPE (newt
);
3089 type
= TREE_TYPE (type
);
3091 if (TYPE_NAME (newt
)
3092 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3093 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3095 && TYPE_NAME (newt
) == TYPE_NAME (type
))
3096 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3098 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
3099 if (val
!= DECL_VALUE_EXPR (var
))
3100 SET_DECL_VALUE_EXPR (var
, val
);
3103 delete id
.cb
.decl_map
;
3106 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3110 fixup_vla_decls (tree block
)
3112 for (tree var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3113 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3115 tree val
= DECL_VALUE_EXPR (var
);
3117 if (!(TREE_CODE (val
) == INDIRECT_REF
3118 && VAR_P (TREE_OPERAND (val
, 0))
3119 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val
, 0))))
3122 /* Fully expand value expressions. This avoids having debug variables
3123 only referenced from them and that can be swept during GC. */
3124 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
),
3125 DECL_VALUE_EXPR (TREE_OPERAND (val
, 0)));
3126 SET_DECL_VALUE_EXPR (var
, val
);
3129 for (tree sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= BLOCK_CHAIN (sub
))
3130 fixup_vla_decls (sub
);
3133 /* Fold the MEM_REF *E. */
3135 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
3137 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
3138 *ref_p
= fold (*ref_p
);
3142 /* Given DECL, a nested function, build an initialization call for FIELD,
3143 the trampoline or descriptor for DECL, using FUNC as the function. */
3146 build_init_call_stmt (struct nesting_info
*info
, tree decl
, tree field
,
3149 tree arg1
, arg2
, arg3
, x
;
3151 gcc_assert (DECL_STATIC_CHAIN (decl
));
3152 arg3
= build_addr (info
->frame_decl
);
3154 arg2
= build_addr (decl
);
3156 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3157 info
->frame_decl
, field
, NULL_TREE
);
3158 arg1
= build_addr (x
);
3160 return gimple_build_call (func
, 3, arg1
, arg2
, arg3
);
3163 /* Do "everything else" to clean up or complete state collected by the various
3164 walking passes -- create a field to hold the frame base address, lay out the
3165 types and decls, generate code to initialize the frame decl, store critical
3166 expressions in the struct function for rtl to find. */
3169 finalize_nesting_tree_1 (struct nesting_info
*root
)
3171 gimple_seq stmt_list
= NULL
;
3173 tree context
= root
->context
;
3174 struct function
*sf
;
3179 /* If we created a non-local frame type or decl, we need to lay them
3180 out at this time. */
3181 if (root
->frame_type
)
3183 /* Debugging information needs to compute the frame base address of the
3184 parent frame out of the static chain from the nested frame.
3186 The static chain is the address of the FRAME record, so one could
3187 imagine it would be possible to compute the frame base address just
3188 adding a constant offset to this address. Unfortunately, this is not
3189 possible: if the FRAME object has alignment constraints that are
3190 stronger than the stack, then the offset between the frame base and
3191 the FRAME object will be dynamic.
3193 What we do instead is to append a field to the FRAME object that holds
3194 the frame base address: then debug info just has to fetch this
3197 /* Debugging information will refer to the CFA as the frame base
3198 address: we will do the same here. */
3199 const tree frame_addr_fndecl
3200 = builtin_decl_explicit (BUILT_IN_DWARF_CFA
);
3202 /* Create a field in the FRAME record to hold the frame base address for
3203 this stack frame. Since it will be used only by the debugger, put it
3204 at the end of the record in order not to shift all other offsets. */
3205 tree fb_decl
= make_node (FIELD_DECL
);
3207 DECL_NAME (fb_decl
) = get_identifier ("FRAME_BASE.PARENT");
3208 TREE_TYPE (fb_decl
) = ptr_type_node
;
3209 TREE_ADDRESSABLE (fb_decl
) = 1;
3210 DECL_CONTEXT (fb_decl
) = root
->frame_type
;
3211 TYPE_FIELDS (root
->frame_type
) = chainon (TYPE_FIELDS (root
->frame_type
),
3214 /* In some cases the frame type will trigger the -Wpadded warning.
3215 This is not helpful; suppress it. */
3216 int save_warn_padded
= warn_padded
;
3218 layout_type (root
->frame_type
);
3219 warn_padded
= save_warn_padded
;
3220 layout_decl (root
->frame_decl
, 0);
3222 /* Initialize the frame base address field. If the builtin we need is
3223 not available, set it to NULL so that debugging information does not
3225 tree fb_ref
= build3 (COMPONENT_REF
, TREE_TYPE (fb_decl
),
3226 root
->frame_decl
, fb_decl
, NULL_TREE
);
3229 if (frame_addr_fndecl
!= NULL_TREE
)
3231 gcall
*fb_gimple
= gimple_build_call (frame_addr_fndecl
, 1,
3233 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3235 fb_tmp
= init_tmp_var_with_call (root
, &gsi
, fb_gimple
);
3238 fb_tmp
= build_int_cst (TREE_TYPE (fb_ref
), 0);
3239 gimple_seq_add_stmt (&stmt_list
,
3240 gimple_build_assign (fb_ref
, fb_tmp
));
3242 declare_vars (root
->frame_decl
,
3243 gimple_seq_first_stmt (gimple_body (context
)), true);
3246 /* If any parameters were referenced non-locally, then we need to insert
3247 a copy or a pointer. */
3248 if (root
->any_parm_remapped
)
3251 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
3255 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
3259 if (use_pointer_in_frame (p
))
3264 /* If the assignment is from a non-register the stmt is
3265 not valid gimple. Make it so by using a temporary instead. */
3266 if (!is_gimple_reg (x
)
3267 && is_gimple_reg_type (TREE_TYPE (x
)))
3269 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3270 x
= init_tmp_var (root
, x
, &gsi
);
3273 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3274 root
->frame_decl
, field
, NULL_TREE
);
3275 stmt
= gimple_build_assign (y
, x
);
3276 gimple_seq_add_stmt (&stmt_list
, stmt
);
3280 /* If a chain_field was created, then it needs to be initialized
3282 if (root
->chain_field
)
3284 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
3285 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
3286 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
3287 gimple_seq_add_stmt (&stmt_list
, stmt
);
3290 /* If trampolines were created, then we need to initialize them. */
3291 if (root
->any_tramp_created
)
3293 struct nesting_info
*i
;
3294 for (i
= root
->inner
; i
; i
= i
->next
)
3298 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
3302 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
3303 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3304 gimple_seq_add_stmt (&stmt_list
, stmt
);
3308 /* If descriptors were created, then we need to initialize them. */
3309 if (root
->any_descr_created
)
3311 struct nesting_info
*i
;
3312 for (i
= root
->inner
; i
; i
= i
->next
)
3316 field
= lookup_descr_for_decl (root
, i
->context
, NO_INSERT
);
3320 x
= builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR
);
3321 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3322 gimple_seq_add_stmt (&stmt_list
, stmt
);
3326 /* If we created initialization statements, insert them. */
3330 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
3331 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
3332 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
3333 gimple_bind_set_body (bind
, stmt_list
);
3336 /* If a chain_decl was created, then it needs to be registered with
3337 struct function so that it gets initialized from the static chain
3338 register at the beginning of the function. */
3339 sf
= DECL_STRUCT_FUNCTION (root
->context
);
3340 sf
->static_chain_decl
= root
->chain_decl
;
3342 /* Similarly for the non-local goto save area. */
3343 if (root
->nl_goto_field
)
3345 sf
->nonlocal_goto_save_area
3346 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
3347 sf
->has_nonlocal_label
= 1;
3350 /* Make sure all new local variables get inserted into the
3351 proper BIND_EXPR. */
3352 if (root
->new_local_var_chain
)
3353 declare_vars (root
->new_local_var_chain
,
3354 gimple_seq_first_stmt (gimple_body (root
->context
)),
3357 if (root
->debug_var_chain
)
3362 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
3364 for (debug_var
= root
->debug_var_chain
; debug_var
;
3365 debug_var
= DECL_CHAIN (debug_var
))
3366 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3369 /* If there are any debug decls with variable length types,
3370 remap those types using other debug_var_chain variables. */
3373 struct nesting_copy_body_data id
;
3375 memset (&id
, 0, sizeof (id
));
3376 id
.cb
.copy_decl
= nesting_copy_decl
;
3377 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3380 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
3381 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3383 tree type
= TREE_TYPE (debug_var
);
3384 tree newt
, t
= type
;
3385 struct nesting_info
*i
;
3387 for (i
= root
; i
; i
= i
->outer
)
3388 if (variably_modified_type_p (type
, i
->context
))
3394 id
.cb
.src_fn
= i
->context
;
3395 id
.cb
.dst_fn
= i
->context
;
3396 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3398 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
3399 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3401 newt
= TREE_TYPE (newt
);
3404 if (TYPE_NAME (newt
)
3405 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3406 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3408 && TYPE_NAME (newt
) == TYPE_NAME (t
))
3409 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3412 delete id
.cb
.decl_map
;
3415 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
3416 if (gimple_bind_block (scope
))
3417 declare_vars (root
->debug_var_chain
, scope
, true);
3419 BLOCK_VARS (DECL_INITIAL (root
->context
))
3420 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
3421 root
->debug_var_chain
);
3424 fixup_vla_decls (DECL_INITIAL (root
->context
));
3426 /* Fold the rewritten MEM_REF trees. */
3427 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
3429 /* Dump the translated tree function. */
3432 fputs ("\n\n", dump_file
);
3433 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
3438 finalize_nesting_tree (struct nesting_info
*root
)
3440 struct nesting_info
*n
;
3441 FOR_EACH_NEST_INFO (n
, root
)
3442 finalize_nesting_tree_1 (n
);
3445 /* Unnest the nodes and pass them to cgraph. */
3448 unnest_nesting_tree_1 (struct nesting_info
*root
)
3450 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
3452 /* For nested functions update the cgraph to reflect unnesting.
3453 We also delay finalizing of these functions up to this point. */
3458 cgraph_node::finalize_function (root
->context
, true);
3463 unnest_nesting_tree (struct nesting_info
*root
)
3465 struct nesting_info
*n
;
3466 FOR_EACH_NEST_INFO (n
, root
)
3467 unnest_nesting_tree_1 (n
);
3470 /* Free the data structures allocated during this pass. */
3473 free_nesting_tree (struct nesting_info
*root
)
3475 struct nesting_info
*node
, *next
;
3477 node
= iter_nestinfo_start (root
);
3480 next
= iter_nestinfo_next (node
);
3481 delete node
->var_map
;
3482 delete node
->field_map
;
3483 delete node
->mem_refs
;
3490 /* Gimplify a function and all its nested functions. */
3492 gimplify_all_functions (struct cgraph_node
*root
)
3494 struct cgraph_node
*iter
;
3495 if (!gimple_body (root
->decl
))
3496 gimplify_function_tree (root
->decl
);
3497 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
3498 if (!iter
->thunk
.thunk_p
)
3499 gimplify_all_functions (iter
);
3502 /* Main entry point for this pass. Process FNDECL and all of its nested
3503 subroutines and turn them into something less tightly bound. */
3506 lower_nested_functions (tree fndecl
)
3508 struct cgraph_node
*cgn
;
3509 struct nesting_info
*root
;
3511 /* If there are no nested functions, there's nothing to do. */
3512 cgn
= cgraph_node::get (fndecl
);
3516 gimplify_all_functions (cgn
);
3518 set_dump_file (dump_begin (TDI_nested
, &dump_flags
));
3520 fprintf (dump_file
, "\n;; Function %s\n\n",
3521 lang_hooks
.decl_printable_name (fndecl
, 2));
3523 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3524 root
= create_nesting_tree (cgn
);
3526 walk_all_functions (convert_nonlocal_reference_stmt
,
3527 convert_nonlocal_reference_op
,
3529 walk_all_functions (convert_local_reference_stmt
,
3530 convert_local_reference_op
,
3532 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3533 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
3535 convert_all_function_calls (root
);
3536 finalize_nesting_tree (root
);
3537 unnest_nesting_tree (root
);
3539 free_nesting_tree (root
);
3540 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
3544 dump_end (TDI_nested
, dump_file
);
3545 set_dump_file (NULL
);
3549 #include "gt-tree-nested.h"