1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "stringpool.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
35 #include "tree-inline.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
50 /* Summary of nested functions. */
51 static function_summary
<nested_function_info
*>
52 *nested_function_sum
= NULL
;
54 /* Return nested_function_info, if available. */
55 nested_function_info
*
56 nested_function_info::get (cgraph_node
*node
)
58 if (!nested_function_sum
)
60 return nested_function_sum
->get (node
);
63 /* Return nested_function_info possibly creating new one. */
64 nested_function_info
*
65 nested_function_info::get_create (cgraph_node
*node
)
67 if (!nested_function_sum
)
68 nested_function_sum
= new function_summary
<nested_function_info
*>
70 return nested_function_sum
->get_create (node
);
73 /* cgraph_node is no longer nested function; update cgraph accordingly. */
75 unnest_function (cgraph_node
*node
)
77 nested_function_info
*info
= nested_function_info::get (node
);
78 cgraph_node
**node2
= &nested_function_info::get
79 (nested_function_origin (node
))->nested
;
81 gcc_checking_assert (info
->origin
);
82 while (*node2
!= node
)
83 node2
= &nested_function_info::get (*node2
)->next_nested
;
84 *node2
= info
->next_nested
;
85 info
->next_nested
= NULL
;
87 nested_function_sum
->remove (node
);
90 /* Destructor: unlink function from nested function lists. */
91 nested_function_info::~nested_function_info ()
94 for (cgraph_node
*n
= nested
; n
; n
= next
)
96 nested_function_info
*info
= nested_function_info::get (n
);
97 next
= info
->next_nested
;
99 info
->next_nested
= NULL
;
105 = &nested_function_info::get (origin
)->nested
;
107 nested_function_info
*info
;
108 while ((info
= nested_function_info::get (*node2
)) != this && info
)
109 node2
= &info
->next_nested
;
110 *node2
= next_nested
;
114 /* Free nested function info summaries. */
116 nested_function_info::release ()
118 if (nested_function_sum
)
119 delete (nested_function_sum
);
120 nested_function_sum
= NULL
;
123 /* If NODE is nested function, record it. */
125 maybe_record_nested_function (cgraph_node
*node
)
127 if (DECL_CONTEXT (node
->decl
)
128 && TREE_CODE (DECL_CONTEXT (node
->decl
)) == FUNCTION_DECL
)
130 cgraph_node
*origin
= cgraph_node::get_create (DECL_CONTEXT (node
->decl
));
131 nested_function_info
*info
= nested_function_info::get_create (node
);
132 nested_function_info
*origin_info
133 = nested_function_info::get_create (origin
);
135 info
->origin
= origin
;
136 info
->next_nested
= origin_info
->nested
;
137 origin_info
->nested
= node
;
141 /* The object of this pass is to lower the representation of a set of nested
142 functions in order to expose all of the gory details of the various
143 nonlocal references. We want to do this sooner rather than later, in
144 order to give us more freedom in emitting all of the functions in question.
146 Back in olden times, when gcc was young, we developed an insanely
147 complicated scheme whereby variables which were referenced nonlocally
148 were forced to live in the stack of the declaring function, and then
149 the nested functions magically discovered where these variables were
150 placed. In order for this scheme to function properly, it required
151 that the outer function be partially expanded, then we switch to
152 compiling the inner function, and once done with those we switch back
153 to compiling the outer function. Such delicate ordering requirements
154 makes it difficult to do whole translation unit optimizations
155 involving such functions.
157 The implementation here is much more direct. Everything that can be
158 referenced by an inner function is a member of an explicitly created
159 structure herein called the "nonlocal frame struct". The incoming
160 static chain for a nested function is a pointer to this struct in
161 the parent. In this way, we settle on known offsets from a known
162 base, and so are decoupled from the logic that places objects in the
163 function's stack frame. More importantly, we don't have to wait for
164 that to happen -- since the compilation of the inner function is no
165 longer tied to a real stack frame, the nonlocal frame struct can be
   allocated anywhere.  Which means that the outer function is now
   inlinable, and the inner function is now expandable.
169 Theory of operation here is very simple. Iterate over all the
170 statements in all the functions (depth first) several times,
171 allocating structures and fields on demand. In general we want to
172 examine inner functions first, so that we can avoid making changes
173 to outer functions which are unnecessary.
175 The order of the passes matters a bit, in that later passes will be
176 skipped if it is discovered that the functions don't actually interact
177 at all. That is, they're nested in the lexical sense but could have
178 been written as independent functions without change. */
183 struct nesting_info
*outer
;
184 struct nesting_info
*inner
;
185 struct nesting_info
*next
;
187 hash_map
<tree
, tree
> *field_map
;
188 hash_map
<tree
, tree
> *var_map
;
189 hash_set
<tree
*> *mem_refs
;
190 bitmap suppress_expansion
;
193 tree new_local_var_chain
;
194 tree debug_var_chain
;
202 bool any_parm_remapped
;
203 bool any_tramp_created
;
204 bool any_descr_created
;
205 char static_chain_added
;
209 /* Iterate over the nesting tree, starting with ROOT, depth first. */
211 static inline struct nesting_info
*
212 iter_nestinfo_start (struct nesting_info
*root
)
219 static inline struct nesting_info
*
220 iter_nestinfo_next (struct nesting_info
*node
)
223 return iter_nestinfo_start (node
->next
);
227 #define FOR_EACH_NEST_INFO(I, ROOT) \
228 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
230 /* Obstack used for the bitmaps in the struct above. */
231 static struct bitmap_obstack nesting_info_bitmap_obstack
;
234 /* We're working in so many different function contexts simultaneously,
235 that create_tmp_var is dangerous. Prevent mishap. */
236 #define create_tmp_var cant_use_create_tmp_var_here_dummy
238 /* Like create_tmp_var, except record the variable for registration at
239 the given nesting level. */
242 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
246 /* If the type is of variable size or a type which must be created by the
247 frontend, something is wrong. Note that we explicitly allow
248 incomplete types here, since we create them ourselves here. */
249 gcc_assert (!TREE_ADDRESSABLE (type
));
250 gcc_assert (!TYPE_SIZE_UNIT (type
)
251 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
253 tmp_var
= create_tmp_var_raw (type
, prefix
);
254 DECL_CONTEXT (tmp_var
) = info
->context
;
255 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
256 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
258 info
->new_local_var_chain
= tmp_var
;
263 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
266 build_simple_mem_ref_notrap (tree ptr
)
268 tree t
= build_simple_mem_ref (ptr
);
269 TREE_THIS_NOTRAP (t
) = 1;
273 /* Take the address of EXP to be used within function CONTEXT.
274 Mark it for addressability as necessary. */
277 build_addr (tree exp
)
279 mark_addressable (exp
);
280 return build_fold_addr_expr (exp
);
283 /* Insert FIELD into TYPE, sorted by alignment requirements. */
286 insert_field_into_struct (tree type
, tree field
)
290 DECL_CONTEXT (field
) = type
;
292 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
293 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
296 DECL_CHAIN (field
) = *p
;
299 /* Set correct alignment for frame struct type. */
300 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
301 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
304 /* Build or return the RECORD_TYPE that describes the frame state that is
305 shared between INFO->CONTEXT and its nested functions. This record will
306 not be complete until finalize_nesting_tree; up until that point we'll
307 be adding fields as necessary.
309 We also build the DECL that represents this frame in the function. */
312 get_frame_type (struct nesting_info
*info
)
314 tree type
= info
->frame_type
;
319 type
= make_node (RECORD_TYPE
);
321 name
= concat ("FRAME.",
322 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
324 TYPE_NAME (type
) = get_identifier (name
);
327 info
->frame_type
= type
;
329 /* Do not put info->frame_decl on info->new_local_var_chain,
330 so that we can declare it in the lexical blocks, which
331 makes sure virtual regs that end up appearing in its RTL
332 expression get substituted in instantiate_virtual_regs. */
333 info
->frame_decl
= create_tmp_var_raw (type
, "FRAME");
334 DECL_CONTEXT (info
->frame_decl
) = info
->context
;
335 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
336 DECL_SEEN_IN_BIND_EXPR_P (info
->frame_decl
) = 1;
338 /* ??? Always make it addressable for now, since it is meant to
339 be pointed to by the static chain pointer. This pessimizes
340 when it turns out that no static chains are needed because
341 the nested functions referencing non-local variables are not
342 reachable, but the true pessimization is to create the non-
343 local frame structure in the first place. */
344 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
350 /* Return true if DECL should be referenced by pointer in the non-local frame
354 use_pointer_in_frame (tree decl
)
356 if (TREE_CODE (decl
) == PARM_DECL
)
358 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
359 sized DECLs, and inefficient to copy large aggregates. Don't bother
360 moving anything but scalar parameters. */
361 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
365 /* Variable-sized DECLs can only come from OMP clauses at this point
366 since the gimplifier has already turned the regular variables into
367 pointers. Do the same as the gimplifier. */
368 return !DECL_SIZE (decl
) || TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
;
372 /* Given DECL, a non-locally accessed variable, find or create a field
373 in the non-local frame structure for the given nesting context. */
376 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
377 enum insert_option insert
)
379 gcc_checking_assert (decl_function_context (decl
) == info
->context
);
381 if (insert
== NO_INSERT
)
383 tree
*slot
= info
->field_map
->get (decl
);
384 return slot
? *slot
: NULL_TREE
;
387 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
390 tree type
= get_frame_type (info
);
391 tree field
= make_node (FIELD_DECL
);
392 DECL_NAME (field
) = DECL_NAME (decl
);
394 if (use_pointer_in_frame (decl
))
396 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
397 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
398 DECL_NONADDRESSABLE_P (field
) = 1;
402 TREE_TYPE (field
) = TREE_TYPE (decl
);
403 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
404 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
405 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
406 DECL_IGNORED_P (field
) = DECL_IGNORED_P (decl
);
407 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
408 TREE_NO_WARNING (field
) = TREE_NO_WARNING (decl
);
409 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
411 /* Declare the transformation and adjust the original DECL. For a
412 variable or for a parameter when not optimizing, we make it point
413 to the field in the frame directly. For a parameter, we don't do
414 it when optimizing because the variable tracking pass will already
416 if (VAR_P (decl
) || !optimize
)
419 = build3 (COMPONENT_REF
, TREE_TYPE (field
), info
->frame_decl
,
422 /* If the next declaration is a PARM_DECL pointing to the DECL,
423 we need to adjust its VALUE_EXPR directly, since chains of
424 VALUE_EXPRs run afoul of garbage collection. This occurs
425 in Ada for Out parameters that aren't copied in. */
426 tree next
= DECL_CHAIN (decl
);
428 && TREE_CODE (next
) == PARM_DECL
429 && DECL_HAS_VALUE_EXPR_P (next
)
430 && DECL_VALUE_EXPR (next
) == decl
)
431 SET_DECL_VALUE_EXPR (next
, x
);
433 SET_DECL_VALUE_EXPR (decl
, x
);
434 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
438 insert_field_into_struct (type
, field
);
441 if (TREE_CODE (decl
) == PARM_DECL
)
442 info
->any_parm_remapped
= true;
448 /* Build or return the variable that holds the static chain within
449 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
452 get_chain_decl (struct nesting_info
*info
)
454 tree decl
= info
->chain_decl
;
460 type
= get_frame_type (info
->outer
);
461 type
= build_pointer_type (type
);
463 /* Note that this variable is *not* entered into any BIND_EXPR;
464 the construction of this variable is handled specially in
465 expand_function_start and initialize_inlined_parameters.
466 Note also that it's represented as a parameter. This is more
467 close to the truth, since the initial value does come from
469 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
470 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
471 DECL_ARTIFICIAL (decl
) = 1;
472 DECL_IGNORED_P (decl
) = 1;
473 TREE_USED (decl
) = 1;
474 DECL_CONTEXT (decl
) = info
->context
;
475 DECL_ARG_TYPE (decl
) = type
;
477 /* Tell tree-inline.c that we never write to this variable, so
478 it can copy-prop the replacement value immediately. */
479 TREE_READONLY (decl
) = 1;
481 info
->chain_decl
= decl
;
484 && (dump_flags
& TDF_DETAILS
)
485 && !DECL_STATIC_CHAIN (info
->context
))
486 fprintf (dump_file
, "Setting static-chain for %s\n",
487 lang_hooks
.decl_printable_name (info
->context
, 2));
489 DECL_STATIC_CHAIN (info
->context
) = 1;
494 /* Build or return the field within the non-local frame state that holds
495 the static chain for INFO->CONTEXT. This is the way to walk back up
496 multiple nesting levels. */
499 get_chain_field (struct nesting_info
*info
)
501 tree field
= info
->chain_field
;
505 tree type
= build_pointer_type (get_frame_type (info
->outer
));
507 field
= make_node (FIELD_DECL
);
508 DECL_NAME (field
) = get_identifier ("__chain");
509 TREE_TYPE (field
) = type
;
510 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
511 DECL_NONADDRESSABLE_P (field
) = 1;
513 insert_field_into_struct (get_frame_type (info
), field
);
515 info
->chain_field
= field
;
518 && (dump_flags
& TDF_DETAILS
)
519 && !DECL_STATIC_CHAIN (info
->context
))
520 fprintf (dump_file
, "Setting static-chain for %s\n",
521 lang_hooks
.decl_printable_name (info
->context
, 2));
523 DECL_STATIC_CHAIN (info
->context
) = 1;
528 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
531 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
536 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
537 gimple_call_set_lhs (call
, t
);
538 if (! gsi_end_p (*gsi
))
539 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
540 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
546 /* Copy EXP into a temporary. Allocate the temporary in the context of
547 INFO and insert the initialization statement before GSI. */
550 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
555 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
556 stmt
= gimple_build_assign (t
, exp
);
557 if (! gsi_end_p (*gsi
))
558 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
559 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
565 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
568 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
569 gimple_stmt_iterator
*gsi
)
571 if (is_gimple_val (exp
))
574 return init_tmp_var (info
, exp
, gsi
);
577 /* Similarly, but copy from the temporary and insert the statement
578 after the iterator. */
581 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
586 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
587 stmt
= gimple_build_assign (exp
, t
);
588 if (! gsi_end_p (*gsi
))
589 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
590 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
595 /* Build or return the type used to represent a nested function trampoline. */
597 static GTY(()) tree trampoline_type
;
600 get_trampoline_type (struct nesting_info
*info
)
602 unsigned align
, size
;
606 return trampoline_type
;
608 align
= TRAMPOLINE_ALIGNMENT
;
609 size
= TRAMPOLINE_SIZE
;
611 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
612 then allocate extra space so that we can do dynamic alignment. */
613 if (align
> STACK_BOUNDARY
)
615 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
616 align
= STACK_BOUNDARY
;
619 t
= build_index_type (size_int (size
- 1));
620 t
= build_array_type (char_type_node
, t
);
621 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
622 FIELD_DECL
, get_identifier ("__data"), t
);
623 SET_DECL_ALIGN (t
, align
);
624 DECL_USER_ALIGN (t
) = 1;
626 trampoline_type
= make_node (RECORD_TYPE
);
627 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
628 TYPE_FIELDS (trampoline_type
) = t
;
629 layout_type (trampoline_type
);
630 DECL_CONTEXT (t
) = trampoline_type
;
632 return trampoline_type
;
635 /* Build or return the type used to represent a nested function descriptor. */
637 static GTY(()) tree descriptor_type
;
640 get_descriptor_type (struct nesting_info
*info
)
642 /* The base alignment is that of a function. */
643 const unsigned align
= FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
);
647 return descriptor_type
;
649 t
= build_index_type (integer_one_node
);
650 t
= build_array_type (ptr_type_node
, t
);
651 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
652 FIELD_DECL
, get_identifier ("__data"), t
);
653 SET_DECL_ALIGN (t
, MAX (TYPE_ALIGN (ptr_type_node
), align
));
654 DECL_USER_ALIGN (t
) = 1;
656 descriptor_type
= make_node (RECORD_TYPE
);
657 TYPE_NAME (descriptor_type
) = get_identifier ("__builtin_descriptor");
658 TYPE_FIELDS (descriptor_type
) = t
;
659 layout_type (descriptor_type
);
660 DECL_CONTEXT (t
) = descriptor_type
;
662 return descriptor_type
;
665 /* Given DECL, a nested function, find or create an element in the
666 var map for this function. */
669 lookup_element_for_decl (struct nesting_info
*info
, tree decl
,
670 enum insert_option insert
)
672 if (insert
== NO_INSERT
)
674 tree
*slot
= info
->var_map
->get (decl
);
675 return slot
? *slot
: NULL_TREE
;
678 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
680 *slot
= build_tree_list (NULL_TREE
, NULL_TREE
);
685 /* Given DECL, a nested function, create a field in the non-local
686 frame structure for this function. */
689 create_field_for_decl (struct nesting_info
*info
, tree decl
, tree type
)
691 tree field
= make_node (FIELD_DECL
);
692 DECL_NAME (field
) = DECL_NAME (decl
);
693 TREE_TYPE (field
) = type
;
694 TREE_ADDRESSABLE (field
) = 1;
695 insert_field_into_struct (get_frame_type (info
), field
);
699 /* Given DECL, a nested function, find or create a field in the non-local
700 frame structure for a trampoline for this function. */
703 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
704 enum insert_option insert
)
708 elt
= lookup_element_for_decl (info
, decl
, insert
);
712 field
= TREE_PURPOSE (elt
);
714 if (!field
&& insert
== INSERT
)
716 field
= create_field_for_decl (info
, decl
, get_trampoline_type (info
));
717 TREE_PURPOSE (elt
) = field
;
718 info
->any_tramp_created
= true;
724 /* Given DECL, a nested function, find or create a field in the non-local
725 frame structure for a descriptor for this function. */
728 lookup_descr_for_decl (struct nesting_info
*info
, tree decl
,
729 enum insert_option insert
)
733 elt
= lookup_element_for_decl (info
, decl
, insert
);
737 field
= TREE_VALUE (elt
);
739 if (!field
&& insert
== INSERT
)
741 field
= create_field_for_decl (info
, decl
, get_descriptor_type (info
));
742 TREE_VALUE (elt
) = field
;
743 info
->any_descr_created
= true;
749 /* Build or return the field within the non-local frame state that holds
750 the non-local goto "jmp_buf". The buffer itself is maintained by the
751 rtl middle-end as dynamic stack space is allocated. */
754 get_nl_goto_field (struct nesting_info
*info
)
756 tree field
= info
->nl_goto_field
;
762 /* For __builtin_nonlocal_goto, we need N words. The first is the
763 frame pointer, the rest is for the target's stack pointer save
764 area. The number of words is controlled by STACK_SAVEAREA_MODE;
765 not the best interface, but it'll do for now. */
766 if (Pmode
== ptr_mode
)
767 type
= ptr_type_node
;
769 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
772 = as_a
<scalar_int_mode
> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
773 size
= GET_MODE_SIZE (mode
);
774 size
= size
/ GET_MODE_SIZE (Pmode
);
777 type
= build_array_type
778 (type
, build_index_type (size_int (size
)));
780 field
= make_node (FIELD_DECL
);
781 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
782 TREE_TYPE (field
) = type
;
783 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
784 TREE_ADDRESSABLE (field
) = 1;
786 insert_field_into_struct (get_frame_type (info
), field
);
788 info
->nl_goto_field
= field
;
794 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
797 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
798 struct nesting_info
*info
, gimple_seq
*pseq
)
800 struct walk_stmt_info wi
;
802 memset (&wi
, 0, sizeof (wi
));
805 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
809 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
812 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
813 struct nesting_info
*info
)
815 gimple_seq body
= gimple_body (info
->context
);
816 walk_body (callback_stmt
, callback_op
, info
, &body
);
817 gimple_set_body (info
->context
, body
);
820 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
823 walk_gimple_omp_for (gomp_for
*for_stmt
,
824 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
825 struct nesting_info
*info
)
827 struct walk_stmt_info wi
;
832 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
835 memset (&wi
, 0, sizeof (wi
));
837 wi
.gsi
= gsi_last (seq
);
839 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
842 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
846 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
851 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
854 t
= gimple_omp_for_incr (for_stmt
, i
);
855 gcc_assert (BINARY_CLASS_P (t
));
857 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
860 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
863 seq
= gsi_seq (wi
.gsi
);
864 if (!gimple_seq_empty_p (seq
))
866 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
867 annotate_all_with_location (seq
, gimple_location (for_stmt
));
868 gimple_seq_add_seq (&pre_body
, seq
);
869 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
873 /* Similarly for ROOT and all functions nested underneath, depth first. */
876 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
877 struct nesting_info
*root
)
879 struct nesting_info
*n
;
880 FOR_EACH_NEST_INFO (n
, root
)
881 walk_function (callback_stmt
, callback_op
, n
);
885 /* We have to check for a fairly pathological case. The operands of function
886 nested function are to be interpreted in the context of the enclosing
887 function. So if any are variably-sized, they will get remapped when the
888 enclosing function is inlined. But that remapping would also have to be
889 done in the types of the PARM_DECLs of the nested function, meaning the
890 argument types of that function will disagree with the arguments in the
891 calls to that function. So we'd either have to make a copy of the nested
892 function corresponding to each time the enclosing function was inlined or
893 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
894 function. The former is not practical. The latter would still require
895 detecting this case to know when to add the conversions. So, for now at
896 least, we don't inline such an enclosing function.
898 We have to do that check recursively, so here return indicating whether
899 FNDECL has such a nested function. ORIG_FN is the function we were
900 trying to inline to use for checking whether any argument is variably
901 modified by anything in it.
903 It would be better to do this in tree-inline.c so that we could give
904 the appropriate warning for why a function can't be inlined, but that's
905 too late since the nesting structure has already been flattened and
906 adding a flag just to record this fact seems a waste of a flag. */
909 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
911 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
914 for (cgn
= first_nested_function (cgn
); cgn
;
915 cgn
= next_nested_function (cgn
))
917 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
918 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
921 if (check_for_nested_with_variably_modified (cgn
->decl
,
929 /* Construct our local datastructure describing the function nesting
930 tree rooted by CGN. */
932 static struct nesting_info
*
933 create_nesting_tree (struct cgraph_node
*cgn
)
935 struct nesting_info
*info
= XCNEW (struct nesting_info
);
936 info
->field_map
= new hash_map
<tree
, tree
>;
937 info
->var_map
= new hash_map
<tree
, tree
>;
938 info
->mem_refs
= new hash_set
<tree
*>;
939 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
940 info
->context
= cgn
->decl
;
941 info
->thunk_p
= cgn
->thunk
;
943 for (cgn
= first_nested_function (cgn
); cgn
;
944 cgn
= next_nested_function (cgn
))
946 struct nesting_info
*sub
= create_nesting_tree (cgn
);
948 sub
->next
= info
->inner
;
952 /* See discussion at check_for_nested_with_variably_modified for a
953 discussion of why this has to be here. */
954 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
955 DECL_UNINLINABLE (info
->context
) = true;
960 /* Return an expression computing the static chain for TARGET_CONTEXT
961 from INFO->CONTEXT. Insert any necessary computations before TSI. */
964 get_static_chain (struct nesting_info
*info
, tree target_context
,
965 gimple_stmt_iterator
*gsi
)
967 struct nesting_info
*i
;
970 if (info
->context
== target_context
)
972 x
= build_addr (info
->frame_decl
);
973 info
->static_chain_added
|= 1;
977 x
= get_chain_decl (info
);
978 info
->static_chain_added
|= 2;
980 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
982 tree field
= get_chain_field (i
);
984 x
= build_simple_mem_ref_notrap (x
);
985 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
986 x
= init_tmp_var (info
, x
, gsi
);
994 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
995 frame as seen from INFO->CONTEXT. Insert any necessary computations
999 get_frame_field (struct nesting_info
*info
, tree target_context
,
1000 tree field
, gimple_stmt_iterator
*gsi
)
1002 struct nesting_info
*i
;
1005 if (info
->context
== target_context
)
1007 /* Make sure frame_decl gets created. */
1008 (void) get_frame_type (info
);
1009 x
= info
->frame_decl
;
1010 info
->static_chain_added
|= 1;
1014 x
= get_chain_decl (info
);
1015 info
->static_chain_added
|= 2;
1017 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
1019 tree field
= get_chain_field (i
);
1021 x
= build_simple_mem_ref_notrap (x
);
1022 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1023 x
= init_tmp_var (info
, x
, gsi
);
1026 x
= build_simple_mem_ref_notrap (x
);
1029 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1033 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
1035 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
1036 in the nested function with DECL_VALUE_EXPR set to reference the true
1037 variable in the parent function. This is used both for debug info
1038 and in OMP lowering. */
1041 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
1043 tree target_context
;
1044 struct nesting_info
*i
;
1045 tree x
, field
, new_decl
;
1047 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1052 target_context
= decl_function_context (decl
);
1054 /* A copy of the code in get_frame_field, but without the temporaries. */
1055 if (info
->context
== target_context
)
1057 /* Make sure frame_decl gets created. */
1058 (void) get_frame_type (info
);
1059 x
= info
->frame_decl
;
1061 info
->static_chain_added
|= 1;
1065 x
= get_chain_decl (info
);
1066 info
->static_chain_added
|= 2;
1067 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
1069 field
= get_chain_field (i
);
1070 x
= build_simple_mem_ref_notrap (x
);
1071 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1073 x
= build_simple_mem_ref_notrap (x
);
1076 field
= lookup_field_for_decl (i
, decl
, INSERT
);
1077 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1078 if (use_pointer_in_frame (decl
))
1079 x
= build_simple_mem_ref_notrap (x
);
1081 /* ??? We should be remapping types as well, surely. */
1082 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1083 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1084 DECL_CONTEXT (new_decl
) = info
->context
;
1085 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1086 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1087 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1088 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1089 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1090 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1091 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1092 if ((TREE_CODE (decl
) == PARM_DECL
1093 || TREE_CODE (decl
) == RESULT_DECL
1095 && DECL_BY_REFERENCE (decl
))
1096 DECL_BY_REFERENCE (new_decl
) = 1;
1098 SET_DECL_VALUE_EXPR (new_decl
, x
);
1099 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1102 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1103 info
->debug_var_chain
= new_decl
;
1106 && info
->context
!= target_context
1107 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
1108 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
1114 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1115 and PARM_DECLs that belong to outer functions.
1117 The rewrite will involve some number of structure accesses back up
1118 the static chain. E.g. for a variable FOO up one nesting level it'll
1119 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
1120 indirections apply to decls for which use_pointer_in_frame is true. */
1123 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1125 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1126 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1130 switch (TREE_CODE (t
))
1133 /* Non-automatic variables are never processed. */
1134 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1140 tree x
, target_context
= decl_function_context (t
);
1142 if (info
->context
== target_context
)
1147 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1148 x
= get_nonlocal_debug_decl (info
, t
);
1151 struct nesting_info
*i
= info
;
1152 while (i
&& i
->context
!= target_context
)
1154 /* If none of the outer contexts is the target context, this means
1155 that the VAR or PARM_DECL is referenced in a wrong context. */
1157 internal_error ("%s from %s referenced in %s",
1158 IDENTIFIER_POINTER (DECL_NAME (t
)),
1159 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
1160 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
1162 x
= lookup_field_for_decl (i
, t
, INSERT
);
1163 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
1164 if (use_pointer_in_frame (t
))
1166 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1167 x
= build_simple_mem_ref_notrap (x
);
1174 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1176 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1184 /* We're taking the address of a label from a parent function, but
1185 this is not itself a non-local goto. Mark the label such that it
1186 will not be deleted, much as we would with a label address in
1188 if (decl_function_context (t
) != info
->context
)
1189 FORCED_LABEL (t
) = 1;
1194 bool save_val_only
= wi
->val_only
;
1196 wi
->val_only
= false;
1198 wi
->changed
= false;
1199 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
1200 wi
->val_only
= true;
1206 /* If we changed anything, we might no longer be directly
1207 referencing a decl. */
1208 save_context
= current_function_decl
;
1209 current_function_decl
= info
->context
;
1210 recompute_tree_invariant_for_addr_expr (t
);
1211 current_function_decl
= save_context
;
1213 /* If the callback converted the address argument in a context
1214 where we only accept variables (and min_invariant, presumably),
1215 then compute the address into a temporary. */
1217 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1227 case ARRAY_RANGE_REF
:
1229 /* Go down this entire nest and just look at the final prefix and
1230 anything that describes the references. Otherwise, we lose track
1231 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1232 wi
->val_only
= true;
1234 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1236 if (TREE_CODE (t
) == COMPONENT_REF
)
1237 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
1239 else if (TREE_CODE (t
) == ARRAY_REF
1240 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1242 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
1244 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1246 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1250 wi
->val_only
= false;
1251 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1254 case VIEW_CONVERT_EXPR
:
1255 /* Just request to look at the subtrees, leaving val_only and lhs
1256 untouched. This might actually be for !val_only + lhs, in which
1257 case we don't want to force a replacement by a temporary. */
1262 if (!IS_TYPE_OR_DECL_P (t
))
1265 wi
->val_only
= true;
1274 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1275 struct walk_stmt_info
*);
1277 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1278 and PARM_DECLs that belong to outer functions. */
1281 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1283 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1284 bool need_chain
= false, need_stmts
= false;
1285 tree clause
, decl
, *pdecl
;
1287 bitmap new_suppress
;
1289 new_suppress
= BITMAP_GGC_ALLOC ();
1290 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1292 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1295 switch (OMP_CLAUSE_CODE (clause
))
1297 case OMP_CLAUSE_REDUCTION
:
1298 case OMP_CLAUSE_IN_REDUCTION
:
1299 case OMP_CLAUSE_TASK_REDUCTION
:
1300 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1302 if (TREE_CODE (OMP_CLAUSE_DECL (clause
)) == MEM_REF
)
1304 pdecl
= &TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0);
1305 if (TREE_CODE (*pdecl
) == POINTER_PLUS_EXPR
)
1306 pdecl
= &TREE_OPERAND (*pdecl
, 0);
1307 if (TREE_CODE (*pdecl
) == INDIRECT_REF
1308 || TREE_CODE (*pdecl
) == ADDR_EXPR
)
1309 pdecl
= &TREE_OPERAND (*pdecl
, 0);
1311 goto do_decl_clause
;
1313 case OMP_CLAUSE_LASTPRIVATE
:
1314 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1316 goto do_decl_clause
;
1318 case OMP_CLAUSE_LINEAR
:
1319 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1321 wi
->val_only
= true;
1323 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1325 goto do_decl_clause
;
1327 case OMP_CLAUSE_PRIVATE
:
1328 case OMP_CLAUSE_FIRSTPRIVATE
:
1329 case OMP_CLAUSE_COPYPRIVATE
:
1330 case OMP_CLAUSE_SHARED
:
1331 case OMP_CLAUSE_TO_DECLARE
:
1332 case OMP_CLAUSE_LINK
:
1333 case OMP_CLAUSE_USE_DEVICE_PTR
:
1334 case OMP_CLAUSE_USE_DEVICE_ADDR
:
1335 case OMP_CLAUSE_IS_DEVICE_PTR
:
1338 pdecl
= &OMP_CLAUSE_DECL (clause
);
1341 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1343 if (decl_function_context (decl
) != info
->context
)
1345 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1346 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1347 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1348 *pdecl
= get_nonlocal_debug_decl (info
, decl
);
1349 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1354 case OMP_CLAUSE_SCHEDULE
:
1355 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1358 case OMP_CLAUSE_FINAL
:
1360 case OMP_CLAUSE_NUM_THREADS
:
1361 case OMP_CLAUSE_DEPEND
:
1362 case OMP_CLAUSE_DEVICE
:
1363 case OMP_CLAUSE_NUM_TEAMS
:
1364 case OMP_CLAUSE_THREAD_LIMIT
:
1365 case OMP_CLAUSE_SAFELEN
:
1366 case OMP_CLAUSE_SIMDLEN
:
1367 case OMP_CLAUSE_PRIORITY
:
1368 case OMP_CLAUSE_GRAINSIZE
:
1369 case OMP_CLAUSE_NUM_TASKS
:
1370 case OMP_CLAUSE_HINT
:
1371 case OMP_CLAUSE_NUM_GANGS
:
1372 case OMP_CLAUSE_NUM_WORKERS
:
1373 case OMP_CLAUSE_VECTOR_LENGTH
:
1374 case OMP_CLAUSE_GANG
:
1375 case OMP_CLAUSE_WORKER
:
1376 case OMP_CLAUSE_VECTOR
:
1377 case OMP_CLAUSE_ASYNC
:
1378 case OMP_CLAUSE_WAIT
:
1379 /* Several OpenACC clauses have optional arguments. Check if they
1381 if (OMP_CLAUSE_OPERAND (clause
, 0))
1383 wi
->val_only
= true;
1385 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1389 /* The gang clause accepts two arguments. */
1390 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1391 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1393 wi
->val_only
= true;
1395 convert_nonlocal_reference_op
1396 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1400 case OMP_CLAUSE_DIST_SCHEDULE
:
1401 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1403 wi
->val_only
= true;
1405 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1410 case OMP_CLAUSE_MAP
:
1412 case OMP_CLAUSE_FROM
:
1413 if (OMP_CLAUSE_SIZE (clause
))
1415 wi
->val_only
= true;
1417 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1420 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1421 goto do_decl_clause
;
1422 wi
->val_only
= true;
1424 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1428 case OMP_CLAUSE_ALIGNED
:
1429 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1431 wi
->val_only
= true;
1433 convert_nonlocal_reference_op
1434 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1437 case OMP_CLAUSE_NONTEMPORAL
:
1438 /* Like do_decl_clause, but don't add any suppression. */
1439 decl
= OMP_CLAUSE_DECL (clause
);
1441 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1443 if (decl_function_context (decl
) != info
->context
)
1445 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1450 case OMP_CLAUSE_NOWAIT
:
1451 case OMP_CLAUSE_ORDERED
:
1452 case OMP_CLAUSE_DEFAULT
:
1453 case OMP_CLAUSE_COPYIN
:
1454 case OMP_CLAUSE_COLLAPSE
:
1455 case OMP_CLAUSE_TILE
:
1456 case OMP_CLAUSE_UNTIED
:
1457 case OMP_CLAUSE_MERGEABLE
:
1458 case OMP_CLAUSE_PROC_BIND
:
1459 case OMP_CLAUSE_NOGROUP
:
1460 case OMP_CLAUSE_THREADS
:
1461 case OMP_CLAUSE_SIMD
:
1462 case OMP_CLAUSE_DEFAULTMAP
:
1463 case OMP_CLAUSE_ORDER
:
1464 case OMP_CLAUSE_SEQ
:
1465 case OMP_CLAUSE_INDEPENDENT
:
1466 case OMP_CLAUSE_AUTO
:
1467 case OMP_CLAUSE_IF_PRESENT
:
1468 case OMP_CLAUSE_FINALIZE
:
1469 case OMP_CLAUSE__CONDTEMP_
:
1470 case OMP_CLAUSE__SCANTEMP_
:
1473 /* The following clause belongs to the OpenACC cache directive, which
1474 is discarded during gimplification. */
1475 case OMP_CLAUSE__CACHE_
:
1476 /* The following clauses are only allowed in the OpenMP declare simd
1477 directive, so not seen here. */
1478 case OMP_CLAUSE_UNIFORM
:
1479 case OMP_CLAUSE_INBRANCH
:
1480 case OMP_CLAUSE_NOTINBRANCH
:
1481 /* The following clauses are only allowed on OpenMP cancel and
1482 cancellation point directives, which at this point have already
1483 been lowered into a function call. */
1484 case OMP_CLAUSE_FOR
:
1485 case OMP_CLAUSE_PARALLEL
:
1486 case OMP_CLAUSE_SECTIONS
:
1487 case OMP_CLAUSE_TASKGROUP
:
1488 /* The following clauses are only added during OMP lowering; nested
1489 function decomposition happens before that. */
1490 case OMP_CLAUSE__LOOPTEMP_
:
1491 case OMP_CLAUSE__REDUCTEMP_
:
1492 case OMP_CLAUSE__SIMDUID_
:
1493 case OMP_CLAUSE__SIMT_
:
1494 /* Anything else. */
1500 info
->suppress_expansion
= new_suppress
;
1503 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1504 switch (OMP_CLAUSE_CODE (clause
))
1506 case OMP_CLAUSE_REDUCTION
:
1507 case OMP_CLAUSE_IN_REDUCTION
:
1508 case OMP_CLAUSE_TASK_REDUCTION
:
1509 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1512 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1513 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1515 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1516 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1518 tree save_local_var_chain
= info
->new_local_var_chain
;
1519 info
->new_local_var_chain
= NULL
;
1520 gimple_seq
*seq
= &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
);
1521 walk_body (convert_nonlocal_reference_stmt
,
1522 convert_nonlocal_reference_op
, info
, seq
);
1523 if (info
->new_local_var_chain
)
1524 declare_vars (info
->new_local_var_chain
,
1525 gimple_seq_first_stmt (*seq
), false);
1526 info
->new_local_var_chain
= NULL
;
1527 seq
= &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
);
1528 walk_body (convert_nonlocal_reference_stmt
,
1529 convert_nonlocal_reference_op
, info
, seq
);
1530 if (info
->new_local_var_chain
)
1531 declare_vars (info
->new_local_var_chain
,
1532 gimple_seq_first_stmt (*seq
), false);
1533 info
->new_local_var_chain
= save_local_var_chain
;
1534 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1536 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1537 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1542 case OMP_CLAUSE_LASTPRIVATE
:
1544 tree save_local_var_chain
= info
->new_local_var_chain
;
1545 info
->new_local_var_chain
= NULL
;
1546 gimple_seq
*seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
);
1547 walk_body (convert_nonlocal_reference_stmt
,
1548 convert_nonlocal_reference_op
, info
, seq
);
1549 if (info
->new_local_var_chain
)
1550 declare_vars (info
->new_local_var_chain
,
1551 gimple_seq_first_stmt (*seq
), false);
1552 info
->new_local_var_chain
= save_local_var_chain
;
1556 case OMP_CLAUSE_LINEAR
:
1558 tree save_local_var_chain
= info
->new_local_var_chain
;
1559 info
->new_local_var_chain
= NULL
;
1560 gimple_seq
*seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
);
1561 walk_body (convert_nonlocal_reference_stmt
,
1562 convert_nonlocal_reference_op
, info
, seq
);
1563 if (info
->new_local_var_chain
)
1564 declare_vars (info
->new_local_var_chain
,
1565 gimple_seq_first_stmt (*seq
), false);
1566 info
->new_local_var_chain
= save_local_var_chain
;
1577 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1580 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1582 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1583 type
= TREE_TYPE (type
);
1585 if (TYPE_NAME (type
)
1586 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1587 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1588 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1590 while (POINTER_TYPE_P (type
)
1591 || TREE_CODE (type
) == VECTOR_TYPE
1592 || TREE_CODE (type
) == FUNCTION_TYPE
1593 || TREE_CODE (type
) == METHOD_TYPE
)
1594 type
= TREE_TYPE (type
);
1596 if (TREE_CODE (type
) == ARRAY_TYPE
)
1600 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1601 domain
= TYPE_DOMAIN (type
);
1604 t
= TYPE_MIN_VALUE (domain
);
1605 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1606 && decl_function_context (t
) != info
->context
)
1607 get_nonlocal_debug_decl (info
, t
);
1608 t
= TYPE_MAX_VALUE (domain
);
1609 if (t
&& (VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
1610 && decl_function_context (t
) != info
->context
)
1611 get_nonlocal_debug_decl (info
, t
);
1616 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1617 PARM_DECLs that belong to outer functions. This handles statements
1618 that are not handled via the standard recursion done in
1619 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1620 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1621 operands of STMT have been handled by this function. */
1624 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1625 struct walk_stmt_info
*wi
)
1627 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1628 tree save_local_var_chain
;
1629 bitmap save_suppress
;
1630 gimple
*stmt
= gsi_stmt (*gsi
);
1632 switch (gimple_code (stmt
))
1635 /* Don't walk non-local gotos for now. */
1636 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1638 wi
->val_only
= true;
1640 *handled_ops_p
= false;
1645 case GIMPLE_OMP_TEAMS
:
1646 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
1648 save_suppress
= info
->suppress_expansion
;
1649 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
),
1651 walk_body (convert_nonlocal_reference_stmt
,
1652 convert_nonlocal_reference_op
, info
,
1653 gimple_omp_body_ptr (stmt
));
1654 info
->suppress_expansion
= save_suppress
;
1659 case GIMPLE_OMP_PARALLEL
:
1660 case GIMPLE_OMP_TASK
:
1661 save_suppress
= info
->suppress_expansion
;
1662 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1666 decl
= get_chain_decl (info
);
1667 c
= build_omp_clause (gimple_location (stmt
),
1668 OMP_CLAUSE_FIRSTPRIVATE
);
1669 OMP_CLAUSE_DECL (c
) = decl
;
1670 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1671 gimple_omp_taskreg_set_clauses (stmt
, c
);
1674 save_local_var_chain
= info
->new_local_var_chain
;
1675 info
->new_local_var_chain
= NULL
;
1677 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1678 info
, gimple_omp_body_ptr (stmt
));
1680 if (info
->new_local_var_chain
)
1681 declare_vars (info
->new_local_var_chain
,
1682 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1684 info
->new_local_var_chain
= save_local_var_chain
;
1685 info
->suppress_expansion
= save_suppress
;
1688 case GIMPLE_OMP_FOR
:
1689 save_suppress
= info
->suppress_expansion
;
1690 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1691 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1692 convert_nonlocal_reference_stmt
,
1693 convert_nonlocal_reference_op
, info
);
1694 walk_body (convert_nonlocal_reference_stmt
,
1695 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1696 info
->suppress_expansion
= save_suppress
;
1699 case GIMPLE_OMP_SECTIONS
:
1700 save_suppress
= info
->suppress_expansion
;
1701 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1702 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1703 info
, gimple_omp_body_ptr (stmt
));
1704 info
->suppress_expansion
= save_suppress
;
1707 case GIMPLE_OMP_SINGLE
:
1708 save_suppress
= info
->suppress_expansion
;
1709 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1710 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1711 info
, gimple_omp_body_ptr (stmt
));
1712 info
->suppress_expansion
= save_suppress
;
1715 case GIMPLE_OMP_TASKGROUP
:
1716 save_suppress
= info
->suppress_expansion
;
1717 convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
1718 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1719 info
, gimple_omp_body_ptr (stmt
));
1720 info
->suppress_expansion
= save_suppress
;
1723 case GIMPLE_OMP_TARGET
:
1724 if (!is_gimple_omp_offloaded (stmt
))
1726 save_suppress
= info
->suppress_expansion
;
1727 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1729 info
->suppress_expansion
= save_suppress
;
1730 walk_body (convert_nonlocal_reference_stmt
,
1731 convert_nonlocal_reference_op
, info
,
1732 gimple_omp_body_ptr (stmt
));
1735 save_suppress
= info
->suppress_expansion
;
1736 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1740 decl
= get_chain_decl (info
);
1741 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1742 OMP_CLAUSE_DECL (c
) = decl
;
1743 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1744 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1745 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1746 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1749 save_local_var_chain
= info
->new_local_var_chain
;
1750 info
->new_local_var_chain
= NULL
;
1752 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1753 info
, gimple_omp_body_ptr (stmt
));
1755 if (info
->new_local_var_chain
)
1756 declare_vars (info
->new_local_var_chain
,
1757 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1759 info
->new_local_var_chain
= save_local_var_chain
;
1760 info
->suppress_expansion
= save_suppress
;
1763 case GIMPLE_OMP_SECTION
:
1764 case GIMPLE_OMP_MASTER
:
1765 case GIMPLE_OMP_ORDERED
:
1766 case GIMPLE_OMP_SCAN
:
1767 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1768 info
, gimple_omp_body_ptr (stmt
));
1773 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1775 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1776 if (TREE_CODE (var
) == NAMELIST_DECL
)
1778 /* Adjust decls mentioned in NAMELIST_DECL. */
1779 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1783 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1786 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1788 if (decl_function_context (decl
) != info
->context
)
1789 CONSTRUCTOR_ELT (decls
, i
)->value
1790 = get_nonlocal_debug_decl (info
, decl
);
1794 *handled_ops_p
= false;
1798 wi
->val_only
= true;
1800 *handled_ops_p
= false;
1804 if (gimple_clobber_p (stmt
))
1806 tree lhs
= gimple_assign_lhs (stmt
);
1808 && !(TREE_STATIC (lhs
) || DECL_EXTERNAL (lhs
))
1809 && decl_function_context (lhs
) != info
->context
)
1811 gsi_replace (gsi
, gimple_build_nop (), true);
1815 *handled_ops_p
= false;
1819 /* For every other statement that we are not interested in
1820 handling here, let the walker traverse the operands. */
1821 *handled_ops_p
= false;
1825 /* We have handled all of STMT operands, no need to traverse the operands. */
1826 *handled_ops_p
= true;
1831 /* A subroutine of convert_local_reference. Create a local variable
1832 in the parent function with DECL_VALUE_EXPR set to reference the
1833 field in FRAME. This is used both for debug info and in OMP
1837 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1841 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1845 /* Make sure frame_decl gets created. */
1846 (void) get_frame_type (info
);
1847 x
= info
->frame_decl
;
1848 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1850 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1851 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1852 DECL_CONTEXT (new_decl
) = info
->context
;
1853 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1854 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1855 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1856 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1857 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1858 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1859 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1860 if ((TREE_CODE (decl
) == PARM_DECL
1861 || TREE_CODE (decl
) == RESULT_DECL
1863 && DECL_BY_REFERENCE (decl
))
1864 DECL_BY_REFERENCE (new_decl
) = 1;
1866 SET_DECL_VALUE_EXPR (new_decl
, x
);
1867 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1870 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1871 info
->debug_var_chain
= new_decl
;
1873 /* Do not emit debug info twice. */
1874 DECL_IGNORED_P (decl
) = 1;
1880 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1881 and PARM_DECLs that were referenced by inner nested functions.
1882 The rewrite will be a structure reference to the local frame variable. */
1884 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1887 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1889 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1890 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1891 tree t
= *tp
, field
, x
;
1895 switch (TREE_CODE (t
))
1898 /* Non-automatic variables are never processed. */
1899 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1904 if (t
!= info
->frame_decl
&& decl_function_context (t
) == info
->context
)
1906 /* If we copied a pointer to the frame, then the original decl
1907 is used unchanged in the parent function. */
1908 if (use_pointer_in_frame (t
))
1911 /* No need to transform anything if no child references the
1913 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1918 if (bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1919 x
= get_local_debug_decl (info
, t
, field
);
1921 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1926 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1928 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1936 save_val_only
= wi
->val_only
;
1937 wi
->val_only
= false;
1939 wi
->changed
= false;
1940 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1941 wi
->val_only
= save_val_only
;
1943 /* If we converted anything ... */
1948 /* Then the frame decl is now addressable. */
1949 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1951 save_context
= current_function_decl
;
1952 current_function_decl
= info
->context
;
1953 recompute_tree_invariant_for_addr_expr (t
);
1954 current_function_decl
= save_context
;
1956 /* If we are in a context where we only accept values, then
1957 compute the address into a temporary. */
1959 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1968 case ARRAY_RANGE_REF
:
1970 /* Go down this entire nest and just look at the final prefix and
1971 anything that describes the references. Otherwise, we lose track
1972 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1973 save_val_only
= wi
->val_only
;
1974 wi
->val_only
= true;
1976 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1978 if (TREE_CODE (t
) == COMPONENT_REF
)
1979 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1981 else if (TREE_CODE (t
) == ARRAY_REF
1982 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1984 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1986 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1988 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1992 wi
->val_only
= false;
1993 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1994 wi
->val_only
= save_val_only
;
1998 save_val_only
= wi
->val_only
;
1999 wi
->val_only
= true;
2001 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
2003 /* We need to re-fold the MEM_REF as component references as
2004 part of a ADDR_EXPR address are not allowed. But we cannot
2005 fold here, as the chain record type is not yet finalized. */
2006 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
2007 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
2008 info
->mem_refs
->add (tp
);
2009 wi
->val_only
= save_val_only
;
2012 case VIEW_CONVERT_EXPR
:
2013 /* Just request to look at the subtrees, leaving val_only and lhs
2014 untouched. This might actually be for !val_only + lhs, in which
2015 case we don't want to force a replacement by a temporary. */
2020 if (!IS_TYPE_OR_DECL_P (t
))
2023 wi
->val_only
= true;
2032 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
2033 struct walk_stmt_info
*);
2035 /* Helper for convert_local_reference. Convert all the references in
2036 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
2039 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
2041 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2042 bool need_frame
= false, need_stmts
= false;
2043 tree clause
, decl
, *pdecl
;
2045 bitmap new_suppress
;
2047 new_suppress
= BITMAP_GGC_ALLOC ();
2048 bitmap_copy (new_suppress
, info
->suppress_expansion
);
2050 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2053 switch (OMP_CLAUSE_CODE (clause
))
2055 case OMP_CLAUSE_REDUCTION
:
2056 case OMP_CLAUSE_IN_REDUCTION
:
2057 case OMP_CLAUSE_TASK_REDUCTION
:
2058 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2060 if (TREE_CODE (OMP_CLAUSE_DECL (clause
)) == MEM_REF
)
2062 pdecl
= &TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0);
2063 if (TREE_CODE (*pdecl
) == POINTER_PLUS_EXPR
)
2064 pdecl
= &TREE_OPERAND (*pdecl
, 0);
2065 if (TREE_CODE (*pdecl
) == INDIRECT_REF
2066 || TREE_CODE (*pdecl
) == ADDR_EXPR
)
2067 pdecl
= &TREE_OPERAND (*pdecl
, 0);
2069 goto do_decl_clause
;
2071 case OMP_CLAUSE_LASTPRIVATE
:
2072 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
2074 goto do_decl_clause
;
2076 case OMP_CLAUSE_LINEAR
:
2077 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
2079 wi
->val_only
= true;
2081 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
2083 goto do_decl_clause
;
2085 case OMP_CLAUSE_PRIVATE
:
2086 case OMP_CLAUSE_FIRSTPRIVATE
:
2087 case OMP_CLAUSE_COPYPRIVATE
:
2088 case OMP_CLAUSE_SHARED
:
2089 case OMP_CLAUSE_TO_DECLARE
:
2090 case OMP_CLAUSE_LINK
:
2091 case OMP_CLAUSE_USE_DEVICE_PTR
:
2092 case OMP_CLAUSE_USE_DEVICE_ADDR
:
2093 case OMP_CLAUSE_IS_DEVICE_PTR
:
2096 pdecl
= &OMP_CLAUSE_DECL (clause
);
2099 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2101 if (decl_function_context (decl
) == info
->context
2102 && !use_pointer_in_frame (decl
))
2104 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2107 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
2108 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
2109 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
2110 *pdecl
= get_local_debug_decl (info
, decl
, field
);
2116 case OMP_CLAUSE_SCHEDULE
:
2117 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
2120 case OMP_CLAUSE_FINAL
:
2122 case OMP_CLAUSE_NUM_THREADS
:
2123 case OMP_CLAUSE_DEPEND
:
2124 case OMP_CLAUSE_DEVICE
:
2125 case OMP_CLAUSE_NUM_TEAMS
:
2126 case OMP_CLAUSE_THREAD_LIMIT
:
2127 case OMP_CLAUSE_SAFELEN
:
2128 case OMP_CLAUSE_SIMDLEN
:
2129 case OMP_CLAUSE_PRIORITY
:
2130 case OMP_CLAUSE_GRAINSIZE
:
2131 case OMP_CLAUSE_NUM_TASKS
:
2132 case OMP_CLAUSE_HINT
:
2133 case OMP_CLAUSE_NUM_GANGS
:
2134 case OMP_CLAUSE_NUM_WORKERS
:
2135 case OMP_CLAUSE_VECTOR_LENGTH
:
2136 case OMP_CLAUSE_GANG
:
2137 case OMP_CLAUSE_WORKER
:
2138 case OMP_CLAUSE_VECTOR
:
2139 case OMP_CLAUSE_ASYNC
:
2140 case OMP_CLAUSE_WAIT
:
2141 /* Several OpenACC clauses have optional arguments. Check if they
2143 if (OMP_CLAUSE_OPERAND (clause
, 0))
2145 wi
->val_only
= true;
2147 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2151 /* The gang clause accepts two arguments. */
2152 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
2153 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
2155 wi
->val_only
= true;
2157 convert_nonlocal_reference_op
2158 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
2162 case OMP_CLAUSE_DIST_SCHEDULE
:
2163 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
2165 wi
->val_only
= true;
2167 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
2172 case OMP_CLAUSE_MAP
:
2174 case OMP_CLAUSE_FROM
:
2175 if (OMP_CLAUSE_SIZE (clause
))
2177 wi
->val_only
= true;
2179 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
2182 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
2183 goto do_decl_clause
;
2184 wi
->val_only
= true;
2186 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
2190 case OMP_CLAUSE_ALIGNED
:
2191 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
2193 wi
->val_only
= true;
2195 convert_local_reference_op
2196 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
2199 case OMP_CLAUSE_NONTEMPORAL
:
2200 /* Like do_decl_clause, but don't add any suppression. */
2201 decl
= OMP_CLAUSE_DECL (clause
);
2203 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2205 if (decl_function_context (decl
) == info
->context
2206 && !use_pointer_in_frame (decl
))
2208 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2211 OMP_CLAUSE_DECL (clause
)
2212 = get_local_debug_decl (info
, decl
, field
);
2218 case OMP_CLAUSE_NOWAIT
:
2219 case OMP_CLAUSE_ORDERED
:
2220 case OMP_CLAUSE_DEFAULT
:
2221 case OMP_CLAUSE_COPYIN
:
2222 case OMP_CLAUSE_COLLAPSE
:
2223 case OMP_CLAUSE_TILE
:
2224 case OMP_CLAUSE_UNTIED
:
2225 case OMP_CLAUSE_MERGEABLE
:
2226 case OMP_CLAUSE_PROC_BIND
:
2227 case OMP_CLAUSE_NOGROUP
:
2228 case OMP_CLAUSE_THREADS
:
2229 case OMP_CLAUSE_SIMD
:
2230 case OMP_CLAUSE_DEFAULTMAP
:
2231 case OMP_CLAUSE_ORDER
:
2232 case OMP_CLAUSE_SEQ
:
2233 case OMP_CLAUSE_INDEPENDENT
:
2234 case OMP_CLAUSE_AUTO
:
2235 case OMP_CLAUSE_IF_PRESENT
:
2236 case OMP_CLAUSE_FINALIZE
:
2237 case OMP_CLAUSE__CONDTEMP_
:
2238 case OMP_CLAUSE__SCANTEMP_
:
2241 /* The following clause belongs to the OpenACC cache directive, which
2242 is discarded during gimplification. */
2243 case OMP_CLAUSE__CACHE_
:
2244 /* The following clauses are only allowed in the OpenMP declare simd
2245 directive, so not seen here. */
2246 case OMP_CLAUSE_UNIFORM
:
2247 case OMP_CLAUSE_INBRANCH
:
2248 case OMP_CLAUSE_NOTINBRANCH
:
2249 /* The following clauses are only allowed on OpenMP cancel and
2250 cancellation point directives, which at this point have already
2251 been lowered into a function call. */
2252 case OMP_CLAUSE_FOR
:
2253 case OMP_CLAUSE_PARALLEL
:
2254 case OMP_CLAUSE_SECTIONS
:
2255 case OMP_CLAUSE_TASKGROUP
:
2256 /* The following clauses are only added during OMP lowering; nested
2257 function decomposition happens before that. */
2258 case OMP_CLAUSE__LOOPTEMP_
:
2259 case OMP_CLAUSE__REDUCTEMP_
:
2260 case OMP_CLAUSE__SIMDUID_
:
2261 case OMP_CLAUSE__SIMT_
:
2262 /* Anything else. */
2268 info
->suppress_expansion
= new_suppress
;
2271 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
2272 switch (OMP_CLAUSE_CODE (clause
))
2274 case OMP_CLAUSE_REDUCTION
:
2275 case OMP_CLAUSE_IN_REDUCTION
:
2276 case OMP_CLAUSE_TASK_REDUCTION
:
2277 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2280 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
2281 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2283 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2284 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2286 walk_body (convert_local_reference_stmt
,
2287 convert_local_reference_op
, info
,
2288 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
2289 walk_body (convert_local_reference_stmt
,
2290 convert_local_reference_op
, info
,
2291 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
2292 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
2294 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2295 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
2300 case OMP_CLAUSE_LASTPRIVATE
:
2301 walk_body (convert_local_reference_stmt
,
2302 convert_local_reference_op
, info
,
2303 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
2306 case OMP_CLAUSE_LINEAR
:
2307 walk_body (convert_local_reference_stmt
,
2308 convert_local_reference_op
, info
,
2309 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
2320 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2321 and PARM_DECLs that were referenced by inner nested functions.
2322 The rewrite will be a structure reference to the local frame variable. */
2325 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2326 struct walk_stmt_info
*wi
)
2328 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2329 tree save_local_var_chain
;
2330 bitmap save_suppress
;
2331 char save_static_chain_added
;
2332 bool frame_decl_added
;
2333 gimple
*stmt
= gsi_stmt (*gsi
);
2335 switch (gimple_code (stmt
))
2337 case GIMPLE_OMP_TEAMS
:
2338 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2340 save_suppress
= info
->suppress_expansion
;
2341 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2342 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2343 info
, gimple_omp_body_ptr (stmt
));
2344 info
->suppress_expansion
= save_suppress
;
2349 case GIMPLE_OMP_PARALLEL
:
2350 case GIMPLE_OMP_TASK
:
2351 save_suppress
= info
->suppress_expansion
;
2352 frame_decl_added
= false;
2353 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
2356 tree c
= build_omp_clause (gimple_location (stmt
),
2358 (void) get_frame_type (info
);
2359 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2360 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2361 gimple_omp_taskreg_set_clauses (stmt
, c
);
2362 info
->static_chain_added
|= 4;
2363 frame_decl_added
= true;
2366 save_local_var_chain
= info
->new_local_var_chain
;
2367 save_static_chain_added
= info
->static_chain_added
;
2368 info
->new_local_var_chain
= NULL
;
2369 info
->static_chain_added
= 0;
2371 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2372 gimple_omp_body_ptr (stmt
));
2374 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2376 tree c
= build_omp_clause (gimple_location (stmt
),
2378 (void) get_frame_type (info
);
2379 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2380 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2381 info
->static_chain_added
|= 4;
2382 gimple_omp_taskreg_set_clauses (stmt
, c
);
2384 if (info
->new_local_var_chain
)
2385 declare_vars (info
->new_local_var_chain
,
2386 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2387 info
->new_local_var_chain
= save_local_var_chain
;
2388 info
->suppress_expansion
= save_suppress
;
2389 info
->static_chain_added
|= save_static_chain_added
;
2392 case GIMPLE_OMP_FOR
:
2393 save_suppress
= info
->suppress_expansion
;
2394 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2395 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2396 convert_local_reference_stmt
,
2397 convert_local_reference_op
, info
);
2398 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2399 info
, gimple_omp_body_ptr (stmt
));
2400 info
->suppress_expansion
= save_suppress
;
2403 case GIMPLE_OMP_SECTIONS
:
2404 save_suppress
= info
->suppress_expansion
;
2405 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2406 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2407 info
, gimple_omp_body_ptr (stmt
));
2408 info
->suppress_expansion
= save_suppress
;
2411 case GIMPLE_OMP_SINGLE
:
2412 save_suppress
= info
->suppress_expansion
;
2413 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2414 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2415 info
, gimple_omp_body_ptr (stmt
));
2416 info
->suppress_expansion
= save_suppress
;
2419 case GIMPLE_OMP_TASKGROUP
:
2420 save_suppress
= info
->suppress_expansion
;
2421 convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt
), wi
);
2422 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2423 info
, gimple_omp_body_ptr (stmt
));
2424 info
->suppress_expansion
= save_suppress
;
2427 case GIMPLE_OMP_TARGET
:
2428 if (!is_gimple_omp_offloaded (stmt
))
2430 save_suppress
= info
->suppress_expansion
;
2431 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2432 info
->suppress_expansion
= save_suppress
;
2433 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2434 info
, gimple_omp_body_ptr (stmt
));
2437 save_suppress
= info
->suppress_expansion
;
2438 frame_decl_added
= false;
2439 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2441 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2442 (void) get_frame_type (info
);
2443 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2444 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2445 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2446 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2447 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2448 info
->static_chain_added
|= 4;
2449 frame_decl_added
= true;
2452 save_local_var_chain
= info
->new_local_var_chain
;
2453 save_static_chain_added
= info
->static_chain_added
;
2454 info
->new_local_var_chain
= NULL
;
2455 info
->static_chain_added
= 0;
2457 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2458 gimple_omp_body_ptr (stmt
));
2460 if ((info
->static_chain_added
& 4) != 0 && !frame_decl_added
)
2462 tree c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2463 (void) get_frame_type (info
);
2464 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2465 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2466 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2467 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2468 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2469 info
->static_chain_added
|= 4;
2472 if (info
->new_local_var_chain
)
2473 declare_vars (info
->new_local_var_chain
,
2474 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2475 info
->new_local_var_chain
= save_local_var_chain
;
2476 info
->suppress_expansion
= save_suppress
;
2477 info
->static_chain_added
|= save_static_chain_added
;
2480 case GIMPLE_OMP_SECTION
:
2481 case GIMPLE_OMP_MASTER
:
2482 case GIMPLE_OMP_ORDERED
:
2483 case GIMPLE_OMP_SCAN
:
2484 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2485 info
, gimple_omp_body_ptr (stmt
));
2489 wi
->val_only
= true;
2491 *handled_ops_p
= false;
2495 if (gimple_clobber_p (stmt
))
2497 tree lhs
= gimple_assign_lhs (stmt
);
2499 && !use_pointer_in_frame (lhs
)
2500 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2502 gsi_replace (gsi
, gimple_build_nop (), true);
2506 *handled_ops_p
= false;
2510 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2512 var
= DECL_CHAIN (var
))
2513 if (TREE_CODE (var
) == NAMELIST_DECL
)
2515 /* Adjust decls mentioned in NAMELIST_DECL. */
2516 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2520 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2523 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2525 if (decl_function_context (decl
) == info
->context
2526 && !use_pointer_in_frame (decl
))
2528 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2531 CONSTRUCTOR_ELT (decls
, i
)->value
2532 = get_local_debug_decl (info
, decl
, field
);
2538 *handled_ops_p
= false;
2542 /* For every other statement that we are not interested in
2543 handling here, let the walker traverse the operands. */
2544 *handled_ops_p
= false;
2548 /* Indicate that we have handled all the operands ourselves. */
2549 *handled_ops_p
= true;
2554 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2555 that reference labels from outer functions. The rewrite will be a
2556 call to __builtin_nonlocal_goto. */
2559 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2560 struct walk_stmt_info
*wi
)
2562 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2563 tree label
, new_label
, target_context
, x
, field
;
2565 gimple
*stmt
= gsi_stmt (*gsi
);
2567 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2569 *handled_ops_p
= false;
2573 label
= gimple_goto_dest (stmt
);
2574 if (TREE_CODE (label
) != LABEL_DECL
)
2576 *handled_ops_p
= false;
2580 target_context
= decl_function_context (label
);
2581 if (target_context
== info
->context
)
2583 *handled_ops_p
= false;
2587 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2590 /* The original user label may also be use for a normal goto, therefore
2591 we must create a new label that will actually receive the abnormal
2592 control transfer. This new label will be marked LABEL_NONLOCAL; this
2593 mark will trigger proper behavior in the cfg, as well as cause the
2594 (hairy target-specific) non-local goto receiver code to be generated
2595 when we expand rtl. Enter this association into var_map so that we
2596 can insert the new label into the IL during a second pass. */
2597 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2600 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2601 DECL_NONLOCAL (new_label
) = 1;
2607 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2608 field
= get_nl_goto_field (i
);
2609 x
= get_frame_field (info
, target_context
, field
, gsi
);
2611 x
= gsi_gimplify_val (info
, x
, gsi
);
2612 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2613 2, build_addr (new_label
), x
);
2614 gsi_replace (gsi
, call
, false);
2616 /* We have handled all of STMT's operands, no need to keep going. */
2617 *handled_ops_p
= true;
2622 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2623 are referenced via nonlocal goto from a nested function. The rewrite
2624 will involve installing a newly generated DECL_NONLOCAL label, and
2625 (potentially) a branch around the rtl gunk that is assumed to be
2626 attached to such a label. */
2629 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2630 struct walk_stmt_info
*wi
)
2632 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2633 tree label
, new_label
;
2634 gimple_stmt_iterator tmp_gsi
;
2635 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2639 *handled_ops_p
= false;
2643 label
= gimple_label_label (stmt
);
2645 tree
*slot
= info
->var_map
->get (label
);
2648 *handled_ops_p
= false;
2652 /* If there's any possibility that the previous statement falls through,
2653 then we must branch around the new non-local label. */
2655 gsi_prev (&tmp_gsi
);
2656 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2658 gimple
*stmt
= gimple_build_goto (label
);
2659 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2662 new_label
= (tree
) *slot
;
2663 stmt
= gimple_build_label (new_label
);
2664 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2666 *handled_ops_p
= true;
2671 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2672 of nested functions that require the use of trampolines. The rewrite
2673 will involve a reference a trampoline generated for the occasion. */
2676 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2678 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2679 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2680 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2685 switch (TREE_CODE (t
))
2689 T.1 = &CHAIN->tramp;
2690 T.2 = __builtin_adjust_trampoline (T.1);
2691 T.3 = (func_type)T.2;
2694 decl
= TREE_OPERAND (t
, 0);
2695 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2698 /* Only need to process nested functions. */
2699 target_context
= decl_function_context (decl
);
2700 if (!target_context
)
2703 /* If the nested function doesn't use a static chain, then
2704 it doesn't need a trampoline. */
2705 if (!DECL_STATIC_CHAIN (decl
))
2708 /* If we don't want a trampoline, then don't build one. */
2709 if (TREE_NO_TRAMPOLINE (t
))
2712 /* Lookup the immediate parent of the callee, as that's where
2713 we need to insert the trampoline. */
2714 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2717 /* Decide whether to generate a descriptor or a trampoline. */
2718 descr
= FUNC_ADDR_BY_DESCRIPTOR (t
) && !flag_trampolines
;
2721 x
= lookup_descr_for_decl (i
, decl
, INSERT
);
2723 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2725 /* Compute the address of the field holding the trampoline. */
2726 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2728 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2730 /* Do machine-specific ugliness. Normally this will involve
2731 computing extra alignment, but it can really be anything. */
2733 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR
);
2735 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2736 call
= gimple_build_call (builtin
, 1, x
);
2737 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2739 /* Cast back to the proper function type. */
2740 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2741 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2747 if (!IS_TYPE_OR_DECL_P (t
))
2756 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2757 to addresses of nested functions that require the use of
2758 trampolines. The rewrite will involve a reference a trampoline
2759 generated for the occasion. */
2762 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2763 struct walk_stmt_info
*wi
)
2765 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2766 gimple
*stmt
= gsi_stmt (*gsi
);
2768 switch (gimple_code (stmt
))
2772 /* Only walk call arguments, lest we generate trampolines for
2774 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2775 for (i
= 0; i
< nargs
; i
++)
2776 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2781 case GIMPLE_OMP_TEAMS
:
2782 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2784 *handled_ops_p
= false;
2789 case GIMPLE_OMP_TARGET
:
2790 if (!is_gimple_omp_offloaded (stmt
))
2792 *handled_ops_p
= false;
2796 case GIMPLE_OMP_PARALLEL
:
2797 case GIMPLE_OMP_TASK
:
2800 tree save_local_var_chain
= info
->new_local_var_chain
;
2801 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2802 info
->new_local_var_chain
= NULL
;
2803 char save_static_chain_added
= info
->static_chain_added
;
2804 info
->static_chain_added
= 0;
2805 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2806 info
, gimple_omp_body_ptr (stmt
));
2807 if (info
->new_local_var_chain
)
2808 declare_vars (info
->new_local_var_chain
,
2809 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2811 for (int i
= 0; i
< 2; i
++)
2814 if ((info
->static_chain_added
& (1 << i
)) == 0)
2816 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2817 /* Don't add CHAIN.* or FRAME.* twice. */
2818 for (c
= gimple_omp_taskreg_clauses (stmt
);
2820 c
= OMP_CLAUSE_CHAIN (c
))
2821 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2822 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2823 && OMP_CLAUSE_DECL (c
) == decl
)
2825 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2827 c
= build_omp_clause (gimple_location (stmt
),
2828 i
? OMP_CLAUSE_FIRSTPRIVATE
2829 : OMP_CLAUSE_SHARED
);
2830 OMP_CLAUSE_DECL (c
) = decl
;
2831 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2832 gimple_omp_taskreg_set_clauses (stmt
, c
);
2836 c
= build_omp_clause (gimple_location (stmt
),
2838 OMP_CLAUSE_DECL (c
) = decl
;
2839 OMP_CLAUSE_SET_MAP_KIND (c
,
2840 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2841 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2842 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2843 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2847 info
->new_local_var_chain
= save_local_var_chain
;
2848 info
->static_chain_added
|= save_static_chain_added
;
2853 *handled_ops_p
= false;
2857 *handled_ops_p
= true;
2863 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2864 that reference nested functions to make sure that the static chain
2865 is set up properly for the call. */
2868 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2869 struct walk_stmt_info
*wi
)
2871 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2872 tree decl
, target_context
;
2873 char save_static_chain_added
;
2875 gimple
*stmt
= gsi_stmt (*gsi
);
2877 switch (gimple_code (stmt
))
2880 if (gimple_call_chain (stmt
))
2882 decl
= gimple_call_fndecl (stmt
);
2885 target_context
= decl_function_context (decl
);
2886 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2888 struct nesting_info
*i
= info
;
2889 while (i
&& i
->context
!= target_context
)
2891 /* If none of the outer contexts is the target context, this means
2892 that the function is called in a wrong context. */
2894 internal_error ("%s from %s called in %s",
2895 IDENTIFIER_POINTER (DECL_NAME (decl
)),
2896 IDENTIFIER_POINTER (DECL_NAME (target_context
)),
2897 IDENTIFIER_POINTER (DECL_NAME (info
->context
)));
2899 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2900 get_static_chain (info
, target_context
,
2902 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2906 case GIMPLE_OMP_TEAMS
:
2907 if (!gimple_omp_teams_host (as_a
<gomp_teams
*> (stmt
)))
2909 walk_body (convert_gimple_call
, NULL
, info
,
2910 gimple_omp_body_ptr (stmt
));
2915 case GIMPLE_OMP_PARALLEL
:
2916 case GIMPLE_OMP_TASK
:
2917 save_static_chain_added
= info
->static_chain_added
;
2918 info
->static_chain_added
= 0;
2919 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2920 for (i
= 0; i
< 2; i
++)
2923 if ((info
->static_chain_added
& (1 << i
)) == 0)
2925 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2926 /* Don't add CHAIN.* or FRAME.* twice. */
2927 for (c
= gimple_omp_taskreg_clauses (stmt
);
2929 c
= OMP_CLAUSE_CHAIN (c
))
2930 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2931 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2932 && OMP_CLAUSE_DECL (c
) == decl
)
2936 c
= build_omp_clause (gimple_location (stmt
),
2937 i
? OMP_CLAUSE_FIRSTPRIVATE
2938 : OMP_CLAUSE_SHARED
);
2939 OMP_CLAUSE_DECL (c
) = decl
;
2940 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2941 gimple_omp_taskreg_set_clauses (stmt
, c
);
2944 info
->static_chain_added
|= save_static_chain_added
;
2947 case GIMPLE_OMP_TARGET
:
2948 if (!is_gimple_omp_offloaded (stmt
))
2950 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2953 save_static_chain_added
= info
->static_chain_added
;
2954 info
->static_chain_added
= 0;
2955 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2956 for (i
= 0; i
< 2; i
++)
2959 if ((info
->static_chain_added
& (1 << i
)) == 0)
2961 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2962 /* Don't add CHAIN.* or FRAME.* twice. */
2963 for (c
= gimple_omp_target_clauses (stmt
);
2965 c
= OMP_CLAUSE_CHAIN (c
))
2966 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2967 && OMP_CLAUSE_DECL (c
) == decl
)
2971 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2972 OMP_CLAUSE_DECL (c
) = decl
;
2973 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2974 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2975 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2976 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2980 info
->static_chain_added
|= save_static_chain_added
;
2983 case GIMPLE_OMP_FOR
:
2984 walk_body (convert_gimple_call
, NULL
, info
,
2985 gimple_omp_for_pre_body_ptr (stmt
));
2987 case GIMPLE_OMP_SECTIONS
:
2988 case GIMPLE_OMP_SECTION
:
2989 case GIMPLE_OMP_SINGLE
:
2990 case GIMPLE_OMP_MASTER
:
2991 case GIMPLE_OMP_TASKGROUP
:
2992 case GIMPLE_OMP_ORDERED
:
2993 case GIMPLE_OMP_SCAN
:
2994 case GIMPLE_OMP_CRITICAL
:
2995 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2999 /* Keep looking for other operands. */
3000 *handled_ops_p
= false;
3004 *handled_ops_p
= true;
3008 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
3009 call expressions. At the same time, determine if a nested function
3010 actually uses its static chain; if not, remember that. */
3013 convert_all_function_calls (struct nesting_info
*root
)
3015 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
3016 struct nesting_info
*n
;
3018 /* First, optimistically clear static_chain for all decls that haven't
3019 used the static chain already for variable access. But always create
3020 it if not optimizing. This makes it possible to reconstruct the static
3021 nesting tree at run time and thus to resolve up-level references from
3022 within the debugger. */
3023 FOR_EACH_NEST_INFO (n
, root
)
3027 tree decl
= n
->context
;
3031 (void) get_frame_type (n
);
3033 (void) get_chain_decl (n
);
3035 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
3037 DECL_STATIC_CHAIN (decl
) = 0;
3038 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3039 fprintf (dump_file
, "Guessing no static-chain for %s\n",
3040 lang_hooks
.decl_printable_name (decl
, 2));
3043 DECL_STATIC_CHAIN (decl
) = 1;
3044 chain_count
+= DECL_STATIC_CHAIN (decl
);
3047 FOR_EACH_NEST_INFO (n
, root
)
3050 tree decl
= n
->context
;
3051 tree alias
= thunk_info::get (cgraph_node::get (decl
))->alias
;
3052 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
3055 /* Walk the functions and perform transformations. Note that these
3056 transformations can induce new uses of the static chain, which in turn
3057 require re-examining all users of the decl. */
3058 /* ??? It would make sense to try to use the call graph to speed this up,
3059 but the call graph hasn't really been built yet. Even if it did, we
3060 would still need to iterate in this loop since address-of references
3061 wouldn't show up in the callgraph anyway. */
3065 old_chain_count
= chain_count
;
3069 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3070 fputc ('\n', dump_file
);
3072 FOR_EACH_NEST_INFO (n
, root
)
3076 tree decl
= n
->context
;
3077 walk_function (convert_tramp_reference_stmt
,
3078 convert_tramp_reference_op
, n
);
3079 walk_function (convert_gimple_call
, NULL
, n
);
3080 chain_count
+= DECL_STATIC_CHAIN (decl
);
3083 FOR_EACH_NEST_INFO (n
, root
)
3086 tree decl
= n
->context
;
3087 tree alias
= thunk_info::get (cgraph_node::get (decl
))->alias
;
3088 DECL_STATIC_CHAIN (decl
) = DECL_STATIC_CHAIN (alias
);
3091 while (chain_count
!= old_chain_count
);
3093 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3094 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
3098 struct nesting_copy_body_data
3101 struct nesting_info
*root
;
3104 /* A helper subroutine for debug_var_chain type remapping. */
3107 nesting_copy_decl (tree decl
, copy_body_data
*id
)
3109 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
3110 tree
*slot
= nid
->root
->var_map
->get (decl
);
3113 return (tree
) *slot
;
3115 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
3117 tree new_decl
= copy_decl_no_change (decl
, id
);
3118 DECL_ORIGINAL_TYPE (new_decl
)
3119 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
3124 || TREE_CODE (decl
) == PARM_DECL
3125 || TREE_CODE (decl
) == RESULT_DECL
)
3128 return copy_decl_no_change (decl
, id
);
3131 /* A helper function for remap_vla_decls. See if *TP contains
3132 some remapped variables. */
3135 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
3137 struct nesting_info
*root
= (struct nesting_info
*) data
;
3143 tree
*slot
= root
->var_map
->get (t
);
3151 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3155 remap_vla_decls (tree block
, struct nesting_info
*root
)
3157 tree var
, subblock
, val
, type
;
3158 struct nesting_copy_body_data id
;
3160 for (subblock
= BLOCK_SUBBLOCKS (block
);
3162 subblock
= BLOCK_CHAIN (subblock
))
3163 remap_vla_decls (subblock
, root
);
3165 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3166 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3168 val
= DECL_VALUE_EXPR (var
);
3169 type
= TREE_TYPE (var
);
3171 if (!(TREE_CODE (val
) == INDIRECT_REF
3172 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
3173 && variably_modified_type_p (type
, NULL
)))
3176 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
3177 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3181 if (var
== NULL_TREE
)
3184 memset (&id
, 0, sizeof (id
));
3185 id
.cb
.copy_decl
= nesting_copy_decl
;
3186 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3189 for (; var
; var
= DECL_CHAIN (var
))
3190 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3192 struct nesting_info
*i
;
3195 val
= DECL_VALUE_EXPR (var
);
3196 type
= TREE_TYPE (var
);
3198 if (!(TREE_CODE (val
) == INDIRECT_REF
3199 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
3200 && variably_modified_type_p (type
, NULL
)))
3203 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
3204 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
3207 context
= decl_function_context (var
);
3208 for (i
= root
; i
; i
= i
->outer
)
3209 if (i
->context
== context
)
3215 /* Fully expand value expressions. This avoids having debug variables
3216 only referenced from them and that can be swept during GC. */
3219 tree t
= (tree
) *slot
;
3220 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
3221 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
3224 id
.cb
.src_fn
= i
->context
;
3225 id
.cb
.dst_fn
= i
->context
;
3226 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3228 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
3229 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3231 newt
= TREE_TYPE (newt
);
3232 type
= TREE_TYPE (type
);
3234 if (TYPE_NAME (newt
)
3235 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3236 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3238 && TYPE_NAME (newt
) == TYPE_NAME (type
))
3239 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3241 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
3242 if (val
!= DECL_VALUE_EXPR (var
))
3243 SET_DECL_VALUE_EXPR (var
, val
);
3246 delete id
.cb
.decl_map
;
3249 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3253 fixup_vla_decls (tree block
)
3255 for (tree var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
3256 if (VAR_P (var
) && DECL_HAS_VALUE_EXPR_P (var
))
3258 tree val
= DECL_VALUE_EXPR (var
);
3260 if (!(TREE_CODE (val
) == INDIRECT_REF
3261 && VAR_P (TREE_OPERAND (val
, 0))
3262 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val
, 0))))
3265 /* Fully expand value expressions. This avoids having debug variables
3266 only referenced from them and that can be swept during GC. */
3267 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
),
3268 DECL_VALUE_EXPR (TREE_OPERAND (val
, 0)));
3269 SET_DECL_VALUE_EXPR (var
, val
);
3272 for (tree sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= BLOCK_CHAIN (sub
))
3273 fixup_vla_decls (sub
);
3276 /* Fold the MEM_REF *E. */
3278 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
3280 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
3281 *ref_p
= fold (*ref_p
);
3285 /* Given DECL, a nested function, build an initialization call for FIELD,
3286 the trampoline or descriptor for DECL, using FUNC as the function. */
3289 build_init_call_stmt (struct nesting_info
*info
, tree decl
, tree field
,
3292 tree arg1
, arg2
, arg3
, x
;
3294 gcc_assert (DECL_STATIC_CHAIN (decl
));
3295 arg3
= build_addr (info
->frame_decl
);
3297 arg2
= build_addr (decl
);
3299 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3300 info
->frame_decl
, field
, NULL_TREE
);
3301 arg1
= build_addr (x
);
3303 return gimple_build_call (func
, 3, arg1
, arg2
, arg3
);
3306 /* Do "everything else" to clean up or complete state collected by the various
3307 walking passes -- create a field to hold the frame base address, lay out the
3308 types and decls, generate code to initialize the frame decl, store critical
3309 expressions in the struct function for rtl to find. */
3312 finalize_nesting_tree_1 (struct nesting_info
*root
)
3314 gimple_seq stmt_list
= NULL
;
3316 tree context
= root
->context
;
3317 struct function
*sf
;
3322 /* If we created a non-local frame type or decl, we need to lay them
3323 out at this time. */
3324 if (root
->frame_type
)
3326 /* Debugging information needs to compute the frame base address of the
3327 parent frame out of the static chain from the nested frame.
3329 The static chain is the address of the FRAME record, so one could
3330 imagine it would be possible to compute the frame base address just
3331 adding a constant offset to this address. Unfortunately, this is not
3332 possible: if the FRAME object has alignment constraints that are
3333 stronger than the stack, then the offset between the frame base and
3334 the FRAME object will be dynamic.
3336 What we do instead is to append a field to the FRAME object that holds
3337 the frame base address: then debug info just has to fetch this
3340 /* Debugging information will refer to the CFA as the frame base
3341 address: we will do the same here. */
3342 const tree frame_addr_fndecl
3343 = builtin_decl_explicit (BUILT_IN_DWARF_CFA
);
3345 /* Create a field in the FRAME record to hold the frame base address for
3346 this stack frame. Since it will be used only by the debugger, put it
3347 at the end of the record in order not to shift all other offsets. */
3348 tree fb_decl
= make_node (FIELD_DECL
);
3350 DECL_NAME (fb_decl
) = get_identifier ("FRAME_BASE.PARENT");
3351 TREE_TYPE (fb_decl
) = ptr_type_node
;
3352 TREE_ADDRESSABLE (fb_decl
) = 1;
3353 DECL_CONTEXT (fb_decl
) = root
->frame_type
;
3354 TYPE_FIELDS (root
->frame_type
) = chainon (TYPE_FIELDS (root
->frame_type
),
3357 /* In some cases the frame type will trigger the -Wpadded warning.
3358 This is not helpful; suppress it. */
3359 int save_warn_padded
= warn_padded
;
3361 layout_type (root
->frame_type
);
3362 warn_padded
= save_warn_padded
;
3363 layout_decl (root
->frame_decl
, 0);
3365 /* Initialize the frame base address field. If the builtin we need is
3366 not available, set it to NULL so that debugging information does not
3368 tree fb_ref
= build3 (COMPONENT_REF
, TREE_TYPE (fb_decl
),
3369 root
->frame_decl
, fb_decl
, NULL_TREE
);
3372 if (frame_addr_fndecl
!= NULL_TREE
)
3374 gcall
*fb_gimple
= gimple_build_call (frame_addr_fndecl
, 1,
3376 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3378 fb_tmp
= init_tmp_var_with_call (root
, &gsi
, fb_gimple
);
3381 fb_tmp
= build_int_cst (TREE_TYPE (fb_ref
), 0);
3382 gimple_seq_add_stmt (&stmt_list
,
3383 gimple_build_assign (fb_ref
, fb_tmp
));
3385 declare_vars (root
->frame_decl
,
3386 gimple_seq_first_stmt (gimple_body (context
)), true);
3389 /* If any parameters were referenced non-locally, then we need to insert
3390 a copy or a pointer. */
3391 if (root
->any_parm_remapped
)
3394 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
3398 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
3402 if (use_pointer_in_frame (p
))
3407 /* If the assignment is from a non-register the stmt is
3408 not valid gimple. Make it so by using a temporary instead. */
3409 if (!is_gimple_reg (x
)
3410 && is_gimple_reg_type (TREE_TYPE (x
)))
3412 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
3413 x
= init_tmp_var (root
, x
, &gsi
);
3416 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
3417 root
->frame_decl
, field
, NULL_TREE
);
3418 stmt
= gimple_build_assign (y
, x
);
3419 gimple_seq_add_stmt (&stmt_list
, stmt
);
3423 /* If a chain_field was created, then it needs to be initialized
3425 if (root
->chain_field
)
3427 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
3428 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
3429 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
3430 gimple_seq_add_stmt (&stmt_list
, stmt
);
3433 /* If trampolines were created, then we need to initialize them. */
3434 if (root
->any_tramp_created
)
3436 struct nesting_info
*i
;
3437 for (i
= root
->inner
; i
; i
= i
->next
)
3441 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
3445 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
3446 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3447 gimple_seq_add_stmt (&stmt_list
, stmt
);
3451 /* If descriptors were created, then we need to initialize them. */
3452 if (root
->any_descr_created
)
3454 struct nesting_info
*i
;
3455 for (i
= root
->inner
; i
; i
= i
->next
)
3459 field
= lookup_descr_for_decl (root
, i
->context
, NO_INSERT
);
3463 x
= builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR
);
3464 stmt
= build_init_call_stmt (root
, i
->context
, field
, x
);
3465 gimple_seq_add_stmt (&stmt_list
, stmt
);
3469 /* If we created initialization statements, insert them. */
3473 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
3474 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
3475 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
3476 gimple_bind_set_body (bind
, stmt_list
);
3479 /* If a chain_decl was created, then it needs to be registered with
3480 struct function so that it gets initialized from the static chain
3481 register at the beginning of the function. */
3482 sf
= DECL_STRUCT_FUNCTION (root
->context
);
3483 sf
->static_chain_decl
= root
->chain_decl
;
3485 /* Similarly for the non-local goto save area. */
3486 if (root
->nl_goto_field
)
3488 sf
->nonlocal_goto_save_area
3489 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
3490 sf
->has_nonlocal_label
= 1;
3493 /* Make sure all new local variables get inserted into the
3494 proper BIND_EXPR. */
3495 if (root
->new_local_var_chain
)
3496 declare_vars (root
->new_local_var_chain
,
3497 gimple_seq_first_stmt (gimple_body (root
->context
)),
3500 if (root
->debug_var_chain
)
3505 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
3507 for (debug_var
= root
->debug_var_chain
; debug_var
;
3508 debug_var
= DECL_CHAIN (debug_var
))
3509 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3512 /* If there are any debug decls with variable length types,
3513 remap those types using other debug_var_chain variables. */
3516 struct nesting_copy_body_data id
;
3518 memset (&id
, 0, sizeof (id
));
3519 id
.cb
.copy_decl
= nesting_copy_decl
;
3520 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3523 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
3524 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3526 tree type
= TREE_TYPE (debug_var
);
3527 tree newt
, t
= type
;
3528 struct nesting_info
*i
;
3530 for (i
= root
; i
; i
= i
->outer
)
3531 if (variably_modified_type_p (type
, i
->context
))
3537 id
.cb
.src_fn
= i
->context
;
3538 id
.cb
.dst_fn
= i
->context
;
3539 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3541 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
3542 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3544 newt
= TREE_TYPE (newt
);
3547 if (TYPE_NAME (newt
)
3548 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3549 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3551 && TYPE_NAME (newt
) == TYPE_NAME (t
))
3552 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3555 delete id
.cb
.decl_map
;
3558 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
3559 if (gimple_bind_block (scope
))
3560 declare_vars (root
->debug_var_chain
, scope
, true);
3562 BLOCK_VARS (DECL_INITIAL (root
->context
))
3563 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
3564 root
->debug_var_chain
);
3567 fixup_vla_decls (DECL_INITIAL (root
->context
));
3569 /* Fold the rewritten MEM_REF trees. */
3570 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
3572 /* Dump the translated tree function. */
3575 fputs ("\n\n", dump_file
);
3576 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
3581 finalize_nesting_tree (struct nesting_info
*root
)
3583 struct nesting_info
*n
;
3584 FOR_EACH_NEST_INFO (n
, root
)
3585 finalize_nesting_tree_1 (n
);
3588 /* Unnest the nodes and pass them to cgraph. */
3591 unnest_nesting_tree_1 (struct nesting_info
*root
)
3593 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
3595 /* For nested functions update the cgraph to reflect unnesting.
3596 We also delay finalizing of these functions up to this point. */
3597 if (nested_function_info::get (node
)->origin
)
3599 unnest_function (node
);
3601 cgraph_node::finalize_function (root
->context
, true);
3606 unnest_nesting_tree (struct nesting_info
*root
)
3608 struct nesting_info
*n
;
3609 FOR_EACH_NEST_INFO (n
, root
)
3610 unnest_nesting_tree_1 (n
);
3613 /* Free the data structures allocated during this pass. */
3616 free_nesting_tree (struct nesting_info
*root
)
3618 struct nesting_info
*node
, *next
;
3620 node
= iter_nestinfo_start (root
);
3623 next
= iter_nestinfo_next (node
);
3624 delete node
->var_map
;
3625 delete node
->field_map
;
3626 delete node
->mem_refs
;
3633 /* Gimplify a function and all its nested functions. */
3635 gimplify_all_functions (struct cgraph_node
*root
)
3637 struct cgraph_node
*iter
;
3638 if (!gimple_body (root
->decl
))
3639 gimplify_function_tree (root
->decl
);
3640 for (iter
= first_nested_function (root
); iter
;
3641 iter
= next_nested_function (iter
))
3643 gimplify_all_functions (iter
);
3646 /* Main entry point for this pass. Process FNDECL and all of its nested
3647 subroutines and turn them into something less tightly bound. */
3650 lower_nested_functions (tree fndecl
)
3652 struct cgraph_node
*cgn
;
3653 struct nesting_info
*root
;
3655 /* If there are no nested functions, there's nothing to do. */
3656 cgn
= cgraph_node::get (fndecl
);
3657 if (!first_nested_function (cgn
))
3660 gimplify_all_functions (cgn
);
3662 set_dump_file (dump_begin (TDI_nested
, &dump_flags
));
3664 fprintf (dump_file
, "\n;; Function %s\n\n",
3665 lang_hooks
.decl_printable_name (fndecl
, 2));
3667 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3668 root
= create_nesting_tree (cgn
);
3670 walk_all_functions (convert_nonlocal_reference_stmt
,
3671 convert_nonlocal_reference_op
,
3673 walk_all_functions (convert_local_reference_stmt
,
3674 convert_local_reference_op
,
3676 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3677 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
3679 convert_all_function_calls (root
);
3680 finalize_nesting_tree (root
);
3681 unnest_nesting_tree (root
);
3683 free_nesting_tree (root
);
3684 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
3688 dump_end (TDI_nested
, dump_file
);
3689 set_dump_file (NULL
);
3693 #include "gt-tree-nested.h"