1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
28 #include "stringpool.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
39 #include "langhooks.h"
40 #include "gimple-low.h"
41 #include "gomp-constants.h"
44 /* The object of this pass is to lower the representation of a set of nested
45 functions in order to expose all of the gory details of the various
46 nonlocal references. We want to do this sooner rather than later, in
47 order to give us more freedom in emitting all of the functions in question.
49 Back in olden times, when gcc was young, we developed an insanely
50 complicated scheme whereby variables which were referenced nonlocally
51 were forced to live in the stack of the declaring function, and then
52 the nested functions magically discovered where these variables were
53 placed. In order for this scheme to function properly, it required
54 that the outer function be partially expanded, then we switch to
55 compiling the inner function, and once done with those we switch back
56 to compiling the outer function. Such delicate ordering requirements
57 make it difficult to do whole translation unit optimizations
58 involving such functions.
60 The implementation here is much more direct. Everything that can be
61 referenced by an inner function is a member of an explicitly created
62 structure herein called the "nonlocal frame struct". The incoming
63 static chain for a nested function is a pointer to this struct in
64 the parent. In this way, we settle on known offsets from a known
65 base, and so are decoupled from the logic that places objects in the
66 function's stack frame. More importantly, we don't have to wait for
67 that to happen -- since the compilation of the inner function is no
68 longer tied to a real stack frame, the nonlocal frame struct can be
69 allocated anywhere. Which means that the outer function is now
72 Theory of operation here is very simple. Iterate over all the
73 statements in all the functions (depth first) several times,
74 allocating structures and fields on demand. In general we want to
75 examine inner functions first, so that we can avoid making changes
76 to outer functions which are unnecessary.
78 The order of the passes matters a bit, in that later passes will be
79 skipped if it is discovered that the functions don't actually interact
80 at all. That is, they're nested in the lexical sense but could have
81 been written as independent functions without change. */
/* NOTE(review): fragment of struct nesting_info; members referenced by the
   functions below (context, frame_type, frame_decl, chain_decl, chain_field,
   nl_goto_field, debug_var_chain) are not visible in this extraction —
   confirm against the full source.  */
/* Tree links: enclosing function, first nested function, next sibling.  */
86 struct nesting_info
*outer
;
87 struct nesting_info
*inner
;
88 struct nesting_info
*next
;
/* Map from an outer decl to the FIELD_DECL created for it in the frame
   record; see lookup_field_for_decl.  */
90 hash_map
<tree
, tree
> *field_map
;
/* Map used both for local replacement decls (get_nonlocal_debug_decl) and
   trampoline fields (lookup_tramp_for_decl).  */
91 hash_map
<tree
, tree
> *var_map
;
/* Set of memory-reference slots; its use is not visible in this chunk.  */
92 hash_set
<tree
*> *mem_refs
;
/* Bitmap of DECL_UIDs whose expansion is suppressed (allocated from
   nesting_info_bitmap_obstack, see create_nesting_tree).  */
93 bitmap suppress_expansion
;
/* Chain of temporaries created by create_tmp_var_for.  */
96 tree new_local_var_chain
;
/* Set when a PARM_DECL has been given a field in the frame record.  */
104 bool any_parm_remapped
;
/* Set when a trampoline field has been created.  */
105 bool any_tramp_created
;
/* Bitmask: 1 = frame address used, 2 = chain decl used; see
   get_static_chain / get_frame_field.  */
106 char static_chain_added
;
110 /* Iterate over the nesting tree, starting with ROOT, depth first. */
112 static inline struct nesting_info
*
113 iter_nestinfo_start (struct nesting_info
*root
)
/* Return the successor of NODE in the depth-first walk by descending into
   the subtree rooted at NODE's next sibling.  */
120 static inline struct nesting_info
*
121 iter_nestinfo_next (struct nesting_info
*node
)
124 return iter_nestinfo_start (node
->next
);
/* Visit every nesting_info reachable from ROOT, innermost first.  */
128 #define FOR_EACH_NEST_INFO(I, ROOT) \
129 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
131 /* Obstack used for the bitmaps in the struct above. */
132 static struct bitmap_obstack nesting_info_bitmap_obstack
;
135 /* We're working in so many different function contexts simultaneously,
136 that create_tmp_var is dangerous. Prevent mishap. */
137 #define create_tmp_var cant_use_create_tmp_var_here_dummy
139 /* Like create_tmp_var, except record the variable for registration at
140 the given nesting level. */
/* INFO is the nesting level that owns the new temporary, TYPE its type,
   PREFIX an optional name prefix.  The variable is linked onto
   INFO->new_local_var_chain for later registration.  */
143 create_tmp_var_for (struct nesting_info
*info
, tree type
, const char *prefix
)
147 /* If the type is of variable size or a type which must be created by the
148 frontend, something is wrong. Note that we explicitly allow
149 incomplete types here, since we create them ourselves here. */
150 gcc_assert (!TREE_ADDRESSABLE (type
));
151 gcc_assert (!TYPE_SIZE_UNIT (type
)
152 || TREE_CODE (TYPE_SIZE_UNIT (type
)) == INTEGER_CST
);
154 tmp_var
= create_tmp_var_raw (type
, prefix
);
/* The temporary belongs to INFO's function, not whatever function we
   happen to be compiling right now.  */
155 DECL_CONTEXT (tmp_var
) = info
->context
;
156 DECL_CHAIN (tmp_var
) = info
->new_local_var_chain
;
157 DECL_SEEN_IN_BIND_EXPR_P (tmp_var
) = 1;
158 if (TREE_CODE (type
) == COMPLEX_TYPE
159 || TREE_CODE (type
) == VECTOR_TYPE
)
160 DECL_GIMPLE_REG_P (tmp_var
) = 1;
162 info
->new_local_var_chain
= tmp_var
;
167 /* Take the address of EXP to be used within function CONTEXT.
168 Mark it for addressability as necessary. */
/* NOTE(review): the comment still mentions CONTEXT but the visible
   signature takes only EXP — the context argument was evidently removed.  */
171 build_addr (tree exp
)
173 mark_addressable (exp
);
174 return build_fold_addr_expr (exp
);
177 /* Insert FIELD into TYPE, sorted by alignment requirements. */
180 insert_field_into_struct (tree type
, tree field
)
184 DECL_CONTEXT (field
) = type
;
/* Find the first existing field whose alignment does not exceed FIELD's
   and link FIELD in before it, keeping TYPE_FIELDS ordered by
   non-increasing alignment.  */
186 for (p
= &TYPE_FIELDS (type
); *p
; p
= &DECL_CHAIN (*p
))
187 if (DECL_ALIGN (field
) >= DECL_ALIGN (*p
))
190 DECL_CHAIN (field
) = *p
;
193 /* Set correct alignment for frame struct type. */
194 if (TYPE_ALIGN (type
) < DECL_ALIGN (field
))
195 SET_TYPE_ALIGN (type
, DECL_ALIGN (field
));
198 /* Build or return the RECORD_TYPE that describes the frame state that is
199 shared between INFO->CONTEXT and its nested functions. This record will
200 not be complete until finalize_nesting_tree; up until that point we'll
201 be adding fields as necessary.
203 We also build the DECL that represents this frame in the function. */
206 get_frame_type (struct nesting_info
*info
)
208 tree type
= info
->frame_type
;
/* First use: create the RECORD_TYPE and name it FRAME.<function-name>.  */
213 type
= make_node (RECORD_TYPE
);
215 name
= concat ("FRAME.",
216 IDENTIFIER_POINTER (DECL_NAME (info
->context
)),
218 TYPE_NAME (type
) = get_identifier (name
);
221 info
->frame_type
= type
;
222 info
->frame_decl
= create_tmp_var_for (info
, type
, "FRAME");
223 DECL_NONLOCAL_FRAME (info
->frame_decl
) = 1;
225 /* ??? Always make it addressable for now, since it is meant to
226 be pointed to by the static chain pointer. This pessimizes
227 when it turns out that no static chains are needed because
228 the nested functions referencing non-local variables are not
229 reachable, but the true pessimization is to create the non-
230 local frame structure in the first place. */
231 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
236 /* Return true if DECL should be referenced by pointer in the non-local
frame structure.  (NOTE(review): original comment continuation lost in
extraction.)  */
240 use_pointer_in_frame (tree decl
)
242 if (TREE_CODE (decl
) == PARM_DECL
)
244 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
245 sized decls, and inefficient to copy large aggregates. Don't bother
246 moving anything but scalar variables. */
247 return AGGREGATE_TYPE_P (TREE_TYPE (decl
));
251 /* Variable sized types make things "interesting" in the frame. */
252 return DECL_SIZE (decl
) == NULL
|| !TREE_CONSTANT (DECL_SIZE (decl
));
256 /* Given DECL, a non-locally accessed variable, find or create a field
257 in the non-local frame structure for the given nesting context. */
/* INSERT selects probe-only (NO_INSERT) vs. create-on-miss behavior.  */
260 lookup_field_for_decl (struct nesting_info
*info
, tree decl
,
261 enum insert_option insert
)
263 if (insert
== NO_INSERT
)
265 tree
*slot
= info
->field_map
->get (decl
);
266 return slot
? *slot
: NULL_TREE
;
269 tree
*slot
= &info
->field_map
->get_or_insert (decl
);
272 tree field
= make_node (FIELD_DECL
);
273 DECL_NAME (field
) = DECL_NAME (decl
);
/* Decls that are kept by pointer (see use_pointer_in_frame) get a
   pointer-typed field; everything else is stored in the frame directly,
   mirroring the original decl's alignment and flags.  */
275 if (use_pointer_in_frame (decl
))
277 TREE_TYPE (field
) = build_pointer_type (TREE_TYPE (decl
));
278 SET_DECL_ALIGN (field
, TYPE_ALIGN (TREE_TYPE (field
)));
279 DECL_NONADDRESSABLE_P (field
) = 1;
283 TREE_TYPE (field
) = TREE_TYPE (decl
);
284 DECL_SOURCE_LOCATION (field
) = DECL_SOURCE_LOCATION (decl
);
285 SET_DECL_ALIGN (field
, DECL_ALIGN (decl
));
286 DECL_USER_ALIGN (field
) = DECL_USER_ALIGN (decl
);
287 TREE_ADDRESSABLE (field
) = TREE_ADDRESSABLE (decl
);
288 DECL_NONADDRESSABLE_P (field
) = !TREE_ADDRESSABLE (decl
);
289 TREE_THIS_VOLATILE (field
) = TREE_THIS_VOLATILE (decl
);
292 insert_field_into_struct (get_frame_type (info
), field
);
/* Remapping a parameter means the caller's value must be copied into the
   frame at function entry; record that fact.  */
295 if (TREE_CODE (decl
) == PARM_DECL
)
296 info
->any_parm_remapped
= true;
302 /* Build or return the variable that holds the static chain within
303 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
306 get_chain_decl (struct nesting_info
*info
)
308 tree decl
= info
->chain_decl
;
/* The chain's type is a pointer to the enclosing function's frame.  */
314 type
= get_frame_type (info
->outer
);
315 type
= build_pointer_type (type
);
317 /* Note that this variable is *not* entered into any BIND_EXPR;
318 the construction of this variable is handled specially in
319 expand_function_start and initialize_inlined_parameters.
320 Note also that it's represented as a parameter. This is more
321 close to the truth, since the initial value does come from
an incoming argument.  (NOTE(review): original comment continuation lost
in extraction — confirm wording against the full source.)  */
323 decl
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
324 PARM_DECL
, create_tmp_var_name ("CHAIN"), type
);
325 DECL_ARTIFICIAL (decl
) = 1;
326 DECL_IGNORED_P (decl
) = 1;
327 TREE_USED (decl
) = 1;
328 DECL_CONTEXT (decl
) = info
->context
;
329 DECL_ARG_TYPE (decl
) = type
;
331 /* Tell tree-inline.c that we never write to this variable, so
332 it can copy-prop the replacement value immediately. */
333 TREE_READONLY (decl
) = 1;
335 info
->chain_decl
= decl
;
/* Dump when this is the first time we mark the function as needing a
   static chain.  */
338 && (dump_flags
& TDF_DETAILS
)
339 && !DECL_STATIC_CHAIN (info
->context
))
340 fprintf (dump_file
, "Setting static-chain for %s\n",
341 lang_hooks
.decl_printable_name (info
->context
, 2));
343 DECL_STATIC_CHAIN (info
->context
) = 1;
348 /* Build or return the field within the non-local frame state that holds
349 the static chain for INFO->CONTEXT. This is the way to walk back up
350 multiple nesting levels. */
353 get_chain_field (struct nesting_info
*info
)
355 tree field
= info
->chain_field
;
/* First use: create the "__chain" field, a pointer to the enclosing
   function's frame record.  */
359 tree type
= build_pointer_type (get_frame_type (info
->outer
));
361 field
= make_node (FIELD_DECL
);
362 DECL_NAME (field
) = get_identifier ("__chain");
363 TREE_TYPE (field
) = type
;
364 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
365 DECL_NONADDRESSABLE_P (field
) = 1;
367 insert_field_into_struct (get_frame_type (info
), field
);
369 info
->chain_field
= field
;
/* Dump when this is the first time we mark the function as needing a
   static chain.  */
372 && (dump_flags
& TDF_DETAILS
)
373 && !DECL_STATIC_CHAIN (info
->context
))
374 fprintf (dump_file
, "Setting static-chain for %s\n",
375 lang_hooks
.decl_printable_name (info
->context
, 2));
377 DECL_STATIC_CHAIN (info
->context
) = 1;
382 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
/* NOTE(review): the parameter line carrying the call statement itself
   (presumably a gcall *) was lost in extraction; CALL below refers to it.
   The call's LHS is set to a fresh temporary which is returned via the
   missing tail of the function.  */
385 init_tmp_var_with_call (struct nesting_info
*info
, gimple_stmt_iterator
*gsi
,
390 t
= create_tmp_var_for (info
, gimple_call_return_type (call
), NULL
);
391 gimple_call_set_lhs (call
, t
);
/* Inherit the location of the statement we are inserting before.  */
392 if (! gsi_end_p (*gsi
))
393 gimple_set_location (call
, gimple_location (gsi_stmt (*gsi
)));
394 gsi_insert_before (gsi
, call
, GSI_SAME_STMT
);
400 /* Copy EXP into a temporary. Allocate the temporary in the context of
401 INFO and insert the initialization statement before GSI. */
404 init_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
409 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
410 stmt
= gimple_build_assign (t
, exp
);
/* Inherit the location of the statement we are inserting before.  */
411 if (! gsi_end_p (*gsi
))
412 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
413 gsi_insert_before_without_update (gsi
, stmt
, GSI_SAME_STMT
);
419 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
/* If EXP is already a valid GIMPLE value it is returned unchanged;
   otherwise it is copied into a fresh temporary via init_tmp_var.  */
422 gsi_gimplify_val (struct nesting_info
*info
, tree exp
,
423 gimple_stmt_iterator
*gsi
)
425 if (is_gimple_val (exp
))
428 return init_tmp_var (info
, exp
, gsi
);
431 /* Similarly, but copy from the temporary and insert the statement
432 after the iterator. */
/* Used for left-hand sides: the caller stores into the temporary T and
   the assignment EXP = T is queued after the current statement.  */
435 save_tmp_var (struct nesting_info
*info
, tree exp
, gimple_stmt_iterator
*gsi
)
440 t
= create_tmp_var_for (info
, TREE_TYPE (exp
), NULL
);
/* Note the operand order: EXP receives the temporary's value.  */
441 stmt
= gimple_build_assign (exp
, t
);
442 if (! gsi_end_p (*gsi
))
443 gimple_set_location (stmt
, gimple_location (gsi_stmt (*gsi
)));
444 gsi_insert_after_without_update (gsi
, stmt
, GSI_SAME_STMT
);
449 /* Build or return the type used to represent a nested function trampoline. */
/* Cached once per translation unit; GTY(()) keeps it across GC.  */
451 static GTY(()) tree trampoline_type
;
454 get_trampoline_type (struct nesting_info
*info
)
456 unsigned align
, size
;
460 return trampoline_type
;
462 align
= TRAMPOLINE_ALIGNMENT
;
463 size
= TRAMPOLINE_SIZE
;
465 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
466 then allocate extra space so that we can do dynamic alignment. */
467 if (align
> STACK_BOUNDARY
)
469 size
+= ((align
/BITS_PER_UNIT
) - 1) & -(STACK_BOUNDARY
/BITS_PER_UNIT
);
470 align
= STACK_BOUNDARY
;
/* The trampoline is a record with a single "__data" char-array field of
   the computed size and alignment.  */
473 t
= build_index_type (size_int (size
- 1));
474 t
= build_array_type (char_type_node
, t
);
475 t
= build_decl (DECL_SOURCE_LOCATION (info
->context
),
476 FIELD_DECL
, get_identifier ("__data"), t
);
477 SET_DECL_ALIGN (t
, align
);
478 DECL_USER_ALIGN (t
) = 1;
480 trampoline_type
= make_node (RECORD_TYPE
);
481 TYPE_NAME (trampoline_type
) = get_identifier ("__builtin_trampoline");
482 TYPE_FIELDS (trampoline_type
) = t
;
483 layout_type (trampoline_type
);
484 DECL_CONTEXT (t
) = trampoline_type
;
486 return trampoline_type
;
489 /* Given DECL, a nested function, find or create a field in the non-local
490 frame structure for a trampoline for this function. */
/* Note that var_map is shared with get_nonlocal_debug_decl; DECL here is
   a FUNCTION_DECL so the key spaces do not collide.  */
493 lookup_tramp_for_decl (struct nesting_info
*info
, tree decl
,
494 enum insert_option insert
)
496 if (insert
== NO_INSERT
)
498 tree
*slot
= info
->var_map
->get (decl
);
499 return slot
? *slot
: NULL_TREE
;
502 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
505 tree field
= make_node (FIELD_DECL
);
506 DECL_NAME (field
) = DECL_NAME (decl
);
507 TREE_TYPE (field
) = get_trampoline_type (info
);
508 TREE_ADDRESSABLE (field
) = 1;
510 insert_field_into_struct (get_frame_type (info
), field
);
513 info
->any_tramp_created
= true;
519 /* Build or return the field within the non-local frame state that holds
520 the non-local goto "jmp_buf". The buffer itself is maintained by the
521 rtl middle-end as dynamic stack space is allocated. */
524 get_nl_goto_field (struct nesting_info
*info
)
526 tree field
= info
->nl_goto_field
;
532 /* For __builtin_nonlocal_goto, we need N words. The first is the
533 frame pointer, the rest is for the target's stack pointer save
534 area. The number of words is controlled by STACK_SAVEAREA_MODE;
535 not the best interface, but it'll do for now. */
536 if (Pmode
== ptr_mode
)
537 type
= ptr_type_node
;
539 type
= lang_hooks
.types
.type_for_mode (Pmode
, 1);
/* Number of pointer-sized words needed for the save area.  */
541 size
= GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
));
542 size
= size
/ GET_MODE_SIZE (Pmode
);
545 type
= build_array_type
546 (type
, build_index_type (size_int (size
)));
548 field
= make_node (FIELD_DECL
);
549 DECL_NAME (field
) = get_identifier ("__nl_goto_buf");
550 TREE_TYPE (field
) = type
;
551 SET_DECL_ALIGN (field
, TYPE_ALIGN (type
));
552 TREE_ADDRESSABLE (field
) = 1;
554 insert_field_into_struct (get_frame_type (info
), field
);
556 info
->nl_goto_field
= field
;
562 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
565 walk_body (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
566 struct nesting_info
*info
, gimple_seq
*pseq
)
568 struct walk_stmt_info wi
;
570 memset (&wi
, 0, sizeof (wi
));
/* NOTE(review): the extraction drops lines between the memset and the
   walk (original lines 571-572, presumably wiring INFO into wi) —
   confirm against the full source.  */
573 walk_gimple_seq_mod (pseq
, callback_stmt
, callback_op
, &wi
);
577 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
580 walk_function (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
581 struct nesting_info
*info
)
/* The body is fetched, walked (and possibly modified), then stored back.  */
583 gimple_seq body
= gimple_body (info
->context
);
584 walk_body (callback_stmt
, callback_op
, info
, &body
);
585 gimple_set_body (info
->context
, body
);
588 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
591 walk_gimple_omp_for (gomp_for
*for_stmt
,
592 walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
593 struct nesting_info
*info
)
595 struct walk_stmt_info wi
;
/* Walk the pre-body sequence first.  */
600 walk_body (callback_stmt
, callback_op
, info
, gimple_omp_for_pre_body_ptr (for_stmt
));
/* Any statements the operand callbacks emit are collected in a scratch
   sequence via wi.gsi and appended to the pre-body afterwards.  */
603 memset (&wi
, 0, sizeof (wi
));
605 wi
.gsi
= gsi_last (seq
);
/* Walk index, initial, final and the operands of the increment for each
   collapsed loop dimension.  */
607 for (i
= 0; i
< gimple_omp_for_collapse (for_stmt
); i
++)
610 walk_tree (gimple_omp_for_index_ptr (for_stmt
, i
), callback_op
,
614 walk_tree (gimple_omp_for_initial_ptr (for_stmt
, i
), callback_op
,
619 walk_tree (gimple_omp_for_final_ptr (for_stmt
, i
), callback_op
,
622 t
= gimple_omp_for_incr (for_stmt
, i
);
623 gcc_assert (BINARY_CLASS_P (t
));
625 walk_tree (&TREE_OPERAND (t
, 0), callback_op
, &wi
, NULL
);
628 walk_tree (&TREE_OPERAND (t
, 1), callback_op
, &wi
, NULL
);
/* Append any newly generated statements to the pre-body, tagged with the
   loop's location.  */
631 seq
= gsi_seq (wi
.gsi
);
632 if (!gimple_seq_empty_p (seq
))
634 gimple_seq pre_body
= gimple_omp_for_pre_body (for_stmt
);
635 annotate_all_with_location (seq
, gimple_location (for_stmt
));
636 gimple_seq_add_seq (&pre_body
, seq
);
637 gimple_omp_for_set_pre_body (for_stmt
, pre_body
);
641 /* Similarly for ROOT and all functions nested underneath, depth first. */
644 walk_all_functions (walk_stmt_fn callback_stmt
, walk_tree_fn callback_op
,
645 struct nesting_info
*root
)
647 struct nesting_info
*n
;
/* Inner functions are visited before their enclosing functions.  */
648 FOR_EACH_NEST_INFO (n
, root
)
649 walk_function (callback_stmt
, callback_op
, n
);
653 /* We have to check for a fairly pathological case. The operands of a
654 nested function are to be interpreted in the context of the enclosing
655 function. So if any are variably-sized, they will get remapped when the
656 enclosing function is inlined. But that remapping would also have to be
657 done in the types of the PARM_DECLs of the nested function, meaning the
658 argument types of that function will disagree with the arguments in the
659 calls to that function. So we'd either have to make a copy of the nested
660 function corresponding to each time the enclosing function was inlined or
661 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
662 function. The former is not practical. The latter would still require
663 detecting this case to know when to add the conversions. So, for now at
664 least, we don't inline such an enclosing function.
666 We have to do that check recursively, so here return indicating whether
667 FNDECL has such a nested function. ORIG_FN is the function we were
668 trying to inline to use for checking whether any argument is variably
669 modified by anything in it.
671 It would be better to do this in tree-inline.c so that we could give
672 the appropriate warning for why a function can't be inlined, but that's
673 too late since the nesting structure has already been flattened and
674 adding a flag just to record this fact seems a waste of a flag. */
677 check_for_nested_with_variably_modified (tree fndecl
, tree orig_fndecl
)
679 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
/* Check every directly nested function's arguments, then recurse.  */
682 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
684 for (arg
= DECL_ARGUMENTS (cgn
->decl
); arg
; arg
= DECL_CHAIN (arg
))
685 if (variably_modified_type_p (TREE_TYPE (arg
), orig_fndecl
))
688 if (check_for_nested_with_variably_modified (cgn
->decl
,
696 /* Construct our local datastructure describing the function nesting
697 tree rooted by CGN. */
699 static struct nesting_info
*
700 create_nesting_tree (struct cgraph_node
*cgn
)
702 struct nesting_info
*info
= XCNEW (struct nesting_info
);
703 info
->field_map
= new hash_map
<tree
, tree
>;
704 info
->var_map
= new hash_map
<tree
, tree
>;
705 info
->mem_refs
= new hash_set
<tree
*>;
706 info
->suppress_expansion
= BITMAP_ALLOC (&nesting_info_bitmap_obstack
);
707 info
->context
= cgn
->decl
;
/* Recursively build the subtree for each directly nested function and
   prepend it to our inner list.  */
709 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
711 struct nesting_info
*sub
= create_nesting_tree (cgn
);
713 sub
->next
= info
->inner
;
717 /* See check_for_nested_with_variably_modified for why this
718 has to be here. */
719 if (check_for_nested_with_variably_modified (info
->context
, info
->context
))
720 DECL_UNINLINABLE (info
->context
) = true;
725 /* Return an expression computing the static chain for TARGET_CONTEXT
726 from INFO->CONTEXT. Insert any necessary computations before TSI. */
729 get_static_chain (struct nesting_info
*info
, tree target_context
,
730 gimple_stmt_iterator
*gsi
)
732 struct nesting_info
*i
;
/* Direct parent: the chain is simply the address of our own frame.  */
735 if (info
->context
== target_context
)
737 x
= build_addr (info
->frame_decl
);
738 info
->static_chain_added
|= 1;
/* Otherwise start from our incoming chain and follow "__chain" fields
   up one nesting level at a time until TARGET_CONTEXT is reached.  */
742 x
= get_chain_decl (info
);
743 info
->static_chain_added
|= 2;
745 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
747 tree field
= get_chain_field (i
);
749 x
= build_simple_mem_ref (x
);
750 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
751 x
= init_tmp_var (info
, x
, gsi
);
759 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
760 frame as seen from INFO->CONTEXT. Insert any necessary computations
before TSI.  (NOTE(review): original comment tail lost in extraction.)  */
764 get_frame_field (struct nesting_info
*info
, tree target_context
,
765 tree field
, gimple_stmt_iterator
*gsi
)
767 struct nesting_info
*i
;
/* Referencing our own frame: use frame_decl directly.  */
770 if (info
->context
== target_context
)
772 /* Make sure frame_decl gets created. */
773 (void) get_frame_type (info
);
774 x
= info
->frame_decl
;
775 info
->static_chain_added
|= 1;
/* Otherwise walk the static chain up to TARGET_CONTEXT, as in
   get_static_chain, then dereference into its frame.  */
779 x
= get_chain_decl (info
);
780 info
->static_chain_added
|= 2;
782 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
784 tree field
= get_chain_field (i
);
786 x
= build_simple_mem_ref (x
);
787 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
788 x
= init_tmp_var (info
, x
, gsi
);
791 x
= build_simple_mem_ref (x
);
794 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
/* Forward declaration; defined later in the file.  */
798 static void note_nonlocal_vla_type (struct nesting_info
*info
, tree type
);
800 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
801 in the nested function with DECL_VALUE_EXPR set to reference the true
802 variable in the parent function. This is used both for debug info
803 and in OMP lowering. */
806 get_nonlocal_debug_decl (struct nesting_info
*info
, tree decl
)
809 struct nesting_info
*i
;
810 tree x
, field
, new_decl
;
/* Cache lookup: one replacement decl per (info, decl) pair.  */
812 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
817 target_context
= decl_function_context (decl
);
819 /* A copy of the code in get_frame_field, but without the temporaries. */
820 if (info
->context
== target_context
)
822 /* Make sure frame_decl gets created. */
823 (void) get_frame_type (info
);
824 x
= info
->frame_decl
;
826 info
->static_chain_added
|= 1;
830 x
= get_chain_decl (info
);
831 info
->static_chain_added
|= 2;
832 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
834 field
= get_chain_field (i
);
835 x
= build_simple_mem_ref (x
);
836 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
838 x
= build_simple_mem_ref (x
);
/* Select the frame field for DECL itself, with an extra dereference for
   decls stored by pointer.  */
841 field
= lookup_field_for_decl (i
, decl
, INSERT
);
842 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
843 if (use_pointer_in_frame (decl
))
844 x
= build_simple_mem_ref (x
);
846 /* ??? We should be remapping types as well, surely. */
847 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
848 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
849 DECL_CONTEXT (new_decl
) = info
->context
;
850 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
851 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
852 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
853 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
854 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
855 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
856 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
857 if ((TREE_CODE (decl
) == PARM_DECL
858 || TREE_CODE (decl
) == RESULT_DECL
859 || TREE_CODE (decl
) == VAR_DECL
)
860 && DECL_BY_REFERENCE (decl
))
861 DECL_BY_REFERENCE (new_decl
) = 1;
/* The replacement decl stands for the frame access X.  */
863 SET_DECL_VALUE_EXPR (new_decl
, x
);
864 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
867 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
868 info
->debug_var_chain
= new_decl
;
/* For a variably-modified type referenced across contexts, also record
   its VLA type for later processing.  */
871 && info
->context
!= target_context
872 && variably_modified_type_p (TREE_TYPE (decl
), NULL
))
873 note_nonlocal_vla_type (info
, TREE_TYPE (decl
));
879 /* Callback for walk_gimple_stmt, rewrite all references to VAR
880 and PARM_DECLs that belong to outer functions.
882 The rewrite will involve some number of structure accesses back up
883 the static chain. E.g. for a variable FOO up one nesting level it'll
884 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
885 indirections apply to decls for which use_pointer_in_frame is true. */
888 convert_nonlocal_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
890 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
891 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
/* NOTE(review): the declaration of T (presumably tree t = *tp) was lost
   in extraction, as were several case labels of this switch.  */
895 switch (TREE_CODE (t
))
898 /* Non-automatic variables are never processed. */
899 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
/* A decl from an enclosing function: rewrite it.  */
904 if (decl_function_context (t
) != info
->context
)
909 x
= get_nonlocal_debug_decl (info
, t
);
910 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
912 tree target_context
= decl_function_context (t
);
913 struct nesting_info
*i
;
/* Force frame fields to exist on every level between here and the
   decl's owner.  */
914 for (i
= info
->outer
; i
->context
!= target_context
; i
= i
->outer
)
916 x
= lookup_field_for_decl (i
, t
, INSERT
);
917 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
918 if (use_pointer_in_frame (t
))
920 x
= init_tmp_var (info
, x
, &wi
->gsi
);
921 x
= build_simple_mem_ref (x
);
/* LHS references store through a temporary (save_tmp_var); everything
   else loads into one (init_tmp_var).  */
928 x
= save_tmp_var (info
, x
, &wi
->gsi
);
930 x
= init_tmp_var (info
, x
, &wi
->gsi
);
938 /* We're taking the address of a label from a parent function, but
939 this is not itself a non-local goto. Mark the label such that it
940 will not be deleted, much as we would with a label address in
an initializer.  (NOTE(review): original comment tail lost in
extraction.)  */
942 if (decl_function_context (t
) != info
->context
)
943 FORCED_LABEL (t
) = 1;
/* ADDR_EXPR handling: walk the operand with val_only cleared.  */
948 bool save_val_only
= wi
->val_only
;
950 wi
->val_only
= false;
953 walk_tree (&TREE_OPERAND (t
, 0), convert_nonlocal_reference_op
, wi
, 0);
960 /* If we changed anything, we might no longer be directly
961 referencing a decl. */
962 save_context
= current_function_decl
;
963 current_function_decl
= info
->context
;
964 recompute_tree_invariant_for_addr_expr (t
);
965 current_function_decl
= save_context
;
967 /* If the callback converted the address argument in a context
968 where we only accept variables (and min_invariant, presumably),
969 then compute the address into a temporary. */
971 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
981 case ARRAY_RANGE_REF
:
983 /* Go down this entire nest and just look at the final prefix and
984 anything that describes the references. Otherwise, we lose track
985 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
988 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
990 if (TREE_CODE (t
) == COMPONENT_REF
)
991 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
, wi
,
993 else if (TREE_CODE (t
) == ARRAY_REF
994 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
996 walk_tree (&TREE_OPERAND (t
, 1), convert_nonlocal_reference_op
,
998 walk_tree (&TREE_OPERAND (t
, 2), convert_nonlocal_reference_op
,
1000 walk_tree (&TREE_OPERAND (t
, 3), convert_nonlocal_reference_op
,
1004 wi
->val_only
= false;
1005 walk_tree (tp
, convert_nonlocal_reference_op
, wi
, NULL
);
1008 case VIEW_CONVERT_EXPR
:
1009 /* Just request to look at the subtrees, leaving val_only and lhs
1010 untouched. This might actually be for !val_only + lhs, in which
1011 case we don't want to force a replacement by a temporary. */
1016 if (!IS_TYPE_OR_DECL_P (t
))
1019 wi
->val_only
= true;
/* Forward declaration; defined later in the file.  */
1028 static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator
*, bool *,
1029 struct walk_stmt_info
*);
1031 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1032 and PARM_DECLs that belong to outer functions. */
1035 convert_nonlocal_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1037 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1038 bool need_chain
= false, need_stmts
= false;
1041 bitmap new_suppress
;
1043 new_suppress
= BITMAP_GGC_ALLOC ();
1044 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1046 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1048 switch (OMP_CLAUSE_CODE (clause
))
1050 case OMP_CLAUSE_REDUCTION
:
1051 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1053 goto do_decl_clause
;
1055 case OMP_CLAUSE_LASTPRIVATE
:
1056 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1058 goto do_decl_clause
;
1060 case OMP_CLAUSE_LINEAR
:
1061 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1063 wi
->val_only
= true;
1065 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
),
1067 goto do_decl_clause
;
1069 case OMP_CLAUSE_PRIVATE
:
1070 case OMP_CLAUSE_FIRSTPRIVATE
:
1071 case OMP_CLAUSE_COPYPRIVATE
:
1072 case OMP_CLAUSE_SHARED
:
1073 case OMP_CLAUSE_TO_DECLARE
:
1074 case OMP_CLAUSE_LINK
:
1075 case OMP_CLAUSE_USE_DEVICE_PTR
:
1076 case OMP_CLAUSE_IS_DEVICE_PTR
:
1078 decl
= OMP_CLAUSE_DECL (clause
);
1079 if (TREE_CODE (decl
) == VAR_DECL
1080 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1082 if (decl_function_context (decl
) != info
->context
)
1084 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1085 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1086 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1087 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1088 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1093 case OMP_CLAUSE_SCHEDULE
:
1094 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1097 case OMP_CLAUSE_FINAL
:
1099 case OMP_CLAUSE_NUM_THREADS
:
1100 case OMP_CLAUSE_DEPEND
:
1101 case OMP_CLAUSE_DEVICE
:
1102 case OMP_CLAUSE_NUM_TEAMS
:
1103 case OMP_CLAUSE_THREAD_LIMIT
:
1104 case OMP_CLAUSE_SAFELEN
:
1105 case OMP_CLAUSE_SIMDLEN
:
1106 case OMP_CLAUSE_PRIORITY
:
1107 case OMP_CLAUSE_GRAINSIZE
:
1108 case OMP_CLAUSE_NUM_TASKS
:
1109 case OMP_CLAUSE_HINT
:
1110 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1111 case OMP_CLAUSE_NUM_GANGS
:
1112 case OMP_CLAUSE_NUM_WORKERS
:
1113 case OMP_CLAUSE_VECTOR_LENGTH
:
1114 case OMP_CLAUSE_GANG
:
1115 case OMP_CLAUSE_WORKER
:
1116 case OMP_CLAUSE_VECTOR
:
1117 case OMP_CLAUSE_ASYNC
:
1118 case OMP_CLAUSE_WAIT
:
1119 /* Several OpenACC clauses have optional arguments. Check if they
1121 if (OMP_CLAUSE_OPERAND (clause
, 0))
1123 wi
->val_only
= true;
1125 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1129 /* The gang clause accepts two arguments. */
1130 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1131 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1133 wi
->val_only
= true;
1135 convert_nonlocal_reference_op
1136 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1140 case OMP_CLAUSE_DIST_SCHEDULE
:
1141 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1143 wi
->val_only
= true;
1145 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1150 case OMP_CLAUSE_MAP
:
1152 case OMP_CLAUSE_FROM
:
1153 if (OMP_CLAUSE_SIZE (clause
))
1155 wi
->val_only
= true;
1157 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause
),
1160 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1161 goto do_decl_clause
;
1162 wi
->val_only
= true;
1164 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_nonlocal_reference_op
,
1168 case OMP_CLAUSE_ALIGNED
:
1169 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1171 wi
->val_only
= true;
1173 convert_nonlocal_reference_op
1174 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1176 /* Like do_decl_clause, but don't add any suppression. */
1177 decl
= OMP_CLAUSE_DECL (clause
);
1178 if (TREE_CODE (decl
) == VAR_DECL
1179 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1181 if (decl_function_context (decl
) != info
->context
)
1183 OMP_CLAUSE_DECL (clause
) = get_nonlocal_debug_decl (info
, decl
);
1184 if (OMP_CLAUSE_CODE (clause
) != OMP_CLAUSE_PRIVATE
)
1189 case OMP_CLAUSE_NOWAIT
:
1190 case OMP_CLAUSE_ORDERED
:
1191 case OMP_CLAUSE_DEFAULT
:
1192 case OMP_CLAUSE_COPYIN
:
1193 case OMP_CLAUSE_COLLAPSE
:
1194 case OMP_CLAUSE_UNTIED
:
1195 case OMP_CLAUSE_MERGEABLE
:
1196 case OMP_CLAUSE_PROC_BIND
:
1197 case OMP_CLAUSE_NOGROUP
:
1198 case OMP_CLAUSE_THREADS
:
1199 case OMP_CLAUSE_SIMD
:
1200 case OMP_CLAUSE_DEFAULTMAP
:
1201 case OMP_CLAUSE_SEQ
:
1202 case OMP_CLAUSE_INDEPENDENT
:
1203 case OMP_CLAUSE_AUTO
:
1206 case OMP_CLAUSE_TILE
:
1207 /* OpenACC tile clauses are discarded during gimplification, so we
1208 don't expect to see anything here. */
1211 case OMP_CLAUSE__CACHE_
:
1212 /* These clauses belong to the OpenACC cache directive, which is
1213 discarded during gimplification, so we don't expect to see
1222 info
->suppress_expansion
= new_suppress
;
1225 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1226 switch (OMP_CLAUSE_CODE (clause
))
1228 case OMP_CLAUSE_REDUCTION
:
1229 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1232 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1233 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1235 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1236 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1238 walk_body (convert_nonlocal_reference_stmt
,
1239 convert_nonlocal_reference_op
, info
,
1240 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1241 walk_body (convert_nonlocal_reference_stmt
,
1242 convert_nonlocal_reference_op
, info
,
1243 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1244 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1246 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1247 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1252 case OMP_CLAUSE_LASTPRIVATE
:
1253 walk_body (convert_nonlocal_reference_stmt
,
1254 convert_nonlocal_reference_op
, info
,
1255 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1258 case OMP_CLAUSE_LINEAR
:
1259 walk_body (convert_nonlocal_reference_stmt
,
1260 convert_nonlocal_reference_op
, info
,
1261 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1271 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1274 note_nonlocal_vla_type (struct nesting_info
*info
, tree type
)
1276 while (POINTER_TYPE_P (type
) && !TYPE_NAME (type
))
1277 type
= TREE_TYPE (type
);
1279 if (TYPE_NAME (type
)
1280 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1281 && DECL_ORIGINAL_TYPE (TYPE_NAME (type
)))
1282 type
= DECL_ORIGINAL_TYPE (TYPE_NAME (type
));
1284 while (POINTER_TYPE_P (type
)
1285 || TREE_CODE (type
) == VECTOR_TYPE
1286 || TREE_CODE (type
) == FUNCTION_TYPE
1287 || TREE_CODE (type
) == METHOD_TYPE
)
1288 type
= TREE_TYPE (type
);
1290 if (TREE_CODE (type
) == ARRAY_TYPE
)
1294 note_nonlocal_vla_type (info
, TREE_TYPE (type
));
1295 domain
= TYPE_DOMAIN (type
);
1298 t
= TYPE_MIN_VALUE (domain
);
1299 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1300 && decl_function_context (t
) != info
->context
)
1301 get_nonlocal_debug_decl (info
, t
);
1302 t
= TYPE_MAX_VALUE (domain
);
1303 if (t
&& (TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
1304 && decl_function_context (t
) != info
->context
)
1305 get_nonlocal_debug_decl (info
, t
);
1310 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1314 note_nonlocal_block_vlas (struct nesting_info
*info
, tree block
)
1318 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
1319 if (TREE_CODE (var
) == VAR_DECL
1320 && variably_modified_type_p (TREE_TYPE (var
), NULL
)
1321 && DECL_HAS_VALUE_EXPR_P (var
)
1322 && decl_function_context (var
) != info
->context
)
1323 note_nonlocal_vla_type (info
, TREE_TYPE (var
));
1326 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1327 PARM_DECLs that belong to outer functions. This handles statements
1328 that are not handled via the standard recursion done in
1329 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1330 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1331 operands of STMT have been handled by this function. */
1334 convert_nonlocal_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1335 struct walk_stmt_info
*wi
)
1337 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1338 tree save_local_var_chain
;
1339 bitmap save_suppress
;
1340 gimple
*stmt
= gsi_stmt (*gsi
);
1342 switch (gimple_code (stmt
))
1345 /* Don't walk non-local gotos for now. */
1346 if (TREE_CODE (gimple_goto_dest (stmt
)) != LABEL_DECL
)
1348 wi
->val_only
= true;
1350 *handled_ops_p
= true;
1355 case GIMPLE_OMP_PARALLEL
:
1356 case GIMPLE_OMP_TASK
:
1357 save_suppress
= info
->suppress_expansion
;
1358 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1362 decl
= get_chain_decl (info
);
1363 c
= build_omp_clause (gimple_location (stmt
),
1364 OMP_CLAUSE_FIRSTPRIVATE
);
1365 OMP_CLAUSE_DECL (c
) = decl
;
1366 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1367 gimple_omp_taskreg_set_clauses (stmt
, c
);
1370 save_local_var_chain
= info
->new_local_var_chain
;
1371 info
->new_local_var_chain
= NULL
;
1373 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1374 info
, gimple_omp_body_ptr (stmt
));
1376 if (info
->new_local_var_chain
)
1377 declare_vars (info
->new_local_var_chain
,
1378 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1380 info
->new_local_var_chain
= save_local_var_chain
;
1381 info
->suppress_expansion
= save_suppress
;
1384 case GIMPLE_OMP_FOR
:
1385 save_suppress
= info
->suppress_expansion
;
1386 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
1387 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
1388 convert_nonlocal_reference_stmt
,
1389 convert_nonlocal_reference_op
, info
);
1390 walk_body (convert_nonlocal_reference_stmt
,
1391 convert_nonlocal_reference_op
, info
, gimple_omp_body_ptr (stmt
));
1392 info
->suppress_expansion
= save_suppress
;
1395 case GIMPLE_OMP_SECTIONS
:
1396 save_suppress
= info
->suppress_expansion
;
1397 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
1398 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1399 info
, gimple_omp_body_ptr (stmt
));
1400 info
->suppress_expansion
= save_suppress
;
1403 case GIMPLE_OMP_SINGLE
:
1404 save_suppress
= info
->suppress_expansion
;
1405 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
1406 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1407 info
, gimple_omp_body_ptr (stmt
));
1408 info
->suppress_expansion
= save_suppress
;
1411 case GIMPLE_OMP_TARGET
:
1412 if (!is_gimple_omp_offloaded (stmt
))
1414 save_suppress
= info
->suppress_expansion
;
1415 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1417 info
->suppress_expansion
= save_suppress
;
1418 walk_body (convert_nonlocal_reference_stmt
,
1419 convert_nonlocal_reference_op
, info
,
1420 gimple_omp_body_ptr (stmt
));
1423 save_suppress
= info
->suppress_expansion
;
1424 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt
),
1428 decl
= get_chain_decl (info
);
1429 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
1430 OMP_CLAUSE_DECL (c
) = decl
;
1431 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
1432 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
1433 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
1434 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
1437 save_local_var_chain
= info
->new_local_var_chain
;
1438 info
->new_local_var_chain
= NULL
;
1440 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1441 info
, gimple_omp_body_ptr (stmt
));
1443 if (info
->new_local_var_chain
)
1444 declare_vars (info
->new_local_var_chain
,
1445 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
1447 info
->new_local_var_chain
= save_local_var_chain
;
1448 info
->suppress_expansion
= save_suppress
;
1451 case GIMPLE_OMP_TEAMS
:
1452 save_suppress
= info
->suppress_expansion
;
1453 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
1454 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1455 info
, gimple_omp_body_ptr (stmt
));
1456 info
->suppress_expansion
= save_suppress
;
1459 case GIMPLE_OMP_SECTION
:
1460 case GIMPLE_OMP_MASTER
:
1461 case GIMPLE_OMP_TASKGROUP
:
1462 case GIMPLE_OMP_ORDERED
:
1463 walk_body (convert_nonlocal_reference_stmt
, convert_nonlocal_reference_op
,
1464 info
, gimple_omp_body_ptr (stmt
));
1469 gbind
*bind_stmt
= as_a
<gbind
*> (stmt
);
1470 if (!optimize
&& gimple_bind_block (bind_stmt
))
1471 note_nonlocal_block_vlas (info
, gimple_bind_block (bind_stmt
));
1473 for (tree var
= gimple_bind_vars (bind_stmt
); var
; var
= DECL_CHAIN (var
))
1474 if (TREE_CODE (var
) == NAMELIST_DECL
)
1476 /* Adjust decls mentioned in NAMELIST_DECL. */
1477 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
1481 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
1483 if (TREE_CODE (decl
) == VAR_DECL
1484 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1486 if (decl_function_context (decl
) != info
->context
)
1487 CONSTRUCTOR_ELT (decls
, i
)->value
1488 = get_nonlocal_debug_decl (info
, decl
);
1492 *handled_ops_p
= false;
1496 wi
->val_only
= true;
1498 *handled_ops_p
= false;
1502 /* For every other statement that we are not interested in
1503 handling here, let the walker traverse the operands. */
1504 *handled_ops_p
= false;
1508 /* We have handled all of STMT operands, no need to traverse the operands. */
1509 *handled_ops_p
= true;
1514 /* A subroutine of convert_local_reference. Create a local variable
1515 in the parent function with DECL_VALUE_EXPR set to reference the
1516 field in FRAME. This is used both for debug info and in OMP
1520 get_local_debug_decl (struct nesting_info
*info
, tree decl
, tree field
)
1524 tree
*slot
= &info
->var_map
->get_or_insert (decl
);
1528 /* Make sure frame_decl gets created. */
1529 (void) get_frame_type (info
);
1530 x
= info
->frame_decl
;
1531 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
), x
, field
, NULL_TREE
);
1533 new_decl
= build_decl (DECL_SOURCE_LOCATION (decl
),
1534 VAR_DECL
, DECL_NAME (decl
), TREE_TYPE (decl
));
1535 DECL_CONTEXT (new_decl
) = info
->context
;
1536 DECL_ARTIFICIAL (new_decl
) = DECL_ARTIFICIAL (decl
);
1537 DECL_IGNORED_P (new_decl
) = DECL_IGNORED_P (decl
);
1538 TREE_THIS_VOLATILE (new_decl
) = TREE_THIS_VOLATILE (decl
);
1539 TREE_SIDE_EFFECTS (new_decl
) = TREE_SIDE_EFFECTS (decl
);
1540 TREE_READONLY (new_decl
) = TREE_READONLY (decl
);
1541 TREE_ADDRESSABLE (new_decl
) = TREE_ADDRESSABLE (decl
);
1542 DECL_SEEN_IN_BIND_EXPR_P (new_decl
) = 1;
1543 if ((TREE_CODE (decl
) == PARM_DECL
1544 || TREE_CODE (decl
) == RESULT_DECL
1545 || TREE_CODE (decl
) == VAR_DECL
)
1546 && DECL_BY_REFERENCE (decl
))
1547 DECL_BY_REFERENCE (new_decl
) = 1;
1549 SET_DECL_VALUE_EXPR (new_decl
, x
);
1550 DECL_HAS_VALUE_EXPR_P (new_decl
) = 1;
1553 DECL_CHAIN (new_decl
) = info
->debug_var_chain
;
1554 info
->debug_var_chain
= new_decl
;
1556 /* Do not emit debug info twice. */
1557 DECL_IGNORED_P (decl
) = 1;
1563 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1564 and PARM_DECLs that were referenced by inner nested functions.
1565 The rewrite will be a structure reference to the local frame variable. */
1567 static bool convert_local_omp_clauses (tree
*, struct walk_stmt_info
*);
1570 convert_local_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
1572 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
1573 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1574 tree t
= *tp
, field
, x
;
1578 switch (TREE_CODE (t
))
1581 /* Non-automatic variables are never processed. */
1582 if (TREE_STATIC (t
) || DECL_EXTERNAL (t
))
1587 if (decl_function_context (t
) == info
->context
)
1589 /* If we copied a pointer to the frame, then the original decl
1590 is used unchanged in the parent function. */
1591 if (use_pointer_in_frame (t
))
1594 /* No need to transform anything if no child references the
1596 field
= lookup_field_for_decl (info
, t
, NO_INSERT
);
1601 x
= get_local_debug_decl (info
, t
, field
);
1602 if (!bitmap_bit_p (info
->suppress_expansion
, DECL_UID (t
)))
1603 x
= get_frame_field (info
, info
->context
, field
, &wi
->gsi
);
1608 x
= save_tmp_var (info
, x
, &wi
->gsi
);
1610 x
= init_tmp_var (info
, x
, &wi
->gsi
);
1618 save_val_only
= wi
->val_only
;
1619 wi
->val_only
= false;
1621 wi
->changed
= false;
1622 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
, wi
, NULL
);
1623 wi
->val_only
= save_val_only
;
1625 /* If we converted anything ... */
1630 /* Then the frame decl is now addressable. */
1631 TREE_ADDRESSABLE (info
->frame_decl
) = 1;
1633 save_context
= current_function_decl
;
1634 current_function_decl
= info
->context
;
1635 recompute_tree_invariant_for_addr_expr (t
);
1636 current_function_decl
= save_context
;
1638 /* If we are in a context where we only accept values, then
1639 compute the address into a temporary. */
1641 *tp
= gsi_gimplify_val ((struct nesting_info
*) wi
->info
,
1650 case ARRAY_RANGE_REF
:
1652 /* Go down this entire nest and just look at the final prefix and
1653 anything that describes the references. Otherwise, we lose track
1654 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1655 save_val_only
= wi
->val_only
;
1656 wi
->val_only
= true;
1658 for (; handled_component_p (t
); tp
= &TREE_OPERAND (t
, 0), t
= *tp
)
1660 if (TREE_CODE (t
) == COMPONENT_REF
)
1661 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1663 else if (TREE_CODE (t
) == ARRAY_REF
1664 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
1666 walk_tree (&TREE_OPERAND (t
, 1), convert_local_reference_op
, wi
,
1668 walk_tree (&TREE_OPERAND (t
, 2), convert_local_reference_op
, wi
,
1670 walk_tree (&TREE_OPERAND (t
, 3), convert_local_reference_op
, wi
,
1674 wi
->val_only
= false;
1675 walk_tree (tp
, convert_local_reference_op
, wi
, NULL
);
1676 wi
->val_only
= save_val_only
;
1680 save_val_only
= wi
->val_only
;
1681 wi
->val_only
= true;
1683 walk_tree (&TREE_OPERAND (t
, 0), convert_local_reference_op
,
1685 /* We need to re-fold the MEM_REF as component references as
1686 part of a ADDR_EXPR address are not allowed. But we cannot
1687 fold here, as the chain record type is not yet finalized. */
1688 if (TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
1689 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)))
1690 info
->mem_refs
->add (tp
);
1691 wi
->val_only
= save_val_only
;
1694 case VIEW_CONVERT_EXPR
:
1695 /* Just request to look at the subtrees, leaving val_only and lhs
1696 untouched. This might actually be for !val_only + lhs, in which
1697 case we don't want to force a replacement by a temporary. */
1702 if (!IS_TYPE_OR_DECL_P (t
))
1705 wi
->val_only
= true;
1714 static tree
convert_local_reference_stmt (gimple_stmt_iterator
*, bool *,
1715 struct walk_stmt_info
*);
1717 /* Helper for convert_local_reference. Convert all the references in
1718 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1721 convert_local_omp_clauses (tree
*pclauses
, struct walk_stmt_info
*wi
)
1723 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
1724 bool need_frame
= false, need_stmts
= false;
1727 bitmap new_suppress
;
1729 new_suppress
= BITMAP_GGC_ALLOC ();
1730 bitmap_copy (new_suppress
, info
->suppress_expansion
);
1732 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1734 switch (OMP_CLAUSE_CODE (clause
))
1736 case OMP_CLAUSE_REDUCTION
:
1737 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1739 goto do_decl_clause
;
1741 case OMP_CLAUSE_LASTPRIVATE
:
1742 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
))
1744 goto do_decl_clause
;
1746 case OMP_CLAUSE_LINEAR
:
1747 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
))
1749 wi
->val_only
= true;
1751 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause
), &dummy
,
1753 goto do_decl_clause
;
1755 case OMP_CLAUSE_PRIVATE
:
1756 case OMP_CLAUSE_FIRSTPRIVATE
:
1757 case OMP_CLAUSE_COPYPRIVATE
:
1758 case OMP_CLAUSE_SHARED
:
1759 case OMP_CLAUSE_TO_DECLARE
:
1760 case OMP_CLAUSE_LINK
:
1761 case OMP_CLAUSE_USE_DEVICE_PTR
:
1762 case OMP_CLAUSE_IS_DEVICE_PTR
:
1764 decl
= OMP_CLAUSE_DECL (clause
);
1765 if (TREE_CODE (decl
) == VAR_DECL
1766 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1768 if (decl_function_context (decl
) == info
->context
1769 && !use_pointer_in_frame (decl
))
1771 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1774 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_SHARED
)
1775 OMP_CLAUSE_SHARED_READONLY (clause
) = 0;
1776 bitmap_set_bit (new_suppress
, DECL_UID (decl
));
1777 OMP_CLAUSE_DECL (clause
)
1778 = get_local_debug_decl (info
, decl
, field
);
1784 case OMP_CLAUSE_SCHEDULE
:
1785 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause
) == NULL
)
1788 case OMP_CLAUSE_FINAL
:
1790 case OMP_CLAUSE_NUM_THREADS
:
1791 case OMP_CLAUSE_DEPEND
:
1792 case OMP_CLAUSE_DEVICE
:
1793 case OMP_CLAUSE_NUM_TEAMS
:
1794 case OMP_CLAUSE_THREAD_LIMIT
:
1795 case OMP_CLAUSE_SAFELEN
:
1796 case OMP_CLAUSE_SIMDLEN
:
1797 case OMP_CLAUSE_PRIORITY
:
1798 case OMP_CLAUSE_GRAINSIZE
:
1799 case OMP_CLAUSE_NUM_TASKS
:
1800 case OMP_CLAUSE_HINT
:
1801 case OMP_CLAUSE__CILK_FOR_COUNT_
:
1802 case OMP_CLAUSE_NUM_GANGS
:
1803 case OMP_CLAUSE_NUM_WORKERS
:
1804 case OMP_CLAUSE_VECTOR_LENGTH
:
1805 case OMP_CLAUSE_GANG
:
1806 case OMP_CLAUSE_WORKER
:
1807 case OMP_CLAUSE_VECTOR
:
1808 case OMP_CLAUSE_ASYNC
:
1809 case OMP_CLAUSE_WAIT
:
1810 /* Several OpenACC clauses have optional arguments. Check if they
1812 if (OMP_CLAUSE_OPERAND (clause
, 0))
1814 wi
->val_only
= true;
1816 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1820 /* The gang clause accepts two arguments. */
1821 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_GANG
1822 && OMP_CLAUSE_GANG_STATIC_EXPR (clause
))
1824 wi
->val_only
= true;
1826 convert_nonlocal_reference_op
1827 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause
), &dummy
, wi
);
1831 case OMP_CLAUSE_DIST_SCHEDULE
:
1832 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause
) != NULL
)
1834 wi
->val_only
= true;
1836 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause
, 0),
1841 case OMP_CLAUSE_MAP
:
1843 case OMP_CLAUSE_FROM
:
1844 if (OMP_CLAUSE_SIZE (clause
))
1846 wi
->val_only
= true;
1848 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause
),
1851 if (DECL_P (OMP_CLAUSE_DECL (clause
)))
1852 goto do_decl_clause
;
1853 wi
->val_only
= true;
1855 walk_tree (&OMP_CLAUSE_DECL (clause
), convert_local_reference_op
,
1859 case OMP_CLAUSE_ALIGNED
:
1860 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
))
1862 wi
->val_only
= true;
1864 convert_local_reference_op
1865 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause
), &dummy
, wi
);
1867 /* Like do_decl_clause, but don't add any suppression. */
1868 decl
= OMP_CLAUSE_DECL (clause
);
1869 if (TREE_CODE (decl
) == VAR_DECL
1870 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
1872 if (decl_function_context (decl
) == info
->context
1873 && !use_pointer_in_frame (decl
))
1875 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
1878 OMP_CLAUSE_DECL (clause
)
1879 = get_local_debug_decl (info
, decl
, field
);
1885 case OMP_CLAUSE_NOWAIT
:
1886 case OMP_CLAUSE_ORDERED
:
1887 case OMP_CLAUSE_DEFAULT
:
1888 case OMP_CLAUSE_COPYIN
:
1889 case OMP_CLAUSE_COLLAPSE
:
1890 case OMP_CLAUSE_UNTIED
:
1891 case OMP_CLAUSE_MERGEABLE
:
1892 case OMP_CLAUSE_PROC_BIND
:
1893 case OMP_CLAUSE_NOGROUP
:
1894 case OMP_CLAUSE_THREADS
:
1895 case OMP_CLAUSE_SIMD
:
1896 case OMP_CLAUSE_DEFAULTMAP
:
1897 case OMP_CLAUSE_SEQ
:
1898 case OMP_CLAUSE_INDEPENDENT
:
1899 case OMP_CLAUSE_AUTO
:
1902 case OMP_CLAUSE_TILE
:
1903 /* OpenACC tile clauses are discarded during gimplification, so we
1904 don't expect to see anything here. */
1907 case OMP_CLAUSE__CACHE_
:
1908 /* These clauses belong to the OpenACC cache directive, which is
1909 discarded during gimplification, so we don't expect to see
1918 info
->suppress_expansion
= new_suppress
;
1921 for (clause
= *pclauses
; clause
; clause
= OMP_CLAUSE_CHAIN (clause
))
1922 switch (OMP_CLAUSE_CODE (clause
))
1924 case OMP_CLAUSE_REDUCTION
:
1925 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1928 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
));
1929 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1931 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1932 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1934 walk_body (convert_local_reference_stmt
,
1935 convert_local_reference_op
, info
,
1936 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause
));
1937 walk_body (convert_local_reference_stmt
,
1938 convert_local_reference_op
, info
,
1939 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause
));
1940 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause
))
1942 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1943 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause
))
1948 case OMP_CLAUSE_LASTPRIVATE
:
1949 walk_body (convert_local_reference_stmt
,
1950 convert_local_reference_op
, info
,
1951 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause
));
1954 case OMP_CLAUSE_LINEAR
:
1955 walk_body (convert_local_reference_stmt
,
1956 convert_local_reference_op
, info
,
1957 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause
));
1968 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1969 and PARM_DECLs that were referenced by inner nested functions.
1970 The rewrite will be a structure reference to the local frame variable. */
1973 convert_local_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
1974 struct walk_stmt_info
*wi
)
1976 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
1977 tree save_local_var_chain
;
1978 bitmap save_suppress
;
1979 gimple
*stmt
= gsi_stmt (*gsi
);
1981 switch (gimple_code (stmt
))
1983 case GIMPLE_OMP_PARALLEL
:
1984 case GIMPLE_OMP_TASK
:
1985 save_suppress
= info
->suppress_expansion
;
1986 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt
),
1990 (void) get_frame_type (info
);
1991 c
= build_omp_clause (gimple_location (stmt
),
1993 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
1994 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
1995 gimple_omp_taskreg_set_clauses (stmt
, c
);
1998 save_local_var_chain
= info
->new_local_var_chain
;
1999 info
->new_local_var_chain
= NULL
;
2001 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2002 gimple_omp_body_ptr (stmt
));
2004 if (info
->new_local_var_chain
)
2005 declare_vars (info
->new_local_var_chain
,
2006 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2007 info
->new_local_var_chain
= save_local_var_chain
;
2008 info
->suppress_expansion
= save_suppress
;
2011 case GIMPLE_OMP_FOR
:
2012 save_suppress
= info
->suppress_expansion
;
2013 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt
), wi
);
2014 walk_gimple_omp_for (as_a
<gomp_for
*> (stmt
),
2015 convert_local_reference_stmt
,
2016 convert_local_reference_op
, info
);
2017 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2018 info
, gimple_omp_body_ptr (stmt
));
2019 info
->suppress_expansion
= save_suppress
;
2022 case GIMPLE_OMP_SECTIONS
:
2023 save_suppress
= info
->suppress_expansion
;
2024 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt
), wi
);
2025 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2026 info
, gimple_omp_body_ptr (stmt
));
2027 info
->suppress_expansion
= save_suppress
;
2030 case GIMPLE_OMP_SINGLE
:
2031 save_suppress
= info
->suppress_expansion
;
2032 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt
), wi
);
2033 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2034 info
, gimple_omp_body_ptr (stmt
));
2035 info
->suppress_expansion
= save_suppress
;
2038 case GIMPLE_OMP_TARGET
:
2039 if (!is_gimple_omp_offloaded (stmt
))
2041 save_suppress
= info
->suppress_expansion
;
2042 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
);
2043 info
->suppress_expansion
= save_suppress
;
2044 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2045 info
, gimple_omp_body_ptr (stmt
));
2048 save_suppress
= info
->suppress_expansion
;
2049 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt
), wi
))
2052 (void) get_frame_type (info
);
2053 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2054 OMP_CLAUSE_DECL (c
) = info
->frame_decl
;
2055 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
2056 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (info
->frame_decl
);
2057 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2058 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
), c
);
2061 save_local_var_chain
= info
->new_local_var_chain
;
2062 info
->new_local_var_chain
= NULL
;
2064 walk_body (convert_local_reference_stmt
, convert_local_reference_op
, info
,
2065 gimple_omp_body_ptr (stmt
));
2067 if (info
->new_local_var_chain
)
2068 declare_vars (info
->new_local_var_chain
,
2069 gimple_seq_first_stmt (gimple_omp_body (stmt
)), false);
2070 info
->new_local_var_chain
= save_local_var_chain
;
2071 info
->suppress_expansion
= save_suppress
;
2074 case GIMPLE_OMP_TEAMS
:
2075 save_suppress
= info
->suppress_expansion
;
2076 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt
), wi
);
2077 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2078 info
, gimple_omp_body_ptr (stmt
));
2079 info
->suppress_expansion
= save_suppress
;
2082 case GIMPLE_OMP_SECTION
:
2083 case GIMPLE_OMP_MASTER
:
2084 case GIMPLE_OMP_TASKGROUP
:
2085 case GIMPLE_OMP_ORDERED
:
2086 walk_body (convert_local_reference_stmt
, convert_local_reference_op
,
2087 info
, gimple_omp_body_ptr (stmt
));
2091 wi
->val_only
= true;
2093 *handled_ops_p
= false;
2097 if (gimple_clobber_p (stmt
))
2099 tree lhs
= gimple_assign_lhs (stmt
);
2100 if (!use_pointer_in_frame (lhs
)
2101 && lookup_field_for_decl (info
, lhs
, NO_INSERT
))
2103 gsi_replace (gsi
, gimple_build_nop (), true);
2107 *handled_ops_p
= false;
2111 for (tree var
= gimple_bind_vars (as_a
<gbind
*> (stmt
));
2113 var
= DECL_CHAIN (var
))
2114 if (TREE_CODE (var
) == NAMELIST_DECL
)
2116 /* Adjust decls mentioned in NAMELIST_DECL. */
2117 tree decls
= NAMELIST_DECL_ASSOCIATED_DECL (var
);
2121 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls
), i
, decl
)
2123 if (TREE_CODE (decl
) == VAR_DECL
2124 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2126 if (decl_function_context (decl
) == info
->context
2127 && !use_pointer_in_frame (decl
))
2129 tree field
= lookup_field_for_decl (info
, decl
, NO_INSERT
);
2132 CONSTRUCTOR_ELT (decls
, i
)->value
2133 = get_local_debug_decl (info
, decl
, field
);
2139 *handled_ops_p
= false;
2143 /* For every other statement that we are not interested in
2144 handling here, let the walker traverse the operands. */
2145 *handled_ops_p
= false;
2149 /* Indicate that we have handled all the operands ourselves. */
2150 *handled_ops_p
= true;
2155 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2156 that reference labels from outer functions. The rewrite will be a
2157 call to __builtin_nonlocal_goto. */
2160 convert_nl_goto_reference (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2161 struct walk_stmt_info
*wi
)
2163 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2164 tree label
, new_label
, target_context
, x
, field
;
2166 gimple
*stmt
= gsi_stmt (*gsi
);
2168 if (gimple_code (stmt
) != GIMPLE_GOTO
)
2170 *handled_ops_p
= false;
2174 label
= gimple_goto_dest (stmt
);
2175 if (TREE_CODE (label
) != LABEL_DECL
)
2177 *handled_ops_p
= false;
2181 target_context
= decl_function_context (label
);
2182 if (target_context
== info
->context
)
2184 *handled_ops_p
= false;
2188 for (i
= info
->outer
; target_context
!= i
->context
; i
= i
->outer
)
2191 /* The original user label may also be use for a normal goto, therefore
2192 we must create a new label that will actually receive the abnormal
2193 control transfer. This new label will be marked LABEL_NONLOCAL; this
2194 mark will trigger proper behavior in the cfg, as well as cause the
2195 (hairy target-specific) non-local goto receiver code to be generated
2196 when we expand rtl. Enter this association into var_map so that we
2197 can insert the new label into the IL during a second pass. */
2198 tree
*slot
= &i
->var_map
->get_or_insert (label
);
2201 new_label
= create_artificial_label (UNKNOWN_LOCATION
);
2202 DECL_NONLOCAL (new_label
) = 1;
2208 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2209 field
= get_nl_goto_field (i
);
2210 x
= get_frame_field (info
, target_context
, field
, gsi
);
2212 x
= gsi_gimplify_val (info
, x
, gsi
);
2213 call
= gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO
),
2214 2, build_addr (new_label
), x
);
2215 gsi_replace (gsi
, call
, false);
2217 /* We have handled all of STMT's operands, no need to keep going. */
2218 *handled_ops_p
= true;
2223 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2224 are referenced via nonlocal goto from a nested function. The rewrite
2225 will involve installing a newly generated DECL_NONLOCAL label, and
2226 (potentially) a branch around the rtl gunk that is assumed to be
2227 attached to such a label. */
2230 convert_nl_goto_receiver (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2231 struct walk_stmt_info
*wi
)
2233 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2234 tree label
, new_label
;
2235 gimple_stmt_iterator tmp_gsi
;
2236 glabel
*stmt
= dyn_cast
<glabel
*> (gsi_stmt (*gsi
));
2240 *handled_ops_p
= false;
2244 label
= gimple_label_label (stmt
);
2246 tree
*slot
= info
->var_map
->get (label
);
2249 *handled_ops_p
= false;
2253 /* If there's any possibility that the previous statement falls through,
2254 then we must branch around the new non-local label. */
2256 gsi_prev (&tmp_gsi
);
2257 if (gsi_end_p (tmp_gsi
) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi
)))
2259 gimple
*stmt
= gimple_build_goto (label
);
2260 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2263 new_label
= (tree
) *slot
;
2264 stmt
= gimple_build_label (new_label
);
2265 gsi_insert_before (gsi
, stmt
, GSI_SAME_STMT
);
2267 *handled_ops_p
= true;
2272 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2273 of nested functions that require the use of trampolines. The rewrite
2274 will involve a reference a trampoline generated for the occasion. */
2277 convert_tramp_reference_op (tree
*tp
, int *walk_subtrees
, void *data
)
2279 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
2280 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
, *i
;
2281 tree t
= *tp
, decl
, target_context
, x
, builtin
;
2285 switch (TREE_CODE (t
))
2289 T.1 = &CHAIN->tramp;
2290 T.2 = __builtin_adjust_trampoline (T.1);
2291 T.3 = (func_type)T.2;
2294 decl
= TREE_OPERAND (t
, 0);
2295 if (TREE_CODE (decl
) != FUNCTION_DECL
)
2298 /* Only need to process nested functions. */
2299 target_context
= decl_function_context (decl
);
2300 if (!target_context
)
2303 /* If the nested function doesn't use a static chain, then
2304 it doesn't need a trampoline. */
2305 if (!DECL_STATIC_CHAIN (decl
))
2308 /* If we don't want a trampoline, then don't build one. */
2309 if (TREE_NO_TRAMPOLINE (t
))
2312 /* Lookup the immediate parent of the callee, as that's where
2313 we need to insert the trampoline. */
2314 for (i
= info
; i
->context
!= target_context
; i
= i
->outer
)
2316 x
= lookup_tramp_for_decl (i
, decl
, INSERT
);
2318 /* Compute the address of the field holding the trampoline. */
2319 x
= get_frame_field (info
, target_context
, x
, &wi
->gsi
);
2321 x
= gsi_gimplify_val (info
, x
, &wi
->gsi
);
2323 /* Do machine-specific ugliness. Normally this will involve
2324 computing extra alignment, but it can really be anything. */
2325 builtin
= builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE
);
2326 call
= gimple_build_call (builtin
, 1, x
);
2327 x
= init_tmp_var_with_call (info
, &wi
->gsi
, call
);
2329 /* Cast back to the proper function type. */
2330 x
= build1 (NOP_EXPR
, TREE_TYPE (t
), x
);
2331 x
= init_tmp_var (info
, x
, &wi
->gsi
);
2337 if (!IS_TYPE_OR_DECL_P (t
))
2346 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2347 to addresses of nested functions that require the use of
2348 trampolines. The rewrite will involve a reference a trampoline
2349 generated for the occasion. */
2352 convert_tramp_reference_stmt (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2353 struct walk_stmt_info
*wi
)
2355 struct nesting_info
*info
= (struct nesting_info
*) wi
->info
;
2356 gimple
*stmt
= gsi_stmt (*gsi
);
2358 switch (gimple_code (stmt
))
2362 /* Only walk call arguments, lest we generate trampolines for
2364 unsigned long i
, nargs
= gimple_call_num_args (stmt
);
2365 for (i
= 0; i
< nargs
; i
++)
2366 walk_tree (gimple_call_arg_ptr (stmt
, i
), convert_tramp_reference_op
,
2371 case GIMPLE_OMP_TARGET
:
2372 if (!is_gimple_omp_offloaded (stmt
))
2374 *handled_ops_p
= false;
2378 case GIMPLE_OMP_PARALLEL
:
2379 case GIMPLE_OMP_TASK
:
2381 tree save_local_var_chain
= info
->new_local_var_chain
;
2382 walk_gimple_op (stmt
, convert_tramp_reference_op
, wi
);
2383 info
->new_local_var_chain
= NULL
;
2384 char save_static_chain_added
= info
->static_chain_added
;
2385 info
->static_chain_added
= 0;
2386 walk_body (convert_tramp_reference_stmt
, convert_tramp_reference_op
,
2387 info
, gimple_omp_body_ptr (stmt
));
2388 if (info
->new_local_var_chain
)
2389 declare_vars (info
->new_local_var_chain
,
2390 gimple_seq_first_stmt (gimple_omp_body (stmt
)),
2392 for (int i
= 0; i
< 2; i
++)
2395 if ((info
->static_chain_added
& (1 << i
)) == 0)
2397 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2398 /* Don't add CHAIN.* or FRAME.* twice. */
2399 for (c
= gimple_omp_taskreg_clauses (stmt
);
2401 c
= OMP_CLAUSE_CHAIN (c
))
2402 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2403 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2404 && OMP_CLAUSE_DECL (c
) == decl
)
2406 if (c
== NULL
&& gimple_code (stmt
) != GIMPLE_OMP_TARGET
)
2408 c
= build_omp_clause (gimple_location (stmt
),
2409 i
? OMP_CLAUSE_FIRSTPRIVATE
2410 : OMP_CLAUSE_SHARED
);
2411 OMP_CLAUSE_DECL (c
) = decl
;
2412 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2413 gimple_omp_taskreg_set_clauses (stmt
, c
);
2417 c
= build_omp_clause (gimple_location (stmt
),
2419 OMP_CLAUSE_DECL (c
) = decl
;
2420 OMP_CLAUSE_SET_MAP_KIND (c
,
2421 i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2422 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2423 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2424 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2428 info
->new_local_var_chain
= save_local_var_chain
;
2429 info
->static_chain_added
|= save_static_chain_added
;
2434 *handled_ops_p
= false;
2438 *handled_ops_p
= true;
2444 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2445 that reference nested functions to make sure that the static chain
2446 is set up properly for the call. */
2449 convert_gimple_call (gimple_stmt_iterator
*gsi
, bool *handled_ops_p
,
2450 struct walk_stmt_info
*wi
)
2452 struct nesting_info
*const info
= (struct nesting_info
*) wi
->info
;
2453 tree decl
, target_context
;
2454 char save_static_chain_added
;
2456 gimple
*stmt
= gsi_stmt (*gsi
);
2458 switch (gimple_code (stmt
))
2461 if (gimple_call_chain (stmt
))
2463 decl
= gimple_call_fndecl (stmt
);
2466 target_context
= decl_function_context (decl
);
2467 if (target_context
&& DECL_STATIC_CHAIN (decl
))
2469 gimple_call_set_chain (as_a
<gcall
*> (stmt
),
2470 get_static_chain (info
, target_context
,
2472 info
->static_chain_added
|= (1 << (info
->context
!= target_context
));
2476 case GIMPLE_OMP_PARALLEL
:
2477 case GIMPLE_OMP_TASK
:
2478 save_static_chain_added
= info
->static_chain_added
;
2479 info
->static_chain_added
= 0;
2480 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2481 for (i
= 0; i
< 2; i
++)
2484 if ((info
->static_chain_added
& (1 << i
)) == 0)
2486 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2487 /* Don't add CHAIN.* or FRAME.* twice. */
2488 for (c
= gimple_omp_taskreg_clauses (stmt
);
2490 c
= OMP_CLAUSE_CHAIN (c
))
2491 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
2492 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
)
2493 && OMP_CLAUSE_DECL (c
) == decl
)
2497 c
= build_omp_clause (gimple_location (stmt
),
2498 i
? OMP_CLAUSE_FIRSTPRIVATE
2499 : OMP_CLAUSE_SHARED
);
2500 OMP_CLAUSE_DECL (c
) = decl
;
2501 OMP_CLAUSE_CHAIN (c
) = gimple_omp_taskreg_clauses (stmt
);
2502 gimple_omp_taskreg_set_clauses (stmt
, c
);
2505 info
->static_chain_added
|= save_static_chain_added
;
2508 case GIMPLE_OMP_TARGET
:
2509 if (!is_gimple_omp_offloaded (stmt
))
2511 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2514 save_static_chain_added
= info
->static_chain_added
;
2515 info
->static_chain_added
= 0;
2516 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2517 for (i
= 0; i
< 2; i
++)
2520 if ((info
->static_chain_added
& (1 << i
)) == 0)
2522 decl
= i
? get_chain_decl (info
) : info
->frame_decl
;
2523 /* Don't add CHAIN.* or FRAME.* twice. */
2524 for (c
= gimple_omp_target_clauses (stmt
);
2526 c
= OMP_CLAUSE_CHAIN (c
))
2527 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
2528 && OMP_CLAUSE_DECL (c
) == decl
)
2532 c
= build_omp_clause (gimple_location (stmt
), OMP_CLAUSE_MAP
);
2533 OMP_CLAUSE_DECL (c
) = decl
;
2534 OMP_CLAUSE_SET_MAP_KIND (c
, i
? GOMP_MAP_TO
: GOMP_MAP_TOFROM
);
2535 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
2536 OMP_CLAUSE_CHAIN (c
) = gimple_omp_target_clauses (stmt
);
2537 gimple_omp_target_set_clauses (as_a
<gomp_target
*> (stmt
),
2541 info
->static_chain_added
|= save_static_chain_added
;
2544 case GIMPLE_OMP_FOR
:
2545 walk_body (convert_gimple_call
, NULL
, info
,
2546 gimple_omp_for_pre_body_ptr (stmt
));
2548 case GIMPLE_OMP_SECTIONS
:
2549 case GIMPLE_OMP_SECTION
:
2550 case GIMPLE_OMP_SINGLE
:
2551 case GIMPLE_OMP_TEAMS
:
2552 case GIMPLE_OMP_MASTER
:
2553 case GIMPLE_OMP_TASKGROUP
:
2554 case GIMPLE_OMP_ORDERED
:
2555 case GIMPLE_OMP_CRITICAL
:
2556 walk_body (convert_gimple_call
, NULL
, info
, gimple_omp_body_ptr (stmt
));
2560 /* Keep looking for other operands. */
2561 *handled_ops_p
= false;
2565 *handled_ops_p
= true;
2569 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2570 call expressions. At the same time, determine if a nested function
2571 actually uses its static chain; if not, remember that. */
2574 convert_all_function_calls (struct nesting_info
*root
)
2576 unsigned int chain_count
= 0, old_chain_count
, iter_count
;
2577 struct nesting_info
*n
;
2579 /* First, optimistically clear static_chain for all decls that haven't
2580 used the static chain already for variable access. But always create
2581 it if not optimizing. This makes it possible to reconstruct the static
2582 nesting tree at run time and thus to resolve up-level references from
2583 within the debugger. */
2584 FOR_EACH_NEST_INFO (n
, root
)
2586 tree decl
= n
->context
;
2590 (void) get_frame_type (n
);
2592 (void) get_chain_decl (n
);
2594 else if (!n
->outer
|| (!n
->chain_decl
&& !n
->chain_field
))
2596 DECL_STATIC_CHAIN (decl
) = 0;
2597 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2598 fprintf (dump_file
, "Guessing no static-chain for %s\n",
2599 lang_hooks
.decl_printable_name (decl
, 2));
2602 DECL_STATIC_CHAIN (decl
) = 1;
2603 chain_count
+= DECL_STATIC_CHAIN (decl
);
2606 /* Walk the functions and perform transformations. Note that these
2607 transformations can induce new uses of the static chain, which in turn
2608 require re-examining all users of the decl. */
2609 /* ??? It would make sense to try to use the call graph to speed this up,
2610 but the call graph hasn't really been built yet. Even if it did, we
2611 would still need to iterate in this loop since address-of references
2612 wouldn't show up in the callgraph anyway. */
2616 old_chain_count
= chain_count
;
2620 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2621 fputc ('\n', dump_file
);
2623 FOR_EACH_NEST_INFO (n
, root
)
2625 tree decl
= n
->context
;
2626 walk_function (convert_tramp_reference_stmt
,
2627 convert_tramp_reference_op
, n
);
2628 walk_function (convert_gimple_call
, NULL
, n
);
2629 chain_count
+= DECL_STATIC_CHAIN (decl
);
2632 while (chain_count
!= old_chain_count
);
2634 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2635 fprintf (dump_file
, "convert_all_function_calls iterations: %u\n\n",
2639 struct nesting_copy_body_data
2642 struct nesting_info
*root
;
2645 /* A helper subroutine for debug_var_chain type remapping. */
2648 nesting_copy_decl (tree decl
, copy_body_data
*id
)
2650 struct nesting_copy_body_data
*nid
= (struct nesting_copy_body_data
*) id
;
2651 tree
*slot
= nid
->root
->var_map
->get (decl
);
2654 return (tree
) *slot
;
2656 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_ORIGINAL_TYPE (decl
))
2658 tree new_decl
= copy_decl_no_change (decl
, id
);
2659 DECL_ORIGINAL_TYPE (new_decl
)
2660 = remap_type (DECL_ORIGINAL_TYPE (decl
), id
);
2664 if (TREE_CODE (decl
) == VAR_DECL
2665 || TREE_CODE (decl
) == PARM_DECL
2666 || TREE_CODE (decl
) == RESULT_DECL
)
2669 return copy_decl_no_change (decl
, id
);
2672 /* A helper function for remap_vla_decls. See if *TP contains
2673 some remapped variables. */
2676 contains_remapped_vars (tree
*tp
, int *walk_subtrees
, void *data
)
2678 struct nesting_info
*root
= (struct nesting_info
*) data
;
2684 tree
*slot
= root
->var_map
->get (t
);
2692 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2696 remap_vla_decls (tree block
, struct nesting_info
*root
)
2698 tree var
, subblock
, val
, type
;
2699 struct nesting_copy_body_data id
;
2701 for (subblock
= BLOCK_SUBBLOCKS (block
);
2703 subblock
= BLOCK_CHAIN (subblock
))
2704 remap_vla_decls (subblock
, root
);
2706 for (var
= BLOCK_VARS (block
); var
; var
= DECL_CHAIN (var
))
2707 if (TREE_CODE (var
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (var
))
2709 val
= DECL_VALUE_EXPR (var
);
2710 type
= TREE_TYPE (var
);
2712 if (!(TREE_CODE (val
) == INDIRECT_REF
2713 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2714 && variably_modified_type_p (type
, NULL
)))
2717 if (root
->var_map
->get (TREE_OPERAND (val
, 0))
2718 || walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2722 if (var
== NULL_TREE
)
2725 memset (&id
, 0, sizeof (id
));
2726 id
.cb
.copy_decl
= nesting_copy_decl
;
2727 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
2730 for (; var
; var
= DECL_CHAIN (var
))
2731 if (TREE_CODE (var
) == VAR_DECL
&& DECL_HAS_VALUE_EXPR_P (var
))
2733 struct nesting_info
*i
;
2736 val
= DECL_VALUE_EXPR (var
);
2737 type
= TREE_TYPE (var
);
2739 if (!(TREE_CODE (val
) == INDIRECT_REF
2740 && TREE_CODE (TREE_OPERAND (val
, 0)) == VAR_DECL
2741 && variably_modified_type_p (type
, NULL
)))
2744 tree
*slot
= root
->var_map
->get (TREE_OPERAND (val
, 0));
2745 if (!slot
&& !walk_tree (&type
, contains_remapped_vars
, root
, NULL
))
2748 context
= decl_function_context (var
);
2749 for (i
= root
; i
; i
= i
->outer
)
2750 if (i
->context
== context
)
2756 /* Fully expand value expressions. This avoids having debug variables
2757 only referenced from them and that can be swept during GC. */
2760 tree t
= (tree
) *slot
;
2761 gcc_assert (DECL_P (t
) && DECL_HAS_VALUE_EXPR_P (t
));
2762 val
= build1 (INDIRECT_REF
, TREE_TYPE (val
), DECL_VALUE_EXPR (t
));
2765 id
.cb
.src_fn
= i
->context
;
2766 id
.cb
.dst_fn
= i
->context
;
2767 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
2769 TREE_TYPE (var
) = newt
= remap_type (type
, &id
.cb
);
2770 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
2772 newt
= TREE_TYPE (newt
);
2773 type
= TREE_TYPE (type
);
2775 if (TYPE_NAME (newt
)
2776 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
2777 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
2779 && TYPE_NAME (newt
) == TYPE_NAME (type
))
2780 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
2782 walk_tree (&val
, copy_tree_body_r
, &id
.cb
, NULL
);
2783 if (val
!= DECL_VALUE_EXPR (var
))
2784 SET_DECL_VALUE_EXPR (var
, val
);
2787 delete id
.cb
.decl_map
;
2790 /* Fold the MEM_REF *E. */
2792 fold_mem_refs (tree
*const &e
, void *data ATTRIBUTE_UNUSED
)
2794 tree
*ref_p
= CONST_CAST2 (tree
*, const tree
*, (const tree
*)e
);
2795 *ref_p
= fold (*ref_p
);
2799 /* Do "everything else" to clean up or complete state collected by the various
2800 walking passes -- create a field to hold the frame base address, lay out the
2801 types and decls, generate code to initialize the frame decl, store critical
2802 expressions in the struct function for rtl to find. */
2805 finalize_nesting_tree_1 (struct nesting_info
*root
)
2807 gimple_seq stmt_list
;
2809 tree context
= root
->context
;
2810 struct function
*sf
;
2814 /* If we created a non-local frame type or decl, we need to lay them
2815 out at this time. */
2816 if (root
->frame_type
)
2818 /* Debugging information needs to compute the frame base address of the
2819 parent frame out of the static chain from the nested frame.
2821 The static chain is the address of the FRAME record, so one could
2822 imagine it would be possible to compute the frame base address just
2823 adding a constant offset to this address. Unfortunately, this is not
2824 possible: if the FRAME object has alignment constraints that are
2825 stronger than the stack, then the offset between the frame base and
2826 the FRAME object will be dynamic.
2828 What we do instead is to append a field to the FRAME object that holds
2829 the frame base address: then debug info just has to fetch this
2832 /* Debugging information will refer to the CFA as the frame base
2833 address: we will do the same here. */
2834 const tree frame_addr_fndecl
2835 = builtin_decl_explicit (BUILT_IN_DWARF_CFA
);
2837 /* Create a field in the FRAME record to hold the frame base address for
2838 this stack frame. Since it will be used only by the debugger, put it
2839 at the end of the record in order not to shift all other offsets. */
2840 tree fb_decl
= make_node (FIELD_DECL
);
2842 DECL_NAME (fb_decl
) = get_identifier ("FRAME_BASE.PARENT");
2843 TREE_TYPE (fb_decl
) = ptr_type_node
;
2844 TREE_ADDRESSABLE (fb_decl
) = 1;
2845 DECL_CONTEXT (fb_decl
) = root
->frame_type
;
2846 TYPE_FIELDS (root
->frame_type
) = chainon (TYPE_FIELDS (root
->frame_type
),
2849 /* In some cases the frame type will trigger the -Wpadded warning.
2850 This is not helpful; suppress it. */
2851 int save_warn_padded
= warn_padded
;
2853 layout_type (root
->frame_type
);
2854 warn_padded
= save_warn_padded
;
2855 layout_decl (root
->frame_decl
, 0);
2857 /* Initialize the frame base address field. If the builtin we need is
2858 not available, set it to NULL so that debugging information does not
2860 tree fb_ref
= build3 (COMPONENT_REF
, TREE_TYPE (fb_decl
),
2861 root
->frame_decl
, fb_decl
, NULL_TREE
);
2864 if (frame_addr_fndecl
!= NULL_TREE
)
2866 gcall
*fb_gimple
= gimple_build_call (frame_addr_fndecl
, 1,
2868 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
2870 fb_tmp
= init_tmp_var_with_call (root
, &gsi
, fb_gimple
);
2873 fb_tmp
= build_int_cst (TREE_TYPE (fb_ref
), 0);
2874 gimple_seq_add_stmt (&stmt_list
,
2875 gimple_build_assign (fb_ref
, fb_tmp
));
2877 /* Remove root->frame_decl from root->new_local_var_chain, so
2878 that we can declare it also in the lexical blocks, which
2879 helps ensure virtual regs that end up appearing in its RTL
2880 expression get substituted in instantiate_virtual_regs(). */
2882 for (adjust
= &root
->new_local_var_chain
;
2883 *adjust
!= root
->frame_decl
;
2884 adjust
= &DECL_CHAIN (*adjust
))
2885 gcc_assert (DECL_CHAIN (*adjust
));
2886 *adjust
= DECL_CHAIN (*adjust
);
2888 DECL_CHAIN (root
->frame_decl
) = NULL_TREE
;
2889 declare_vars (root
->frame_decl
,
2890 gimple_seq_first_stmt (gimple_body (context
)), true);
2893 /* If any parameters were referenced non-locally, then we need to
2894 insert a copy. Likewise, if any variables were referenced by
2895 pointer, we need to initialize the address. */
2896 if (root
->any_parm_remapped
)
2899 for (p
= DECL_ARGUMENTS (context
); p
; p
= DECL_CHAIN (p
))
2903 field
= lookup_field_for_decl (root
, p
, NO_INSERT
);
2907 if (use_pointer_in_frame (p
))
2912 /* If the assignment is from a non-register the stmt is
2913 not valid gimple. Make it so by using a temporary instead. */
2914 if (!is_gimple_reg (x
)
2915 && is_gimple_reg_type (TREE_TYPE (x
)))
2917 gimple_stmt_iterator gsi
= gsi_last (stmt_list
);
2918 x
= init_tmp_var (root
, x
, &gsi
);
2921 y
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2922 root
->frame_decl
, field
, NULL_TREE
);
2923 stmt
= gimple_build_assign (y
, x
);
2924 gimple_seq_add_stmt (&stmt_list
, stmt
);
2928 /* If a chain_field was created, then it needs to be initialized
2930 if (root
->chain_field
)
2932 tree x
= build3 (COMPONENT_REF
, TREE_TYPE (root
->chain_field
),
2933 root
->frame_decl
, root
->chain_field
, NULL_TREE
);
2934 stmt
= gimple_build_assign (x
, get_chain_decl (root
));
2935 gimple_seq_add_stmt (&stmt_list
, stmt
);
2938 /* If trampolines were created, then we need to initialize them. */
2939 if (root
->any_tramp_created
)
2941 struct nesting_info
*i
;
2942 for (i
= root
->inner
; i
; i
= i
->next
)
2944 tree arg1
, arg2
, arg3
, x
, field
;
2946 field
= lookup_tramp_for_decl (root
, i
->context
, NO_INSERT
);
2950 gcc_assert (DECL_STATIC_CHAIN (i
->context
));
2951 arg3
= build_addr (root
->frame_decl
);
2953 arg2
= build_addr (i
->context
);
2955 x
= build3 (COMPONENT_REF
, TREE_TYPE (field
),
2956 root
->frame_decl
, field
, NULL_TREE
);
2957 arg1
= build_addr (x
);
2959 x
= builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE
);
2960 stmt
= gimple_build_call (x
, 3, arg1
, arg2
, arg3
);
2961 gimple_seq_add_stmt (&stmt_list
, stmt
);
2965 /* If we created initialization statements, insert them. */
2969 annotate_all_with_location (stmt_list
, DECL_SOURCE_LOCATION (context
));
2970 bind
= gimple_seq_first_stmt_as_a_bind (gimple_body (context
));
2971 gimple_seq_add_seq (&stmt_list
, gimple_bind_body (bind
));
2972 gimple_bind_set_body (bind
, stmt_list
);
2975 /* If a chain_decl was created, then it needs to be registered with
2976 struct function so that it gets initialized from the static chain
2977 register at the beginning of the function. */
2978 sf
= DECL_STRUCT_FUNCTION (root
->context
);
2979 sf
->static_chain_decl
= root
->chain_decl
;
2981 /* Similarly for the non-local goto save area. */
2982 if (root
->nl_goto_field
)
2984 sf
->nonlocal_goto_save_area
2985 = get_frame_field (root
, context
, root
->nl_goto_field
, NULL
);
2986 sf
->has_nonlocal_label
= 1;
2989 /* Make sure all new local variables get inserted into the
2990 proper BIND_EXPR. */
2991 if (root
->new_local_var_chain
)
2992 declare_vars (root
->new_local_var_chain
,
2993 gimple_seq_first_stmt (gimple_body (root
->context
)),
2996 if (root
->debug_var_chain
)
3001 remap_vla_decls (DECL_INITIAL (root
->context
), root
);
3003 for (debug_var
= root
->debug_var_chain
; debug_var
;
3004 debug_var
= DECL_CHAIN (debug_var
))
3005 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3008 /* If there are any debug decls with variable length types,
3009 remap those types using other debug_var_chain variables. */
3012 struct nesting_copy_body_data id
;
3014 memset (&id
, 0, sizeof (id
));
3015 id
.cb
.copy_decl
= nesting_copy_decl
;
3016 id
.cb
.decl_map
= new hash_map
<tree
, tree
>;
3019 for (; debug_var
; debug_var
= DECL_CHAIN (debug_var
))
3020 if (variably_modified_type_p (TREE_TYPE (debug_var
), NULL
))
3022 tree type
= TREE_TYPE (debug_var
);
3023 tree newt
, t
= type
;
3024 struct nesting_info
*i
;
3026 for (i
= root
; i
; i
= i
->outer
)
3027 if (variably_modified_type_p (type
, i
->context
))
3033 id
.cb
.src_fn
= i
->context
;
3034 id
.cb
.dst_fn
= i
->context
;
3035 id
.cb
.src_cfun
= DECL_STRUCT_FUNCTION (root
->context
);
3037 TREE_TYPE (debug_var
) = newt
= remap_type (type
, &id
.cb
);
3038 while (POINTER_TYPE_P (newt
) && !TYPE_NAME (newt
))
3040 newt
= TREE_TYPE (newt
);
3043 if (TYPE_NAME (newt
)
3044 && TREE_CODE (TYPE_NAME (newt
)) == TYPE_DECL
3045 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt
))
3047 && TYPE_NAME (newt
) == TYPE_NAME (t
))
3048 TYPE_NAME (newt
) = remap_decl (TYPE_NAME (newt
), &id
.cb
);
3051 delete id
.cb
.decl_map
;
3054 scope
= gimple_seq_first_stmt_as_a_bind (gimple_body (root
->context
));
3055 if (gimple_bind_block (scope
))
3056 declare_vars (root
->debug_var_chain
, scope
, true);
3058 BLOCK_VARS (DECL_INITIAL (root
->context
))
3059 = chainon (BLOCK_VARS (DECL_INITIAL (root
->context
)),
3060 root
->debug_var_chain
);
3063 /* Fold the rewritten MEM_REF trees. */
3064 root
->mem_refs
->traverse
<void *, fold_mem_refs
> (NULL
);
3066 /* Dump the translated tree function. */
3069 fputs ("\n\n", dump_file
);
3070 dump_function_to_file (root
->context
, dump_file
, dump_flags
);
3075 finalize_nesting_tree (struct nesting_info
*root
)
3077 struct nesting_info
*n
;
3078 FOR_EACH_NEST_INFO (n
, root
)
3079 finalize_nesting_tree_1 (n
);
3082 /* Unnest the nodes and pass them to cgraph. */
3085 unnest_nesting_tree_1 (struct nesting_info
*root
)
3087 struct cgraph_node
*node
= cgraph_node::get (root
->context
);
3089 /* For nested functions update the cgraph to reflect unnesting.
3090 We also delay finalizing of these functions up to this point. */
3094 cgraph_node::finalize_function (root
->context
, true);
3099 unnest_nesting_tree (struct nesting_info
*root
)
3101 struct nesting_info
*n
;
3102 FOR_EACH_NEST_INFO (n
, root
)
3103 unnest_nesting_tree_1 (n
);
3106 /* Free the data structures allocated during this pass. */
3109 free_nesting_tree (struct nesting_info
*root
)
3111 struct nesting_info
*node
, *next
;
3113 node
= iter_nestinfo_start (root
);
3116 next
= iter_nestinfo_next (node
);
3117 delete node
->var_map
;
3118 delete node
->field_map
;
3119 delete node
->mem_refs
;
3126 /* Gimplify a function and all its nested functions. */
3128 gimplify_all_functions (struct cgraph_node
*root
)
3130 struct cgraph_node
*iter
;
3131 if (!gimple_body (root
->decl
))
3132 gimplify_function_tree (root
->decl
);
3133 for (iter
= root
->nested
; iter
; iter
= iter
->next_nested
)
3134 gimplify_all_functions (iter
);
3137 /* Main entry point for this pass. Process FNDECL and all of its nested
3138 subroutines and turn them into something less tightly bound. */
3141 lower_nested_functions (tree fndecl
)
3143 struct cgraph_node
*cgn
;
3144 struct nesting_info
*root
;
3146 /* If there are no nested functions, there's nothing to do. */
3147 cgn
= cgraph_node::get (fndecl
);
3151 gimplify_all_functions (cgn
);
3153 dump_file
= dump_begin (TDI_nested
, &dump_flags
);
3155 fprintf (dump_file
, "\n;; Function %s\n\n",
3156 lang_hooks
.decl_printable_name (fndecl
, 2));
3158 bitmap_obstack_initialize (&nesting_info_bitmap_obstack
);
3159 root
= create_nesting_tree (cgn
);
3161 walk_all_functions (convert_nonlocal_reference_stmt
,
3162 convert_nonlocal_reference_op
,
3164 walk_all_functions (convert_local_reference_stmt
,
3165 convert_local_reference_op
,
3167 walk_all_functions (convert_nl_goto_reference
, NULL
, root
);
3168 walk_all_functions (convert_nl_goto_receiver
, NULL
, root
);
3170 convert_all_function_calls (root
);
3171 finalize_nesting_tree (root
);
3172 unnest_nesting_tree (root
);
3174 free_nesting_tree (root
);
3175 bitmap_obstack_release (&nesting_info_bitmap_obstack
);
3179 dump_end (TDI_nested
, dump_file
);
3184 #include "gt-tree-nested.h"