1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tm_p.h"
28 #include "stringpool.h"
29 #include "cgraph.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-cfg.h"
38 #include "explow.h"
39 #include "langhooks.h"
40 #include "gimple-low.h"
41 #include "gomp-constants.h"
42
43
44 /* The object of this pass is to lower the representation of a set of nested
45 functions in order to expose all of the gory details of the various
46 nonlocal references. We want to do this sooner rather than later, in
47 order to give us more freedom in emitting all of the functions in question.
48
49 Back in olden times, when gcc was young, we developed an insanely
50 complicated scheme whereby variables which were referenced nonlocally
51 were forced to live in the stack of the declaring function, and then
52 the nested functions magically discovered where these variables were
53 placed. In order for this scheme to function properly, it required
54 that the outer function be partially expanded, that we then switch to
55 compiling the inner function, and that once done with it we switch back
56 to compiling the outer function. Such delicate ordering requirements
57 make it difficult to do whole translation unit optimizations
58 involving such functions.
59
60 The implementation here is much more direct. Everything that can be
61 referenced by an inner function is a member of an explicitly created
62 structure herein called the "nonlocal frame struct". The incoming
63 static chain for a nested function is a pointer to this struct in
64 the parent. In this way, we settle on known offsets from a known
65 base, and so are decoupled from the logic that places objects in the
66 function's stack frame. More importantly, we don't have to wait for
67 that to happen -- since the compilation of the inner function is no
68 longer tied to a real stack frame, the nonlocal frame struct can be
69 allocated anywhere. Which means that the outer function is now
70 inlinable.
71
72 Theory of operation here is very simple. Iterate over all the
73 statements in all the functions (depth first) several times,
74 allocating structures and fields on demand. In general we want to
75 examine inner functions first, so that we can avoid making
76 unnecessary changes to outer functions.
77
78 The order of the passes matters a bit, in that later passes will be
79 skipped if it is discovered that the functions don't actually interact
80 at all. That is, they're nested in the lexical sense but could have
81 been written as independent functions without change. */
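
   /* An illustration, not part of this file: a nested function such as

        int outer (int n)
        {
          int x = n;
          int inner (void) { return x + 1; }
          return inner ();
        }

      is conceptually lowered to something like

        struct FRAME_outer { int x; };
        static int inner (struct FRAME_outer *chain) { return chain->x + 1; }
        int outer (int n)
        {
          struct FRAME_outer frame;
          frame.x = n;
          return inner (&frame);
        }

      where CHAIN is the static chain pointer described above.  The names
      FRAME_outer, frame and chain are invented for this sketch; the pass
      itself uses "FRAME.outer", "FRAME" and "CHAIN.<n>".  */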
82
83
84 struct nesting_info
85 {
86 struct nesting_info *outer;
87 struct nesting_info *inner;
88 struct nesting_info *next;
89
90 hash_map<tree, tree> *field_map;
91 hash_map<tree, tree> *var_map;
92 hash_set<tree *> *mem_refs;
93 bitmap suppress_expansion;
94
95 tree context;
96 tree new_local_var_chain;
97 tree debug_var_chain;
98 tree frame_type;
99 tree frame_decl;
100 tree chain_field;
101 tree chain_decl;
102 tree nl_goto_field;
103
104 bool any_parm_remapped;
105 bool any_tramp_created;
106 char static_chain_added;
107 };
108
109
110 /* Iterate over the nesting tree, starting with ROOT, depth first. */
111
112 static inline struct nesting_info *
113 iter_nestinfo_start (struct nesting_info *root)
114 {
115 while (root->inner)
116 root = root->inner;
117 return root;
118 }
119
120 static inline struct nesting_info *
121 iter_nestinfo_next (struct nesting_info *node)
122 {
123 if (node->next)
124 return iter_nestinfo_start (node->next);
125 return node->outer;
126 }
127
128 #define FOR_EACH_NEST_INFO(I, ROOT) \
129 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
130
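/* For example, if at this point A->inner == C, C->next == B and
   C->inner == D (a hypothetical tree of functions B, C, D nested in A),
   FOR_EACH_NEST_INFO visits D, C, B, A -- every function strictly before
   the function that encloses it.  */
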
131 /* Obstack used for the bitmaps in the struct above. */
132 static struct bitmap_obstack nesting_info_bitmap_obstack;
133
134
135 /* We're working in so many different function contexts simultaneously
136 that create_tmp_var is dangerous. Prevent mishap. */
137 #define create_tmp_var cant_use_create_tmp_var_here_dummy
138
139 /* Like create_tmp_var, except record the variable for registration at
140 the given nesting level. */
141
142 static tree
143 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
144 {
145 tree tmp_var;
146
147 /* If the type is of variable size or a type which must be created by the
148 frontend, something is wrong. Note that we explicitly allow
149 incomplete types here, since we create them ourselves. */
150 gcc_assert (!TREE_ADDRESSABLE (type));
151 gcc_assert (!TYPE_SIZE_UNIT (type)
152 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
153
154 tmp_var = create_tmp_var_raw (type, prefix);
155 DECL_CONTEXT (tmp_var) = info->context;
156 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
157 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
158 if (TREE_CODE (type) == COMPLEX_TYPE
159 || TREE_CODE (type) == VECTOR_TYPE)
160 DECL_GIMPLE_REG_P (tmp_var) = 1;
161
162 info->new_local_var_chain = tmp_var;
163
164 return tmp_var;
165 }
166
167 /* Take the address of EXP to be used within the current function.
168 Mark it for addressability as necessary. */
169
170 tree
171 build_addr (tree exp)
172 {
173 mark_addressable (exp);
174 return build_fold_addr_expr (exp);
175 }
176
177 /* Insert FIELD into TYPE, sorted by alignment requirements. */
178
179 void
180 insert_field_into_struct (tree type, tree field)
181 {
182 tree *p;
183
184 DECL_CONTEXT (field) = type;
185
186 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
187 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
188 break;
189
190 DECL_CHAIN (field) = *p;
191 *p = field;
192
193 /* Set correct alignment for frame struct type. */
194 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
195 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
196 }
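
/* Example: inserting fields with alignments 4, 8 and then 4 into an empty
   type yields the chain 8, 4, 4 -- each new field is placed before the
   first existing field of no greater alignment, keeping TYPE_FIELDS
   sorted by decreasing alignment and the padding small.  */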
197
198 /* Build or return the RECORD_TYPE that describes the frame state that is
199 shared between INFO->CONTEXT and its nested functions. This record will
200 not be complete until finalize_nesting_tree; up until that point we'll
201 be adding fields as necessary.
202
203 We also build the DECL that represents this frame in the function. */
204
205 static tree
206 get_frame_type (struct nesting_info *info)
207 {
208 tree type = info->frame_type;
209 if (!type)
210 {
211 char *name;
212
213 type = make_node (RECORD_TYPE);
214
215 name = concat ("FRAME.",
216 IDENTIFIER_POINTER (DECL_NAME (info->context)),
217 NULL);
218 TYPE_NAME (type) = get_identifier (name);
219 free (name);
220
221 info->frame_type = type;
222 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
223 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
224
225 /* ??? Always make it addressable for now, since it is meant to
226 be pointed to by the static chain pointer. This pessimizes
227 when it turns out that no static chains are needed because
228 the nested functions referencing non-local variables are not
229 reachable, but the true pessimization is to create the non-
230 local frame structure in the first place. */
231 TREE_ADDRESSABLE (info->frame_decl) = 1;
232 }
233 return type;
234 }
235
236 /* Return true if DECL should be referenced by pointer in the non-local
237 frame structure. */
238
239 static bool
240 use_pointer_in_frame (tree decl)
241 {
242 if (TREE_CODE (decl) == PARM_DECL)
243 {
244 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
245 sized decls, and inefficient to copy large aggregates. Don't bother
246 moving anything but scalar variables. */
247 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
248 }
249 else
250 {
251 /* Variable sized types make things "interesting" in the frame. */
252 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
253 }
254 }
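
/* Informal example (struct big is invented):

     void outer (struct big s, int n)
     {
       int vla[n];
       ...
     }

   Here S (an aggregate PARM_DECL) and VLA (no constant DECL_SIZE) would
   be referenced through pointers in the frame, while the scalar N would
   be copied into the frame struct directly.  */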
255
256 /* Given DECL, a non-locally accessed variable, find or create a field
257 in the non-local frame structure for the given nesting context. */
258
259 static tree
260 lookup_field_for_decl (struct nesting_info *info, tree decl,
261 enum insert_option insert)
262 {
263 if (insert == NO_INSERT)
264 {
265 tree *slot = info->field_map->get (decl);
266 return slot ? *slot : NULL_TREE;
267 }
268
269 tree *slot = &info->field_map->get_or_insert (decl);
270 if (!*slot)
271 {
272 tree field = make_node (FIELD_DECL);
273 DECL_NAME (field) = DECL_NAME (decl);
274
275 if (use_pointer_in_frame (decl))
276 {
277 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
278 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
279 DECL_NONADDRESSABLE_P (field) = 1;
280 }
281 else
282 {
283 TREE_TYPE (field) = TREE_TYPE (decl);
284 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
285 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
286 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
287 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
288 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
289 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
290 }
291
292 insert_field_into_struct (get_frame_type (info), field);
293 *slot = field;
294
295 if (TREE_CODE (decl) == PARM_DECL)
296 info->any_parm_remapped = true;
297 }
298
299 return *slot;
300 }
301
302 /* Build or return the variable that holds the static chain within
303 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
304
305 static tree
306 get_chain_decl (struct nesting_info *info)
307 {
308 tree decl = info->chain_decl;
309
310 if (!decl)
311 {
312 tree type;
313
314 type = get_frame_type (info->outer);
315 type = build_pointer_type (type);
316
317 /* Note that this variable is *not* entered into any BIND_EXPR;
318 the construction of this variable is handled specially in
319 expand_function_start and initialize_inlined_parameters.
320 Note also that it's represented as a parameter. This is
321 closer to the truth, since the initial value does come from
322 the caller. */
323 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
324 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
325 DECL_ARTIFICIAL (decl) = 1;
326 DECL_IGNORED_P (decl) = 1;
327 TREE_USED (decl) = 1;
328 DECL_CONTEXT (decl) = info->context;
329 DECL_ARG_TYPE (decl) = type;
330
331 /* Tell tree-inline.c that we never write to this variable, so
332 it can copy-prop the replacement value immediately. */
333 TREE_READONLY (decl) = 1;
334
335 info->chain_decl = decl;
336
337 if (dump_file
338 && (dump_flags & TDF_DETAILS)
339 && !DECL_STATIC_CHAIN (info->context))
340 fprintf (dump_file, "Setting static-chain for %s\n",
341 lang_hooks.decl_printable_name (info->context, 2));
342
343 DECL_STATIC_CHAIN (info->context) = 1;
344 }
345 return decl;
346 }
347
348 /* Build or return the field within the non-local frame state that holds
349 the static chain for INFO->CONTEXT. This is the way to walk back up
350 multiple nesting levels. */
351
352 static tree
353 get_chain_field (struct nesting_info *info)
354 {
355 tree field = info->chain_field;
356
357 if (!field)
358 {
359 tree type = build_pointer_type (get_frame_type (info->outer));
360
361 field = make_node (FIELD_DECL);
362 DECL_NAME (field) = get_identifier ("__chain");
363 TREE_TYPE (field) = type;
364 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
365 DECL_NONADDRESSABLE_P (field) = 1;
366
367 insert_field_into_struct (get_frame_type (info), field);
368
369 info->chain_field = field;
370
371 if (dump_file
372 && (dump_flags & TDF_DETAILS)
373 && !DECL_STATIC_CHAIN (info->context))
374 fprintf (dump_file, "Setting static-chain for %s\n",
375 lang_hooks.decl_printable_name (info->context, 2));
376
377 DECL_STATIC_CHAIN (info->context) = 1;
378 }
379 return field;
380 }
381
382 /* Initialize a new temporary with the GIMPLE_CALL CALL. */
383
384 static tree
385 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
386 gcall *call)
387 {
388 tree t;
389
390 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
391 gimple_call_set_lhs (call, t);
392 if (! gsi_end_p (*gsi))
393 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
394 gsi_insert_before (gsi, call, GSI_SAME_STMT);
395
396 return t;
397 }
398
399
400 /* Copy EXP into a temporary. Allocate the temporary in the context of
401 INFO and insert the initialization statement before GSI. */
402
403 static tree
404 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
405 {
406 tree t;
407 gimple *stmt;
408
409 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
410 stmt = gimple_build_assign (t, exp);
411 if (! gsi_end_p (*gsi))
412 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
413 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
414
415 return t;
416 }
417
418
419 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
420
421 static tree
422 gsi_gimplify_val (struct nesting_info *info, tree exp,
423 gimple_stmt_iterator *gsi)
424 {
425 if (is_gimple_val (exp))
426 return exp;
427 else
428 return init_tmp_var (info, exp, gsi);
429 }
430
431 /* Similarly, but copy from the temporary and insert the statement
432 after the iterator. */
433
434 static tree
435 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
436 {
437 tree t;
438 gimple *stmt;
439
440 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
441 stmt = gimple_build_assign (exp, t);
442 if (! gsi_end_p (*gsi))
443 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
444 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
445
446 return t;
447 }
448
449 /* Build or return the type used to represent a nested function trampoline. */
450
451 static GTY(()) tree trampoline_type;
452
453 static tree
454 get_trampoline_type (struct nesting_info *info)
455 {
456 unsigned align, size;
457 tree t;
458
459 if (trampoline_type)
460 return trampoline_type;
461
462 align = TRAMPOLINE_ALIGNMENT;
463 size = TRAMPOLINE_SIZE;
464
465 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
466 then allocate extra space so that we can do dynamic alignment. */
467 if (align > STACK_BOUNDARY)
468 {
469 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
470 align = STACK_BOUNDARY;
471 }
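
  /* Worked example with made-up target values: TRAMPOLINE_SIZE == 24,
     TRAMPOLINE_ALIGNMENT == 128 and STACK_BOUNDARY == 64 give
     size += (16 - 1) & -8, i.e. size == 32, so an 8-byte-aligned
     32-byte buffer always contains a 24-byte span that can be
     realigned to 16 bytes at run time.  */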
472
473 t = build_index_type (size_int (size - 1));
474 t = build_array_type (char_type_node, t);
475 t = build_decl (DECL_SOURCE_LOCATION (info->context),
476 FIELD_DECL, get_identifier ("__data"), t);
477 SET_DECL_ALIGN (t, align);
478 DECL_USER_ALIGN (t) = 1;
479
480 trampoline_type = make_node (RECORD_TYPE);
481 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
482 TYPE_FIELDS (trampoline_type) = t;
483 layout_type (trampoline_type);
484 DECL_CONTEXT (t) = trampoline_type;
485
486 return trampoline_type;
487 }
488
489 /* Given DECL, a nested function, find or create a field in the non-local
490 frame structure for a trampoline for this function. */
491
492 static tree
493 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
494 enum insert_option insert)
495 {
496 if (insert == NO_INSERT)
497 {
498 tree *slot = info->var_map->get (decl);
499 return slot ? *slot : NULL_TREE;
500 }
501
502 tree *slot = &info->var_map->get_or_insert (decl);
503 if (!*slot)
504 {
505 tree field = make_node (FIELD_DECL);
506 DECL_NAME (field) = DECL_NAME (decl);
507 TREE_TYPE (field) = get_trampoline_type (info);
508 TREE_ADDRESSABLE (field) = 1;
509
510 insert_field_into_struct (get_frame_type (info), field);
511 *slot = field;
512
513 info->any_tramp_created = true;
514 }
515
516 return *slot;
517 }
518
519 /* Build or return the field within the non-local frame state that holds
520 the non-local goto "jmp_buf". The buffer itself is maintained by the
521 rtl middle-end as dynamic stack space is allocated. */
522
523 static tree
524 get_nl_goto_field (struct nesting_info *info)
525 {
526 tree field = info->nl_goto_field;
527 if (!field)
528 {
529 unsigned size;
530 tree type;
531
532 /* For __builtin_nonlocal_goto, we need N words. The first is the
533 frame pointer, the rest is for the target's stack pointer save
534 area. The number of words is controlled by STACK_SAVEAREA_MODE;
535 not the best interface, but it'll do for now. */
536 if (Pmode == ptr_mode)
537 type = ptr_type_node;
538 else
539 type = lang_hooks.types.type_for_mode (Pmode, 1);
540
541 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
542 size = size / GET_MODE_SIZE (Pmode);
543 size = size + 1;
544
545 type = build_array_type
546 (type, build_index_type (size_int (size)));
547
548 field = make_node (FIELD_DECL);
549 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
550 TREE_TYPE (field) = type;
551 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
552 TREE_ADDRESSABLE (field) = 1;
553
554 insert_field_into_struct (get_frame_type (info), field);
555
556 info->nl_goto_field = field;
557 }
558
559 return field;
560 }
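
/* Example with made-up widths: if STACK_SAVEAREA_MODE (SAVE_NONLOCAL)
   is 16 bytes and Pmode is 8 bytes, SIZE above becomes 16/8 + 1 == 3
   and the index type runs 0..3, giving four words: the frame pointer
   plus the stack save area.  */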
561
563 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence *PSEQ. */
563
564 static void
565 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
566 struct nesting_info *info, gimple_seq *pseq)
567 {
568 struct walk_stmt_info wi;
569
570 memset (&wi, 0, sizeof (wi));
571 wi.info = info;
572 wi.val_only = true;
573 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
574 }
575
576
577 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
578
579 static inline void
580 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
581 struct nesting_info *info)
582 {
583 gimple_seq body = gimple_body (info->context);
584 walk_body (callback_stmt, callback_op, info, &body);
585 gimple_set_body (info->context, body);
586 }
587
588 /* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
589
590 static void
591 walk_gimple_omp_for (gomp_for *for_stmt,
592 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
593 struct nesting_info *info)
594 {
595 struct walk_stmt_info wi;
596 gimple_seq seq;
597 tree t;
598 size_t i;
599
600 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
601
602 seq = NULL;
603 memset (&wi, 0, sizeof (wi));
604 wi.info = info;
605 wi.gsi = gsi_last (seq);
606
607 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
608 {
609 wi.val_only = false;
610 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
611 &wi, NULL);
612 wi.val_only = true;
613 wi.is_lhs = false;
614 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
615 &wi, NULL);
616
617 wi.val_only = true;
618 wi.is_lhs = false;
619 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
620 &wi, NULL);
621
622 t = gimple_omp_for_incr (for_stmt, i);
623 gcc_assert (BINARY_CLASS_P (t));
624 wi.val_only = false;
625 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
626 wi.val_only = true;
627 wi.is_lhs = false;
628 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
629 }
630
631 seq = gsi_seq (wi.gsi);
632 if (!gimple_seq_empty_p (seq))
633 {
634 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
635 annotate_all_with_location (seq, gimple_location (for_stmt));
636 gimple_seq_add_seq (&pre_body, seq);
637 gimple_omp_for_set_pre_body (for_stmt, pre_body);
638 }
639 }
640
641 /* Similarly for ROOT and all functions nested underneath, depth first. */
642
643 static void
644 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
645 struct nesting_info *root)
646 {
647 struct nesting_info *n;
648 FOR_EACH_NEST_INFO (n, root)
649 walk_function (callback_stmt, callback_op, n);
650 }
651
652
653 /* We have to check for a fairly pathological case. The operands of a
654 nested function are to be interpreted in the context of the enclosing
655 function. So if any are variably-sized, they will get remapped when the
656 enclosing function is inlined. But that remapping would also have to be
657 done in the types of the PARM_DECLs of the nested function, meaning the
658 argument types of that function will disagree with the arguments in the
659 calls to that function. So we'd either have to make a copy of the nested
660 function corresponding to each time the enclosing function was inlined or
661 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
662 function. The former is not practical. The latter would still require
663 detecting this case to know when to add the conversions. So, for now at
664 least, we don't inline such an enclosing function.
665
666 We have to do that check recursively, so here we return an indication of
667 whether FNDECL has such a nested function. ORIG_FNDECL is the function we
668 were originally trying to inline; it is used to check whether any argument
669 is variably modified by anything in it.
670
671 It would be better to do this in tree-inline.c so that we could give
672 the appropriate warning for why a function can't be inlined, but that's
673 too late since the nesting structure has already been flattened and
674 adding a flag just to record this fact seems a waste of a flag. */
675
676 static bool
677 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
678 {
679 struct cgraph_node *cgn = cgraph_node::get (fndecl);
680 tree arg;
681
682 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
683 {
684 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
685 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
686 return true;
687
688 if (check_for_nested_with_variably_modified (cgn->decl,
689 orig_fndecl))
690 return true;
691 }
692
693 return false;
694 }
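
/* Informal example:

     void outer (int n)
     {
       void inner (int (*p)[n]) { ... }
       ...
     }

   The type of P is variably modified by N, so the check above causes
   OUTER to be marked DECL_UNINLINABLE (see create_nesting_tree).  */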
695
696 /* Construct our local data structure describing the function nesting
697 tree rooted by CGN. */
698
699 static struct nesting_info *
700 create_nesting_tree (struct cgraph_node *cgn)
701 {
702 struct nesting_info *info = XCNEW (struct nesting_info);
703 info->field_map = new hash_map<tree, tree>;
704 info->var_map = new hash_map<tree, tree>;
705 info->mem_refs = new hash_set<tree *>;
706 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
707 info->context = cgn->decl;
708
709 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
710 {
711 struct nesting_info *sub = create_nesting_tree (cgn);
712 sub->outer = info;
713 sub->next = info->inner;
714 info->inner = sub;
715 }
716
717 /* See check_for_nested_with_variably_modified for a discussion of
718 why this has to be here. */
719 if (check_for_nested_with_variably_modified (info->context, info->context))
720 DECL_UNINLINABLE (info->context) = true;
721
722 return info;
723 }
724
725 /* Return an expression computing the static chain for TARGET_CONTEXT
726 from INFO->CONTEXT. Insert any necessary computations before GSI. */
727
728 static tree
729 get_static_chain (struct nesting_info *info, tree target_context,
730 gimple_stmt_iterator *gsi)
731 {
732 struct nesting_info *i;
733 tree x;
734
735 if (info->context == target_context)
736 {
737 x = build_addr (info->frame_decl);
738 info->static_chain_added |= 1;
739 }
740 else
741 {
742 x = get_chain_decl (info);
743 info->static_chain_added |= 2;
744
745 for (i = info->outer; i->context != target_context; i = i->outer)
746 {
747 tree field = get_chain_field (i);
748
749 x = build_simple_mem_ref (x);
750 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
751 x = init_tmp_var (info, x, gsi);
752 }
753 }
754
755 return x;
756 }
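
/* Schematic example: if TARGET_CONTEXT is two frames above INFO->CONTEXT,
   the loop above runs once and emits, roughly,

     T.1 = CHAIN->__chain;

   before GSI, returning T.1; each additional level adds one more load
   of the __chain field.  T.1 is an invented temporary name.  */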
757
758
759 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
760 frame as seen from INFO->CONTEXT. Insert any necessary computations
761 before GSI. */
762
763 static tree
764 get_frame_field (struct nesting_info *info, tree target_context,
765 tree field, gimple_stmt_iterator *gsi)
766 {
767 struct nesting_info *i;
768 tree x;
769
770 if (info->context == target_context)
771 {
772 /* Make sure frame_decl gets created. */
773 (void) get_frame_type (info);
774 x = info->frame_decl;
775 info->static_chain_added |= 1;
776 }
777 else
778 {
779 x = get_chain_decl (info);
780 info->static_chain_added |= 2;
781
782 for (i = info->outer; i->context != target_context; i = i->outer)
783 {
784 tree field = get_chain_field (i);
785
786 x = build_simple_mem_ref (x);
787 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
788 x = init_tmp_var (info, x, gsi);
789 }
790
791 x = build_simple_mem_ref (x);
792 }
793
794 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
795 return x;
796 }
797
798 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
799
800 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
801 in the nested function with DECL_VALUE_EXPR set to reference the true
802 variable in the parent function. This is used both for debug info
803 and in OMP lowering. */
804
805 static tree
806 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
807 {
808 tree target_context;
809 struct nesting_info *i;
810 tree x, field, new_decl;
811
812 tree *slot = &info->var_map->get_or_insert (decl);
813
814 if (*slot)
815 return *slot;
816
817 target_context = decl_function_context (decl);
818
819 /* A copy of the code in get_frame_field, but without the temporaries. */
820 if (info->context == target_context)
821 {
822 /* Make sure frame_decl gets created. */
823 (void) get_frame_type (info);
824 x = info->frame_decl;
825 i = info;
826 info->static_chain_added |= 1;
827 }
828 else
829 {
830 x = get_chain_decl (info);
831 info->static_chain_added |= 2;
832 for (i = info->outer; i->context != target_context; i = i->outer)
833 {
834 field = get_chain_field (i);
835 x = build_simple_mem_ref (x);
836 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
837 }
838 x = build_simple_mem_ref (x);
839 }
840
841 field = lookup_field_for_decl (i, decl, INSERT);
842 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
843 if (use_pointer_in_frame (decl))
844 x = build_simple_mem_ref (x);
845
846 /* ??? We should be remapping types as well, surely. */
847 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
848 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
849 DECL_CONTEXT (new_decl) = info->context;
850 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
851 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
852 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
853 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
854 TREE_READONLY (new_decl) = TREE_READONLY (decl);
855 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
856 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
857 if ((TREE_CODE (decl) == PARM_DECL
858 || TREE_CODE (decl) == RESULT_DECL
859 || TREE_CODE (decl) == VAR_DECL)
860 && DECL_BY_REFERENCE (decl))
861 DECL_BY_REFERENCE (new_decl) = 1;
862
863 SET_DECL_VALUE_EXPR (new_decl, x);
864 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
865
866 *slot = new_decl;
867 DECL_CHAIN (new_decl) = info->debug_var_chain;
868 info->debug_var_chain = new_decl;
869
870 if (!optimize
871 && info->context != target_context
872 && variably_modified_type_p (TREE_TYPE (decl), NULL))
873 note_nonlocal_vla_type (info, TREE_TYPE (decl));
874
875 return new_decl;
876 }
877
878
879 /* Callback for walk_gimple_stmt; rewrite all references to VAR
880 and PARM_DECLs that belong to outer functions.
881
882 The rewrite will involve some number of structure accesses back up
883 the static chain. E.g. for a variable FOO up one nesting level it'll
884 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
885 indirections apply to decls for which use_pointer_in_frame is true. */
886
887 static tree
888 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
889 {
890 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
891 struct nesting_info *const info = (struct nesting_info *) wi->info;
892 tree t = *tp;
893
894 *walk_subtrees = 0;
895 switch (TREE_CODE (t))
896 {
897 case VAR_DECL:
898 /* Non-automatic variables are never processed. */
899 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
900 break;
901 /* FALLTHRU */
902
903 case PARM_DECL:
904 if (decl_function_context (t) != info->context)
905 {
906 tree x;
907 wi->changed = true;
908
909 x = get_nonlocal_debug_decl (info, t);
910 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
911 {
912 tree target_context = decl_function_context (t);
913 struct nesting_info *i;
914 for (i = info->outer; i->context != target_context; i = i->outer)
915 continue;
916 x = lookup_field_for_decl (i, t, INSERT);
917 x = get_frame_field (info, target_context, x, &wi->gsi);
918 if (use_pointer_in_frame (t))
919 {
920 x = init_tmp_var (info, x, &wi->gsi);
921 x = build_simple_mem_ref (x);
922 }
923 }
924
925 if (wi->val_only)
926 {
927 if (wi->is_lhs)
928 x = save_tmp_var (info, x, &wi->gsi);
929 else
930 x = init_tmp_var (info, x, &wi->gsi);
931 }
932
933 *tp = x;
934 }
935 break;
936
937 case LABEL_DECL:
938 /* We're taking the address of a label from a parent function, but
939 this is not itself a non-local goto. Mark the label such that it
940 will not be deleted, much as we would with a label address in
941 static storage. */
942 if (decl_function_context (t) != info->context)
943 FORCED_LABEL (t) = 1;
944 break;
945
946 case ADDR_EXPR:
947 {
948 bool save_val_only = wi->val_only;
949
950 wi->val_only = false;
951 wi->is_lhs = false;
952 wi->changed = false;
953 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
954 wi->val_only = true;
955
956 if (wi->changed)
957 {
958 tree save_context;
959
960 /* If we changed anything, we might no longer be directly
961 referencing a decl. */
962 save_context = current_function_decl;
963 current_function_decl = info->context;
964 recompute_tree_invariant_for_addr_expr (t);
965 current_function_decl = save_context;
966
967 /* If the callback converted the address argument in a context
968 where we only accept variables (and min_invariant, presumably),
969 then compute the address into a temporary. */
970 if (save_val_only)
971 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
972 t, &wi->gsi);
973 }
974 }
975 break;
976
977 case REALPART_EXPR:
978 case IMAGPART_EXPR:
979 case COMPONENT_REF:
980 case ARRAY_REF:
981 case ARRAY_RANGE_REF:
982 case BIT_FIELD_REF:
983 /* Go down this entire nest and just look at the final prefix and
984 anything that describes the references. Otherwise, we lose track
985 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
986 wi->val_only = true;
987 wi->is_lhs = false;
988 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
989 {
990 if (TREE_CODE (t) == COMPONENT_REF)
991 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
992 NULL);
993 else if (TREE_CODE (t) == ARRAY_REF
994 || TREE_CODE (t) == ARRAY_RANGE_REF)
995 {
996 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
997 wi, NULL);
998 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
999 wi, NULL);
1000 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1001 wi, NULL);
1002 }
1003 }
1004 wi->val_only = false;
1005 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1006 break;
1007
1008 case VIEW_CONVERT_EXPR:
1009 /* Just request to look at the subtrees, leaving val_only and lhs
1010 untouched. This might actually be for !val_only + lhs, in which
1011 case we don't want to force a replacement by a temporary. */
1012 *walk_subtrees = 1;
1013 break;
1014
1015 default:
1016 if (!IS_TYPE_OR_DECL_P (t))
1017 {
1018 *walk_subtrees = 1;
1019 wi->val_only = true;
1020 wi->is_lhs = false;
1021 }
1022 break;
1023 }
1024
1025 return NULL_TREE;
1026 }
1027
1028 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1029 struct walk_stmt_info *);
1030
1031 /* Helper for convert_nonlocal_reference_stmt; rewrite all references to VAR
1032 and PARM_DECLs that belong to outer functions. */
1033
1034 static bool
1035 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1036 {
1037 struct nesting_info *const info = (struct nesting_info *) wi->info;
1038 bool need_chain = false, need_stmts = false;
1039 tree clause, decl;
1040 int dummy;
1041 bitmap new_suppress;
1042
1043 new_suppress = BITMAP_GGC_ALLOC ();
1044 bitmap_copy (new_suppress, info->suppress_expansion);
1045
1046 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1047 {
1048 switch (OMP_CLAUSE_CODE (clause))
1049 {
1050 case OMP_CLAUSE_REDUCTION:
1051 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1052 need_stmts = true;
1053 goto do_decl_clause;
1054
1055 case OMP_CLAUSE_LASTPRIVATE:
1056 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1057 need_stmts = true;
1058 goto do_decl_clause;
1059
1060 case OMP_CLAUSE_LINEAR:
1061 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1062 need_stmts = true;
1063 wi->val_only = true;
1064 wi->is_lhs = false;
1065 convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
1066 &dummy, wi);
1067 goto do_decl_clause;
1068
1069 case OMP_CLAUSE_PRIVATE:
1070 case OMP_CLAUSE_FIRSTPRIVATE:
1071 case OMP_CLAUSE_COPYPRIVATE:
1072 case OMP_CLAUSE_SHARED:
1073 case OMP_CLAUSE_TO_DECLARE:
1074 case OMP_CLAUSE_LINK:
1075 case OMP_CLAUSE_USE_DEVICE_PTR:
1076 case OMP_CLAUSE_IS_DEVICE_PTR:
1077 do_decl_clause:
1078 decl = OMP_CLAUSE_DECL (clause);
1079 if (TREE_CODE (decl) == VAR_DECL
1080 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1081 break;
1082 if (decl_function_context (decl) != info->context)
1083 {
1084 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1085 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1086 bitmap_set_bit (new_suppress, DECL_UID (decl));
1087 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1088 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1089 need_chain = true;
1090 }
1091 break;
1092
1093 case OMP_CLAUSE_SCHEDULE:
1094 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1095 break;
1096 /* FALLTHRU */
1097 case OMP_CLAUSE_FINAL:
1098 case OMP_CLAUSE_IF:
1099 case OMP_CLAUSE_NUM_THREADS:
1100 case OMP_CLAUSE_DEPEND:
1101 case OMP_CLAUSE_DEVICE:
1102 case OMP_CLAUSE_NUM_TEAMS:
1103 case OMP_CLAUSE_THREAD_LIMIT:
1104 case OMP_CLAUSE_SAFELEN:
1105 case OMP_CLAUSE_SIMDLEN:
1106 case OMP_CLAUSE_PRIORITY:
1107 case OMP_CLAUSE_GRAINSIZE:
1108 case OMP_CLAUSE_NUM_TASKS:
1109 case OMP_CLAUSE_HINT:
1110 case OMP_CLAUSE__CILK_FOR_COUNT_:
1111 case OMP_CLAUSE_NUM_GANGS:
1112 case OMP_CLAUSE_NUM_WORKERS:
1113 case OMP_CLAUSE_VECTOR_LENGTH:
1114 case OMP_CLAUSE_GANG:
1115 case OMP_CLAUSE_WORKER:
1116 case OMP_CLAUSE_VECTOR:
1117 case OMP_CLAUSE_ASYNC:
1118 case OMP_CLAUSE_WAIT:
1119 /* Several OpenACC clauses have optional arguments. Check if they
1120 are present. */
1121 if (OMP_CLAUSE_OPERAND (clause, 0))
1122 {
1123 wi->val_only = true;
1124 wi->is_lhs = false;
1125 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1126 &dummy, wi);
1127 }
1128
1129 /* The gang clause accepts two arguments. */
1130 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1131 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1132 {
1133 wi->val_only = true;
1134 wi->is_lhs = false;
1135 convert_nonlocal_reference_op
1136 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1137 }
1138 break;
1139
1140 case OMP_CLAUSE_DIST_SCHEDULE:
1141 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1142 {
1143 wi->val_only = true;
1144 wi->is_lhs = false;
1145 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1146 &dummy, wi);
1147 }
1148 break;
1149
1150 case OMP_CLAUSE_MAP:
1151 case OMP_CLAUSE_TO:
1152 case OMP_CLAUSE_FROM:
1153 if (OMP_CLAUSE_SIZE (clause))
1154 {
1155 wi->val_only = true;
1156 wi->is_lhs = false;
1157 convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
1158 &dummy, wi);
1159 }
1160 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1161 goto do_decl_clause;
1162 wi->val_only = true;
1163 wi->is_lhs = false;
1164 walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
1165 wi, NULL);
1166 break;
1167
1168 case OMP_CLAUSE_ALIGNED:
1169 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1170 {
1171 wi->val_only = true;
1172 wi->is_lhs = false;
1173 convert_nonlocal_reference_op
1174 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1175 }
1176 /* Like do_decl_clause, but don't add any suppression. */
1177 decl = OMP_CLAUSE_DECL (clause);
1178 if (TREE_CODE (decl) == VAR_DECL
1179 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1180 break;
1181 if (decl_function_context (decl) != info->context)
1182 {
1183 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1184 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1185 need_chain = true;
1186 }
1187 break;
1188
1189 case OMP_CLAUSE_NOWAIT:
1190 case OMP_CLAUSE_ORDERED:
1191 case OMP_CLAUSE_DEFAULT:
1192 case OMP_CLAUSE_COPYIN:
1193 case OMP_CLAUSE_COLLAPSE:
1194 case OMP_CLAUSE_UNTIED:
1195 case OMP_CLAUSE_MERGEABLE:
1196 case OMP_CLAUSE_PROC_BIND:
1197 case OMP_CLAUSE_NOGROUP:
1198 case OMP_CLAUSE_THREADS:
1199 case OMP_CLAUSE_SIMD:
1200 case OMP_CLAUSE_DEFAULTMAP:
1201 case OMP_CLAUSE_SEQ:
1202 case OMP_CLAUSE_INDEPENDENT:
1203 case OMP_CLAUSE_AUTO:
1204 break;
1205
1206 case OMP_CLAUSE_TILE:
1207 /* OpenACC tile clauses are discarded during gimplification, so we
1208 don't expect to see anything here. */
1209 gcc_unreachable ();
1210
1211 case OMP_CLAUSE__CACHE_:
1212 /* These clauses belong to the OpenACC cache directive, which is
1213 discarded during gimplification, so we don't expect to see
1214 anything here. */
1215 gcc_unreachable ();
1216
1217 default:
1218 gcc_unreachable ();
1219 }
1220 }
1221
1222 info->suppress_expansion = new_suppress;
1223
1224 if (need_stmts)
1225 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1226 switch (OMP_CLAUSE_CODE (clause))
1227 {
1228 case OMP_CLAUSE_REDUCTION:
1229 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1230 {
1231 tree old_context
1232 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1233 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1234 = info->context;
1235 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1236 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1237 = info->context;
1238 walk_body (convert_nonlocal_reference_stmt,
1239 convert_nonlocal_reference_op, info,
1240 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1241 walk_body (convert_nonlocal_reference_stmt,
1242 convert_nonlocal_reference_op, info,
1243 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1244 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1245 = old_context;
1246 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1247 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1248 = old_context;
1249 }
1250 break;
1251
1252 case OMP_CLAUSE_LASTPRIVATE:
1253 walk_body (convert_nonlocal_reference_stmt,
1254 convert_nonlocal_reference_op, info,
1255 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1256 break;
1257
1258 case OMP_CLAUSE_LINEAR:
1259 walk_body (convert_nonlocal_reference_stmt,
1260 convert_nonlocal_reference_op, info,
1261 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1262 break;
1263
1264 default:
1265 break;
1266 }
1267
1268 return need_chain;
1269 }
1270
1271 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1272
1273 static void
1274 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1275 {
1276 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1277 type = TREE_TYPE (type);
1278
1279 if (TYPE_NAME (type)
1280 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1281 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1282 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1283
1284 while (POINTER_TYPE_P (type)
1285 || TREE_CODE (type) == VECTOR_TYPE
1286 || TREE_CODE (type) == FUNCTION_TYPE
1287 || TREE_CODE (type) == METHOD_TYPE)
1288 type = TREE_TYPE (type);
1289
1290 if (TREE_CODE (type) == ARRAY_TYPE)
1291 {
1292 tree domain, t;
1293
1294 note_nonlocal_vla_type (info, TREE_TYPE (type));
1295 domain = TYPE_DOMAIN (type);
1296 if (domain)
1297 {
1298 t = TYPE_MIN_VALUE (domain);
1299 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1300 && decl_function_context (t) != info->context)
1301 get_nonlocal_debug_decl (info, t);
1302 t = TYPE_MAX_VALUE (domain);
1303 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1304 && decl_function_context (t) != info->context)
1305 get_nonlocal_debug_decl (info, t);
1306 }
1307 }
1308 }
1309
1310 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1311 in BLOCK. */
1312
1313 static void
1314 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1315 {
1316 tree var;
1317
1318 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1319 if (TREE_CODE (var) == VAR_DECL
1320 && variably_modified_type_p (TREE_TYPE (var), NULL)
1321 && DECL_HAS_VALUE_EXPR_P (var)
1322 && decl_function_context (var) != info->context)
1323 note_nonlocal_vla_type (info, TREE_TYPE (var));
1324 }
1325
1326 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1327 PARM_DECLs that belong to outer functions. This handles statements
1328 that are not handled via the standard recursion done in
1329 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1330 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1331 operands of STMT have been handled by this function. */
1332
1333 static tree
1334 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1335 struct walk_stmt_info *wi)
1336 {
1337 struct nesting_info *info = (struct nesting_info *) wi->info;
1338 tree save_local_var_chain;
1339 bitmap save_suppress;
1340 gimple *stmt = gsi_stmt (*gsi);
1341
1342 switch (gimple_code (stmt))
1343 {
1344 case GIMPLE_GOTO:
1345 /* Don't walk non-local gotos for now. */
1346 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1347 {
1348 wi->val_only = true;
1349 wi->is_lhs = false;
1350 *handled_ops_p = true;
1351 return NULL_TREE;
1352 }
1353 break;
1354
1355 case GIMPLE_OMP_PARALLEL:
1356 case GIMPLE_OMP_TASK:
1357 save_suppress = info->suppress_expansion;
1358 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1359 wi))
1360 {
1361 tree c, decl;
1362 decl = get_chain_decl (info);
1363 c = build_omp_clause (gimple_location (stmt),
1364 OMP_CLAUSE_FIRSTPRIVATE);
1365 OMP_CLAUSE_DECL (c) = decl;
1366 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1367 gimple_omp_taskreg_set_clauses (stmt, c);
1368 }
1369
1370 save_local_var_chain = info->new_local_var_chain;
1371 info->new_local_var_chain = NULL;
1372
1373 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1374 info, gimple_omp_body_ptr (stmt));
1375
1376 if (info->new_local_var_chain)
1377 declare_vars (info->new_local_var_chain,
1378 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1379 false);
1380 info->new_local_var_chain = save_local_var_chain;
1381 info->suppress_expansion = save_suppress;
1382 break;
1383
1384 case GIMPLE_OMP_FOR:
1385 save_suppress = info->suppress_expansion;
1386 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1387 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1388 convert_nonlocal_reference_stmt,
1389 convert_nonlocal_reference_op, info);
1390 walk_body (convert_nonlocal_reference_stmt,
1391 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1392 info->suppress_expansion = save_suppress;
1393 break;
1394
1395 case GIMPLE_OMP_SECTIONS:
1396 save_suppress = info->suppress_expansion;
1397 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1398 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1399 info, gimple_omp_body_ptr (stmt));
1400 info->suppress_expansion = save_suppress;
1401 break;
1402
1403 case GIMPLE_OMP_SINGLE:
1404 save_suppress = info->suppress_expansion;
1405 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1406 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1407 info, gimple_omp_body_ptr (stmt));
1408 info->suppress_expansion = save_suppress;
1409 break;
1410
1411 case GIMPLE_OMP_TARGET:
1412 if (!is_gimple_omp_offloaded (stmt))
1413 {
1414 save_suppress = info->suppress_expansion;
1415 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1416 wi);
1417 info->suppress_expansion = save_suppress;
1418 walk_body (convert_nonlocal_reference_stmt,
1419 convert_nonlocal_reference_op, info,
1420 gimple_omp_body_ptr (stmt));
1421 break;
1422 }
1423 save_suppress = info->suppress_expansion;
1424 if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1425 wi))
1426 {
1427 tree c, decl;
1428 decl = get_chain_decl (info);
1429 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1430 OMP_CLAUSE_DECL (c) = decl;
1431 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1432 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1433 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1434 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1435 }
1436
1437 save_local_var_chain = info->new_local_var_chain;
1438 info->new_local_var_chain = NULL;
1439
1440 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1441 info, gimple_omp_body_ptr (stmt));
1442
1443 if (info->new_local_var_chain)
1444 declare_vars (info->new_local_var_chain,
1445 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1446 false);
1447 info->new_local_var_chain = save_local_var_chain;
1448 info->suppress_expansion = save_suppress;
1449 break;
1450
1451 case GIMPLE_OMP_TEAMS:
1452 save_suppress = info->suppress_expansion;
1453 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1454 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1455 info, gimple_omp_body_ptr (stmt));
1456 info->suppress_expansion = save_suppress;
1457 break;
1458
1459 case GIMPLE_OMP_SECTION:
1460 case GIMPLE_OMP_MASTER:
1461 case GIMPLE_OMP_TASKGROUP:
1462 case GIMPLE_OMP_ORDERED:
1463 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1464 info, gimple_omp_body_ptr (stmt));
1465 break;
1466
1467 case GIMPLE_BIND:
1468 {
1469 gbind *bind_stmt = as_a <gbind *> (stmt);
1470 if (!optimize && gimple_bind_block (bind_stmt))
1471 note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
1472
1473 for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1474 if (TREE_CODE (var) == NAMELIST_DECL)
1475 {
1476 /* Adjust decls mentioned in NAMELIST_DECL. */
1477 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1478 tree decl;
1479 unsigned int i;
1480
1481 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1482 {
1483 if (TREE_CODE (decl) == VAR_DECL
1484 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1485 continue;
1486 if (decl_function_context (decl) != info->context)
1487 CONSTRUCTOR_ELT (decls, i)->value
1488 = get_nonlocal_debug_decl (info, decl);
1489 }
1490 }
1491
1492 *handled_ops_p = false;
1493 return NULL_TREE;
1494 }
1495 case GIMPLE_COND:
1496 wi->val_only = true;
1497 wi->is_lhs = false;
1498 *handled_ops_p = false;
1499 return NULL_TREE;
1500
1501 default:
1502 /* For every other statement that we are not interested in
1503 handling here, let the walker traverse the operands. */
1504 *handled_ops_p = false;
1505 return NULL_TREE;
1506 }
1507
1508 /* We have handled all of STMT operands, no need to traverse the operands. */
1509 *handled_ops_p = true;
1510 return NULL_TREE;
1511 }
1512
1513
1514 /* A subroutine of convert_local_reference_op. Create a local variable
1515 in the parent function with DECL_VALUE_EXPR set to reference the
1516 field in FRAME. This is used both for debug info and in OMP
1517 lowering. */
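
/* For instance, if a child function forces the parent's variable X into
   the frame, the parent gets an artificial VAR_DECL named X whose
   DECL_VALUE_EXPR is FRAME.X; debug info then describes the frame copy,
   and the original decl is marked DECL_IGNORED_P below so its info is
   not emitted twice.  */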
1518
1519 static tree
1520 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1521 {
1522 tree x, new_decl;
1523
1524 tree *slot = &info->var_map->get_or_insert (decl);
1525 if (*slot)
1526 return *slot;
1527
1528 /* Make sure frame_decl gets created. */
1529 (void) get_frame_type (info);
1530 x = info->frame_decl;
1531 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1532
1533 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1534 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1535 DECL_CONTEXT (new_decl) = info->context;
1536 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1537 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1538 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1539 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1540 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1541 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1542 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1543 if ((TREE_CODE (decl) == PARM_DECL
1544 || TREE_CODE (decl) == RESULT_DECL
1545 || TREE_CODE (decl) == VAR_DECL)
1546 && DECL_BY_REFERENCE (decl))
1547 DECL_BY_REFERENCE (new_decl) = 1;
1548
1549 SET_DECL_VALUE_EXPR (new_decl, x);
1550 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1551 *slot = new_decl;
1552
1553 DECL_CHAIN (new_decl) = info->debug_var_chain;
1554 info->debug_var_chain = new_decl;
1555
1556 /* Do not emit debug info twice. */
1557 DECL_IGNORED_P (decl) = 1;
1558
1559 return new_decl;
1560 }
1561
1562
1563 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1564 and PARM_DECLs that were referenced by inner nested functions.
1565 The rewrite will be a structure reference to the local frame variable. */
1566
1567 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1568
1569 static tree
1570 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1571 {
1572 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1573 struct nesting_info *const info = (struct nesting_info *) wi->info;
1574 tree t = *tp, field, x;
1575 bool save_val_only;
1576
1577 *walk_subtrees = 0;
1578 switch (TREE_CODE (t))
1579 {
1580 case VAR_DECL:
1581 /* Non-automatic variables are never processed. */
1582 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1583 break;
1584 /* FALLTHRU */
1585
1586 case PARM_DECL:
1587 if (decl_function_context (t) == info->context)
1588 {
1589 /* If we copied a pointer to the frame, then the original decl
1590 is used unchanged in the parent function. */
1591 if (use_pointer_in_frame (t))
1592 break;
1593
1594 /* No need to transform anything if no child references the
1595 variable. */
1596 field = lookup_field_for_decl (info, t, NO_INSERT);
1597 if (!field)
1598 break;
1599 wi->changed = true;
1600
1601 x = get_local_debug_decl (info, t, field);
1602 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1603 x = get_frame_field (info, info->context, field, &wi->gsi);
1604
1605 if (wi->val_only)
1606 {
1607 if (wi->is_lhs)
1608 x = save_tmp_var (info, x, &wi->gsi);
1609 else
1610 x = init_tmp_var (info, x, &wi->gsi);
1611 }
1612
1613 *tp = x;
1614 }
1615 break;
1616
1617 case ADDR_EXPR:
1618 save_val_only = wi->val_only;
1619 wi->val_only = false;
1620 wi->is_lhs = false;
1621 wi->changed = false;
1622 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1623 wi->val_only = save_val_only;
1624
1625 /* If we converted anything ... */
1626 if (wi->changed)
1627 {
1628 tree save_context;
1629
1630 /* Then the frame decl is now addressable. */
1631 TREE_ADDRESSABLE (info->frame_decl) = 1;
1632
1633 save_context = current_function_decl;
1634 current_function_decl = info->context;
1635 recompute_tree_invariant_for_addr_expr (t);
1636 current_function_decl = save_context;
1637
1638 /* If we are in a context where we only accept values, then
1639 compute the address into a temporary. */
1640 if (save_val_only)
1641 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1642 t, &wi->gsi);
1643 }
1644 break;
1645
1646 case REALPART_EXPR:
1647 case IMAGPART_EXPR:
1648 case COMPONENT_REF:
1649 case ARRAY_REF:
1650 case ARRAY_RANGE_REF:
1651 case BIT_FIELD_REF:
1652 /* Go down this entire nest and just look at the final prefix and
1653 anything that describes the references. Otherwise, we lose track
1654 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1655 save_val_only = wi->val_only;
1656 wi->val_only = true;
1657 wi->is_lhs = false;
1658 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1659 {
1660 if (TREE_CODE (t) == COMPONENT_REF)
1661 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1662 NULL);
1663 else if (TREE_CODE (t) == ARRAY_REF
1664 || TREE_CODE (t) == ARRAY_RANGE_REF)
1665 {
1666 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1667 NULL);
1668 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1669 NULL);
1670 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1671 NULL);
1672 }
1673 }
1674 wi->val_only = false;
1675 walk_tree (tp, convert_local_reference_op, wi, NULL);
1676 wi->val_only = save_val_only;
1677 break;
1678
1679 case MEM_REF:
1680 save_val_only = wi->val_only;
1681 wi->val_only = true;
1682 wi->is_lhs = false;
1683 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1684 wi, NULL);
1685 /* We need to re-fold the MEM_REF as component references as
1686 part of a ADDR_EXPR address are not allowed. But we cannot
1687 fold here, as the chain record type is not yet finalized. */
1688 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1689 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1690 info->mem_refs->add (tp);
1691 wi->val_only = save_val_only;
1692 break;
1693
1694 case VIEW_CONVERT_EXPR:
1695 /* Just request to look at the subtrees, leaving val_only and lhs
1696 untouched. This might actually be for !val_only + lhs, in which
1697 case we don't want to force a replacement by a temporary. */
1698 *walk_subtrees = 1;
1699 break;
1700
1701 default:
1702 if (!IS_TYPE_OR_DECL_P (t))
1703 {
1704 *walk_subtrees = 1;
1705 wi->val_only = true;
1706 wi->is_lhs = false;
1707 }
1708 break;
1709 }
1710
1711 return NULL_TREE;
1712 }
1713
1714 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1715 struct walk_stmt_info *);
1716
1717 /* Helper for convert_local_reference_stmt. Convert all the references
1718 in the chain of clauses at *PCLAUSES. WI is as in
1719 convert_local_reference_stmt. */
1719
1720 static bool
1721 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1722 {
1723 struct nesting_info *const info = (struct nesting_info *) wi->info;
1724 bool need_frame = false, need_stmts = false;
1725 tree clause, decl;
1726 int dummy;
1727 bitmap new_suppress;
1728
1729 new_suppress = BITMAP_GGC_ALLOC ();
1730 bitmap_copy (new_suppress, info->suppress_expansion);
1731
1732 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1733 {
1734 switch (OMP_CLAUSE_CODE (clause))
1735 {
1736 case OMP_CLAUSE_REDUCTION:
1737 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1738 need_stmts = true;
1739 goto do_decl_clause;
1740
1741 case OMP_CLAUSE_LASTPRIVATE:
1742 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1743 need_stmts = true;
1744 goto do_decl_clause;
1745
1746 case OMP_CLAUSE_LINEAR:
1747 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1748 need_stmts = true;
1749 wi->val_only = true;
1750 wi->is_lhs = false;
1751 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1752 wi);
1753 goto do_decl_clause;
1754
1755 case OMP_CLAUSE_PRIVATE:
1756 case OMP_CLAUSE_FIRSTPRIVATE:
1757 case OMP_CLAUSE_COPYPRIVATE:
1758 case OMP_CLAUSE_SHARED:
1759 case OMP_CLAUSE_TO_DECLARE:
1760 case OMP_CLAUSE_LINK:
1761 case OMP_CLAUSE_USE_DEVICE_PTR:
1762 case OMP_CLAUSE_IS_DEVICE_PTR:
1763 do_decl_clause:
1764 decl = OMP_CLAUSE_DECL (clause);
1765 if (TREE_CODE (decl) == VAR_DECL
1766 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1767 break;
1768 if (decl_function_context (decl) == info->context
1769 && !use_pointer_in_frame (decl))
1770 {
1771 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1772 if (field)
1773 {
1774 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1775 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1776 bitmap_set_bit (new_suppress, DECL_UID (decl));
1777 OMP_CLAUSE_DECL (clause)
1778 = get_local_debug_decl (info, decl, field);
1779 need_frame = true;
1780 }
1781 }
1782 break;
1783
1784 case OMP_CLAUSE_SCHEDULE:
1785 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1786 break;
1787 /* FALLTHRU */
1788 case OMP_CLAUSE_FINAL:
1789 case OMP_CLAUSE_IF:
1790 case OMP_CLAUSE_NUM_THREADS:
1791 case OMP_CLAUSE_DEPEND:
1792 case OMP_CLAUSE_DEVICE:
1793 case OMP_CLAUSE_NUM_TEAMS:
1794 case OMP_CLAUSE_THREAD_LIMIT:
1795 case OMP_CLAUSE_SAFELEN:
1796 case OMP_CLAUSE_SIMDLEN:
1797 case OMP_CLAUSE_PRIORITY:
1798 case OMP_CLAUSE_GRAINSIZE:
1799 case OMP_CLAUSE_NUM_TASKS:
1800 case OMP_CLAUSE_HINT:
1801 case OMP_CLAUSE__CILK_FOR_COUNT_:
1802 case OMP_CLAUSE_NUM_GANGS:
1803 case OMP_CLAUSE_NUM_WORKERS:
1804 case OMP_CLAUSE_VECTOR_LENGTH:
1805 case OMP_CLAUSE_GANG:
1806 case OMP_CLAUSE_WORKER:
1807 case OMP_CLAUSE_VECTOR:
1808 case OMP_CLAUSE_ASYNC:
1809 case OMP_CLAUSE_WAIT:
1810 /* Several OpenACC clauses have optional arguments. Check if they
1811 are present. */
1812 if (OMP_CLAUSE_OPERAND (clause, 0))
1813 {
1814 wi->val_only = true;
1815 wi->is_lhs = false;
1816 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1817 &dummy, wi);
1818 }
1819
1820 /* The gang clause accepts two arguments. */
1821 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1822 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1823 {
1824 wi->val_only = true;
1825 wi->is_lhs = false;
1826 convert_local_reference_op
1827 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
1828 }
1829 break;
1830
1831 case OMP_CLAUSE_DIST_SCHEDULE:
1832 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
1833 {
1834 wi->val_only = true;
1835 wi->is_lhs = false;
1836 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1837 &dummy, wi);
1838 }
1839 break;
1840
1841 case OMP_CLAUSE_MAP:
1842 case OMP_CLAUSE_TO:
1843 case OMP_CLAUSE_FROM:
1844 if (OMP_CLAUSE_SIZE (clause))
1845 {
1846 wi->val_only = true;
1847 wi->is_lhs = false;
1848 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
1849 &dummy, wi);
1850 }
1851 if (DECL_P (OMP_CLAUSE_DECL (clause)))
1852 goto do_decl_clause;
1853 wi->val_only = true;
1854 wi->is_lhs = false;
1855 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
1856 wi, NULL);
1857 break;
1858
1859 case OMP_CLAUSE_ALIGNED:
1860 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
1861 {
1862 wi->val_only = true;
1863 wi->is_lhs = false;
1864 convert_local_reference_op
1865 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
1866 }
1867 /* Like do_decl_clause, but don't add any suppression. */
1868 decl = OMP_CLAUSE_DECL (clause);
1869 if (TREE_CODE (decl) == VAR_DECL
1870 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1871 break;
1872 if (decl_function_context (decl) == info->context
1873 && !use_pointer_in_frame (decl))
1874 {
1875 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1876 if (field)
1877 {
1878 OMP_CLAUSE_DECL (clause)
1879 = get_local_debug_decl (info, decl, field);
1880 need_frame = true;
1881 }
1882 }
1883 break;
1884
1885 case OMP_CLAUSE_NOWAIT:
1886 case OMP_CLAUSE_ORDERED:
1887 case OMP_CLAUSE_DEFAULT:
1888 case OMP_CLAUSE_COPYIN:
1889 case OMP_CLAUSE_COLLAPSE:
1890 case OMP_CLAUSE_UNTIED:
1891 case OMP_CLAUSE_MERGEABLE:
1892 case OMP_CLAUSE_PROC_BIND:
1893 case OMP_CLAUSE_NOGROUP:
1894 case OMP_CLAUSE_THREADS:
1895 case OMP_CLAUSE_SIMD:
1896 case OMP_CLAUSE_DEFAULTMAP:
1897 case OMP_CLAUSE_SEQ:
1898 case OMP_CLAUSE_INDEPENDENT:
1899 case OMP_CLAUSE_AUTO:
1900 break;
1901
1902 case OMP_CLAUSE_TILE:
1903 /* OpenACC tile clauses are discarded during gimplification, so we
1904 don't expect to see anything here. */
1905 gcc_unreachable ();
1906
1907 case OMP_CLAUSE__CACHE_:
1908 /* These clauses belong to the OpenACC cache directive, which is
1909 discarded during gimplification, so we don't expect to see
1910 anything here. */
1911 gcc_unreachable ();
1912
1913 default:
1914 gcc_unreachable ();
1915 }
1916 }
1917
1918 info->suppress_expansion = new_suppress;
1919
1920 if (need_stmts)
1921 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1922 switch (OMP_CLAUSE_CODE (clause))
1923 {
1924 case OMP_CLAUSE_REDUCTION:
1925 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1926 {
1927 tree old_context
1928 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1929 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1930 = info->context;
1931 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1932 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1933 = info->context;
1934 walk_body (convert_local_reference_stmt,
1935 convert_local_reference_op, info,
1936 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1937 walk_body (convert_local_reference_stmt,
1938 convert_local_reference_op, info,
1939 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1940 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1941 = old_context;
1942 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1943 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
1944 = old_context;
1945 }
1946 break;
1947
1948 case OMP_CLAUSE_LASTPRIVATE:
1949 walk_body (convert_local_reference_stmt,
1950 convert_local_reference_op, info,
1951 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1952 break;
1953
1954 case OMP_CLAUSE_LINEAR:
1955 walk_body (convert_local_reference_stmt,
1956 convert_local_reference_op, info,
1957 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1958 break;
1959
1960 default:
1961 break;
1962 }
1963
1964 return need_frame;
1965 }
1966
1967
1968 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1969 and PARM_DECLs that were referenced by inner nested functions.
1970 The rewrite will be a structure reference to the local frame variable. */
1971
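/* A minimal sketch, with hypothetical names: if the parent declares
   "int x;" and some nested function reads X, then after this walk a
   parent statement such as

       x = 1;

   refers to X through the frame object, conceptually

       FRAME.x = 1;

   (in practice via the replacement debug decl's DECL_VALUE_EXPR).  */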
1972 static tree
1973 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1974 struct walk_stmt_info *wi)
1975 {
1976 struct nesting_info *info = (struct nesting_info *) wi->info;
1977 tree save_local_var_chain;
1978 bitmap save_suppress;
1979 gimple *stmt = gsi_stmt (*gsi);
1980
1981 switch (gimple_code (stmt))
1982 {
1983 case GIMPLE_OMP_PARALLEL:
1984 case GIMPLE_OMP_TASK:
1985 save_suppress = info->suppress_expansion;
1986 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1987 wi))
1988 {
1989 tree c;
1990 (void) get_frame_type (info);
1991 c = build_omp_clause (gimple_location (stmt),
1992 OMP_CLAUSE_SHARED);
1993 OMP_CLAUSE_DECL (c) = info->frame_decl;
1994 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1995 gimple_omp_taskreg_set_clauses (stmt, c);
1996 }
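	/* E.g. (hypothetical): once the clauses of "#pragma omp parallel"
	   mention the frame, the region gains an implicit shared(FRAME)
	   clause, built just above, so the frame object is visible to
	   the spawned threads.  */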
1997
1998 save_local_var_chain = info->new_local_var_chain;
1999 info->new_local_var_chain = NULL;
2000
2001 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2002 gimple_omp_body_ptr (stmt));
2003
2004 if (info->new_local_var_chain)
2005 declare_vars (info->new_local_var_chain,
2006 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2007 info->new_local_var_chain = save_local_var_chain;
2008 info->suppress_expansion = save_suppress;
2009 break;
2010
2011 case GIMPLE_OMP_FOR:
2012 save_suppress = info->suppress_expansion;
2013 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
2014 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
2015 convert_local_reference_stmt,
2016 convert_local_reference_op, info);
2017 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2018 info, gimple_omp_body_ptr (stmt));
2019 info->suppress_expansion = save_suppress;
2020 break;
2021
2022 case GIMPLE_OMP_SECTIONS:
2023 save_suppress = info->suppress_expansion;
2024 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
2025 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2026 info, gimple_omp_body_ptr (stmt));
2027 info->suppress_expansion = save_suppress;
2028 break;
2029
2030 case GIMPLE_OMP_SINGLE:
2031 save_suppress = info->suppress_expansion;
2032 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
2033 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2034 info, gimple_omp_body_ptr (stmt));
2035 info->suppress_expansion = save_suppress;
2036 break;
2037
2038 case GIMPLE_OMP_TARGET:
2039 if (!is_gimple_omp_offloaded (stmt))
2040 {
2041 save_suppress = info->suppress_expansion;
2042 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
2043 info->suppress_expansion = save_suppress;
2044 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2045 info, gimple_omp_body_ptr (stmt));
2046 break;
2047 }
2048 save_suppress = info->suppress_expansion;
2049 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2050 {
2051 tree c;
2052 (void) get_frame_type (info);
2053 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2054 OMP_CLAUSE_DECL (c) = info->frame_decl;
2055 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2056 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2057 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2058 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2059 }
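	/* Similarly (hypothetical): an offloaded target region that
	   touches the frame gains a clause along the lines of
	   map(tofrom: FRAME [len: sizeof FRAME]), built just above, so
	   the whole frame object travels to and from the device.  */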
2060
2061 save_local_var_chain = info->new_local_var_chain;
2062 info->new_local_var_chain = NULL;
2063
2064 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2065 gimple_omp_body_ptr (stmt));
2066
2067 if (info->new_local_var_chain)
2068 declare_vars (info->new_local_var_chain,
2069 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2070 info->new_local_var_chain = save_local_var_chain;
2071 info->suppress_expansion = save_suppress;
2072 break;
2073
2074 case GIMPLE_OMP_TEAMS:
2075 save_suppress = info->suppress_expansion;
2076 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2077 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2078 info, gimple_omp_body_ptr (stmt));
2079 info->suppress_expansion = save_suppress;
2080 break;
2081
2082 case GIMPLE_OMP_SECTION:
2083 case GIMPLE_OMP_MASTER:
2084 case GIMPLE_OMP_TASKGROUP:
2085 case GIMPLE_OMP_ORDERED:
2086 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2087 info, gimple_omp_body_ptr (stmt));
2088 break;
2089
2090 case GIMPLE_COND:
2091 wi->val_only = true;
2092 wi->is_lhs = false;
2093 *handled_ops_p = false;
2094 return NULL_TREE;
2095
2096 case GIMPLE_ASSIGN:
2097 if (gimple_clobber_p (stmt))
2098 {
2099 tree lhs = gimple_assign_lhs (stmt);
2100 if (!use_pointer_in_frame (lhs)
2101 && lookup_field_for_decl (info, lhs, NO_INSERT))
2102 {
2103 gsi_replace (gsi, gimple_build_nop (), true);
2104 break;
2105 }
2106 }
2107 *handled_ops_p = false;
2108 return NULL_TREE;
2109
2110 case GIMPLE_BIND:
2111 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2112 var;
2113 var = DECL_CHAIN (var))
2114 if (TREE_CODE (var) == NAMELIST_DECL)
2115 {
2116 /* Adjust decls mentioned in NAMELIST_DECL. */
2117 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2118 tree decl;
2119 unsigned int i;
2120
2121 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2122 {
2123 if (TREE_CODE (decl) == VAR_DECL
2124 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2125 continue;
2126 if (decl_function_context (decl) == info->context
2127 && !use_pointer_in_frame (decl))
2128 {
2129 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2130 if (field)
2131 {
2132 CONSTRUCTOR_ELT (decls, i)->value
2133 = get_local_debug_decl (info, decl, field);
2134 }
2135 }
2136 }
2137 }
2138
2139 *handled_ops_p = false;
2140 return NULL_TREE;
2141
2142 default:
2143 /* For every other statement that we are not interested in
2144 handling here, let the walker traverse the operands. */
2145 *handled_ops_p = false;
2146 return NULL_TREE;
2147 }
2148
2149 /* Indicate that we have handled all the operands ourselves. */
2150 *handled_ops_p = true;
2151 return NULL_TREE;
2152 }
2153
2154
2155 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2156 that reference labels from outer functions. The rewrite will be a
2157 call to __builtin_nonlocal_goto. */
2158
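/* A hedged sketch with hypothetical names: for

       void outer (void)
       {
	 void inner (void) { goto lab; }
	 inner ();
       lab:;
       }

   the GIMPLE_GOTO in INNER becomes, roughly,

       __builtin_nonlocal_goto (&LAB.N, &CHAIN->nl_goto_field);

   where LAB.N is the artificial DECL_NONLOCAL label that
   convert_nl_goto_receiver later installs in OUTER.  */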
2159 static tree
2160 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2161 struct walk_stmt_info *wi)
2162 {
2163 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2164 tree label, new_label, target_context, x, field;
2165 gcall *call;
2166 gimple *stmt = gsi_stmt (*gsi);
2167
2168 if (gimple_code (stmt) != GIMPLE_GOTO)
2169 {
2170 *handled_ops_p = false;
2171 return NULL_TREE;
2172 }
2173
2174 label = gimple_goto_dest (stmt);
2175 if (TREE_CODE (label) != LABEL_DECL)
2176 {
2177 *handled_ops_p = false;
2178 return NULL_TREE;
2179 }
2180
2181 target_context = decl_function_context (label);
2182 if (target_context == info->context)
2183 {
2184 *handled_ops_p = false;
2185 return NULL_TREE;
2186 }
2187
2188 for (i = info->outer; target_context != i->context; i = i->outer)
2189 continue;
2190
2191 /* The original user label may also be used for a normal goto, therefore
2192 we must create a new label that will actually receive the abnormal
2193 control transfer. This new label will be marked LABEL_NONLOCAL; this
2194 mark will trigger proper behavior in the cfg, as well as cause the
2195 (hairy target-specific) non-local goto receiver code to be generated
2196 when we expand rtl. Enter this association into var_map so that we
2197 can insert the new label into the IL during a second pass. */
2198 tree *slot = &i->var_map->get_or_insert (label);
2199 if (*slot == NULL)
2200 {
2201 new_label = create_artificial_label (UNKNOWN_LOCATION);
2202 DECL_NONLOCAL (new_label) = 1;
2203 *slot = new_label;
2204 }
2205 else
2206 new_label = *slot;
2207
2208 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2209 field = get_nl_goto_field (i);
2210 x = get_frame_field (info, target_context, field, gsi);
2211 x = build_addr (x);
2212 x = gsi_gimplify_val (info, x, gsi);
2213 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2214 2, build_addr (new_label), x);
2215 gsi_replace (gsi, call, false);
2216
2217 /* We have handled all of STMT's operands, no need to keep going. */
2218 *handled_ops_p = true;
2219 return NULL_TREE;
2220 }
2221
2222
2223 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2224 are referenced via nonlocal goto from a nested function. The rewrite
2225 will involve installing a newly generated DECL_NONLOCAL label, and
2226 (potentially) a branch around the rtl gunk that is assumed to be
2227 attached to such a label. */
2228
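/* Continuing the (hypothetical) sketch above: in OUTER,

       lab:;

   becomes, roughly,

       goto lab;
     LAB.N:
       lab:;

   where the extra goto is emitted only if the preceding statement
   may fall through, so normal control flow skips the nonlocal
   receiver.  */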
2229 static tree
2230 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2231 struct walk_stmt_info *wi)
2232 {
2233 struct nesting_info *const info = (struct nesting_info *) wi->info;
2234 tree label, new_label;
2235 gimple_stmt_iterator tmp_gsi;
2236 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2237
2238 if (!stmt)
2239 {
2240 *handled_ops_p = false;
2241 return NULL_TREE;
2242 }
2243
2244 label = gimple_label_label (stmt);
2245
2246 tree *slot = info->var_map->get (label);
2247 if (!slot)
2248 {
2249 *handled_ops_p = false;
2250 return NULL_TREE;
2251 }
2252
2253 /* If there's any possibility that the previous statement falls through,
2254 then we must branch around the new non-local label. */
2255 tmp_gsi = wi->gsi;
2256 gsi_prev (&tmp_gsi);
2257 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2258 {
2259 gimple *stmt = gimple_build_goto (label);
2260 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2261 }
2262
2263 new_label = (tree) *slot;
2264 stmt = gimple_build_label (new_label);
2265 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2266
2267 *handled_ops_p = true;
2268 return NULL_TREE;
2269 }
2270
2271
2272 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2273 of nested functions that require the use of trampolines. The rewrite
2274 will involve a reference to a trampoline generated for the occasion. */
2275
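/* A hedged example (hypothetical names): for

       ptr = &inner;

   where INNER needs a static chain, PTR instead receives the
   adjusted address of a trampoline field in the parent's frame; the
   exact GIMPLE shape is spelled out in the Build comment in the
   ADDR_EXPR case below.  */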
2276 static tree
2277 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2278 {
2279 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2280 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2281 tree t = *tp, decl, target_context, x, builtin;
2282 gcall *call;
2283
2284 *walk_subtrees = 0;
2285 switch (TREE_CODE (t))
2286 {
2287 case ADDR_EXPR:
2288 /* Build
2289 T.1 = &CHAIN->tramp;
2290 T.2 = __builtin_adjust_trampoline (T.1);
2291 T.3 = (func_type)T.2;
2292 */
2293
2294 decl = TREE_OPERAND (t, 0);
2295 if (TREE_CODE (decl) != FUNCTION_DECL)
2296 break;
2297
2298 /* Only need to process nested functions. */
2299 target_context = decl_function_context (decl);
2300 if (!target_context)
2301 break;
2302
2303 /* If the nested function doesn't use a static chain, then
2304 it doesn't need a trampoline. */
2305 if (!DECL_STATIC_CHAIN (decl))
2306 break;
2307
2308 /* If we don't want a trampoline, then don't build one. */
2309 if (TREE_NO_TRAMPOLINE (t))
2310 break;
2311
2312 /* Look up the immediate parent of the callee, as that's where
2313 we need to insert the trampoline. */
2314 for (i = info; i->context != target_context; i = i->outer)
2315 continue;
2316 x = lookup_tramp_for_decl (i, decl, INSERT);
2317
2318 /* Compute the address of the field holding the trampoline. */
2319 x = get_frame_field (info, target_context, x, &wi->gsi);
2320 x = build_addr (x);
2321 x = gsi_gimplify_val (info, x, &wi->gsi);
2322
2323 /* Do machine-specific ugliness. Normally this will involve
2324 computing extra alignment, but it can really be anything. */
2325 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2326 call = gimple_build_call (builtin, 1, x);
2327 x = init_tmp_var_with_call (info, &wi->gsi, call);
2328
2329 /* Cast back to the proper function type. */
2330 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2331 x = init_tmp_var (info, x, &wi->gsi);
2332
2333 *tp = x;
2334 break;
2335
2336 default:
2337 if (!IS_TYPE_OR_DECL_P (t))
2338 *walk_subtrees = 1;
2339 break;
2340 }
2341
2342 return NULL_TREE;
2343 }
2344
2345
2346 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2347 to addresses of nested functions that require the use of
2348 trampolines. The rewrite will involve a reference to a trampoline
2349 generated for the occasion. */
2350
2351 static tree
2352 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2353 struct walk_stmt_info *wi)
2354 {
2355 struct nesting_info *info = (struct nesting_info *) wi->info;
2356 gimple *stmt = gsi_stmt (*gsi);
2357
2358 switch (gimple_code (stmt))
2359 {
2360 case GIMPLE_CALL:
2361 {
2362 /* Only walk call arguments, lest we generate trampolines for
2363 direct calls. */
2364 unsigned long i, nargs = gimple_call_num_args (stmt);
2365 for (i = 0; i < nargs; i++)
2366 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2367 wi, NULL);
2368 break;
2369 }
2370
2371 case GIMPLE_OMP_TARGET:
2372 if (!is_gimple_omp_offloaded (stmt))
2373 {
2374 *handled_ops_p = false;
2375 return NULL_TREE;
2376 }
2377 /* FALLTHRU */
2378 case GIMPLE_OMP_PARALLEL:
2379 case GIMPLE_OMP_TASK:
2380 {
2381 tree save_local_var_chain = info->new_local_var_chain;
2382 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2383 info->new_local_var_chain = NULL;
2384 char save_static_chain_added = info->static_chain_added;
2385 info->static_chain_added = 0;
2386 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2387 info, gimple_omp_body_ptr (stmt));
2388 if (info->new_local_var_chain)
2389 declare_vars (info->new_local_var_chain,
2390 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2391 false);
2392 for (int i = 0; i < 2; i++)
2393 {
2394 tree c, decl;
2395 if ((info->static_chain_added & (1 << i)) == 0)
2396 continue;
2397 decl = i ? get_chain_decl (info) : info->frame_decl;
2398 /* Don't add CHAIN.* or FRAME.* twice. */
2399 for (c = gimple_omp_taskreg_clauses (stmt);
2400 c;
2401 c = OMP_CLAUSE_CHAIN (c))
2402 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2403 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2404 && OMP_CLAUSE_DECL (c) == decl)
2405 break;
2406 if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
2407 {
2408 c = build_omp_clause (gimple_location (stmt),
2409 i ? OMP_CLAUSE_FIRSTPRIVATE
2410 : OMP_CLAUSE_SHARED);
2411 OMP_CLAUSE_DECL (c) = decl;
2412 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2413 gimple_omp_taskreg_set_clauses (stmt, c);
2414 }
2415 else if (c == NULL)
2416 {
2417 c = build_omp_clause (gimple_location (stmt),
2418 OMP_CLAUSE_MAP);
2419 OMP_CLAUSE_DECL (c) = decl;
2420 OMP_CLAUSE_SET_MAP_KIND (c,
2421 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2422 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2423 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2424 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2425 c);
2426 }
2427 }
2428 info->new_local_var_chain = save_local_var_chain;
2429 info->static_chain_added |= save_static_chain_added;
2430 }
2431 break;
2432
2433 default:
2434 *handled_ops_p = false;
2435 return NULL_TREE;
2436 }
2437
2438 *handled_ops_p = true;
2439 return NULL_TREE;
2440 }
2441
2442
2443
2444 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2445 that reference nested functions to make sure that the static chain
2446 is set up properly for the call. */
2447
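/* Sketch (hypothetical names): a direct call

       inner (5);

   to a nested function that uses its static chain is given an
   explicit chain operand, conceptually

       inner (5) with chain &FRAME;

   where get_static_chain yields &FRAME for an immediate child and
   walks the chain fields for a more distant ancestor.  */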
2448 static tree
2449 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2450 struct walk_stmt_info *wi)
2451 {
2452 struct nesting_info *const info = (struct nesting_info *) wi->info;
2453 tree decl, target_context;
2454 char save_static_chain_added;
2455 int i;
2456 gimple *stmt = gsi_stmt (*gsi);
2457
2458 switch (gimple_code (stmt))
2459 {
2460 case GIMPLE_CALL:
2461 if (gimple_call_chain (stmt))
2462 break;
2463 decl = gimple_call_fndecl (stmt);
2464 if (!decl)
2465 break;
2466 target_context = decl_function_context (decl);
2467 if (target_context && DECL_STATIC_CHAIN (decl))
2468 {
2469 gimple_call_set_chain (as_a <gcall *> (stmt),
2470 get_static_chain (info, target_context,
2471 &wi->gsi));
2472 info->static_chain_added |= (1 << (info->context != target_context));
2473 }
2474 break;
2475
2476 case GIMPLE_OMP_PARALLEL:
2477 case GIMPLE_OMP_TASK:
2478 save_static_chain_added = info->static_chain_added;
2479 info->static_chain_added = 0;
2480 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2481 for (i = 0; i < 2; i++)
2482 {
2483 tree c, decl;
2484 if ((info->static_chain_added & (1 << i)) == 0)
2485 continue;
2486 decl = i ? get_chain_decl (info) : info->frame_decl;
2487 /* Don't add CHAIN.* or FRAME.* twice. */
2488 for (c = gimple_omp_taskreg_clauses (stmt);
2489 c;
2490 c = OMP_CLAUSE_CHAIN (c))
2491 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2492 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2493 && OMP_CLAUSE_DECL (c) == decl)
2494 break;
2495 if (c == NULL)
2496 {
2497 c = build_omp_clause (gimple_location (stmt),
2498 i ? OMP_CLAUSE_FIRSTPRIVATE
2499 : OMP_CLAUSE_SHARED);
2500 OMP_CLAUSE_DECL (c) = decl;
2501 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2502 gimple_omp_taskreg_set_clauses (stmt, c);
2503 }
2504 }
2505 info->static_chain_added |= save_static_chain_added;
2506 break;
2507
2508 case GIMPLE_OMP_TARGET:
2509 if (!is_gimple_omp_offloaded (stmt))
2510 {
2511 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2512 break;
2513 }
2514 save_static_chain_added = info->static_chain_added;
2515 info->static_chain_added = 0;
2516 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2517 for (i = 0; i < 2; i++)
2518 {
2519 tree c, decl;
2520 if ((info->static_chain_added & (1 << i)) == 0)
2521 continue;
2522 decl = i ? get_chain_decl (info) : info->frame_decl;
2523 /* Don't add CHAIN.* or FRAME.* twice. */
2524 for (c = gimple_omp_target_clauses (stmt);
2525 c;
2526 c = OMP_CLAUSE_CHAIN (c))
2527 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
2528 && OMP_CLAUSE_DECL (c) == decl)
2529 break;
2530 if (c == NULL)
2531 {
2532 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2533 OMP_CLAUSE_DECL (c) = decl;
2534 OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
2535 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
2536 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2537 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
2538 c);
2539 }
2540 }
2541 info->static_chain_added |= save_static_chain_added;
2542 break;
2543
2544 case GIMPLE_OMP_FOR:
2545 walk_body (convert_gimple_call, NULL, info,
2546 gimple_omp_for_pre_body_ptr (stmt));
2547 /* FALLTHRU */
2548 case GIMPLE_OMP_SECTIONS:
2549 case GIMPLE_OMP_SECTION:
2550 case GIMPLE_OMP_SINGLE:
2551 case GIMPLE_OMP_TEAMS:
2552 case GIMPLE_OMP_MASTER:
2553 case GIMPLE_OMP_TASKGROUP:
2554 case GIMPLE_OMP_ORDERED:
2555 case GIMPLE_OMP_CRITICAL:
2556 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2557 break;
2558
2559 default:
2560 /* Keep looking for other operands. */
2561 *handled_ops_p = false;
2562 return NULL_TREE;
2563 }
2564
2565 *handled_ops_p = true;
2566 return NULL_TREE;
2567 }
2568
2569 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2570 call expressions. At the same time, determine if a nested function
2571 actually uses its static chain; if not, remember that. */
2572
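/* Note on the fixed point below: converting a trampoline reference
   or a call in one function can newly set DECL_STATIC_CHAIN on
   another (via get_chain_decl), which may in turn force chains into
   further callers, so the walk repeats until the count of
   chain-using functions stops changing.  */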
2573 static void
2574 convert_all_function_calls (struct nesting_info *root)
2575 {
2576 unsigned int chain_count = 0, old_chain_count, iter_count;
2577 struct nesting_info *n;
2578
2579 /* First, optimistically clear static_chain for all decls that haven't
2580 used the static chain already for variable access. But always create
2581 it if not optimizing. This makes it possible to reconstruct the static
2582 nesting tree at run time and thus to resolve up-level references from
2583 within the debugger. */
2584 FOR_EACH_NEST_INFO (n, root)
2585 {
2586 tree decl = n->context;
2587 if (!optimize)
2588 {
2589 if (n->inner)
2590 (void) get_frame_type (n);
2591 if (n->outer)
2592 (void) get_chain_decl (n);
2593 }
2594 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2595 {
2596 DECL_STATIC_CHAIN (decl) = 0;
2597 if (dump_file && (dump_flags & TDF_DETAILS))
2598 fprintf (dump_file, "Guessing no static-chain for %s\n",
2599 lang_hooks.decl_printable_name (decl, 2));
2600 }
2601 else
2602 DECL_STATIC_CHAIN (decl) = 1;
2603 chain_count += DECL_STATIC_CHAIN (decl);
2604 }
2605
2606 /* Walk the functions and perform transformations. Note that these
2607 transformations can induce new uses of the static chain, which in turn
2608 require re-examining all users of the decl. */
2609 /* ??? It would make sense to try to use the call graph to speed this up,
2610 but the call graph hasn't really been built yet. Even if it had been, we
2611 would still need to iterate in this loop since address-of references
2612 wouldn't show up in the callgraph anyway. */
2613 iter_count = 0;
2614 do
2615 {
2616 old_chain_count = chain_count;
2617 chain_count = 0;
2618 iter_count++;
2619
2620 if (dump_file && (dump_flags & TDF_DETAILS))
2621 fputc ('\n', dump_file);
2622
2623 FOR_EACH_NEST_INFO (n, root)
2624 {
2625 tree decl = n->context;
2626 walk_function (convert_tramp_reference_stmt,
2627 convert_tramp_reference_op, n);
2628 walk_function (convert_gimple_call, NULL, n);
2629 chain_count += DECL_STATIC_CHAIN (decl);
2630 }
2631 }
2632 while (chain_count != old_chain_count);
2633
2634 if (dump_file && (dump_flags & TDF_DETAILS))
2635 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2636 iter_count);
2637 }
2638
2639 struct nesting_copy_body_data
2640 {
2641 copy_body_data cb;
2642 struct nesting_info *root;
2643 };
2644
2645 /* A helper subroutine for debug_var_chain type remapping. */
2646
2647 static tree
2648 nesting_copy_decl (tree decl, copy_body_data *id)
2649 {
2650 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2651 tree *slot = nid->root->var_map->get (decl);
2652
2653 if (slot)
2654 return (tree) *slot;
2655
2656 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2657 {
2658 tree new_decl = copy_decl_no_change (decl, id);
2659 DECL_ORIGINAL_TYPE (new_decl)
2660 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2661 return new_decl;
2662 }
2663
2664 if (TREE_CODE (decl) == VAR_DECL
2665 || TREE_CODE (decl) == PARM_DECL
2666 || TREE_CODE (decl) == RESULT_DECL)
2667 return decl;
2668
2669 return copy_decl_no_change (decl, id);
2670 }
2671
2672 /* A helper function for remap_vla_decls. See if *TP contains
2673 some remapped variables. */
2674
2675 static tree
2676 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2677 {
2678 struct nesting_info *root = (struct nesting_info *) data;
2679 tree t = *tp;
2680
2681 if (DECL_P (t))
2682 {
2683 *walk_subtrees = 0;
2684 tree *slot = root->var_map->get (t);
2685
2686 if (slot)
2687 return *slot;
2688 }
2689 return NULL;
2690 }
2691
2692 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2693 involved. */
2694
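/* A hedged example (hypothetical): for a VLA

       char buf[n];

   gimplification gives BUF a DECL_VALUE_EXPR of the form *BUF.N and
   a variably modified type.  If BUF.N or a size within the type was
   moved into the frame, both the value expression and the type must
   be remapped here so the debug info still describes the right
   object.  */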
2695 static void
2696 remap_vla_decls (tree block, struct nesting_info *root)
2697 {
2698 tree var, subblock, val, type;
2699 struct nesting_copy_body_data id;
2700
2701 for (subblock = BLOCK_SUBBLOCKS (block);
2702 subblock;
2703 subblock = BLOCK_CHAIN (subblock))
2704 remap_vla_decls (subblock, root);
2705
2706 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2707 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2708 {
2709 val = DECL_VALUE_EXPR (var);
2710 type = TREE_TYPE (var);
2711
2712 if (!(TREE_CODE (val) == INDIRECT_REF
2713 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2714 && variably_modified_type_p (type, NULL)))
2715 continue;
2716
2717 if (root->var_map->get (TREE_OPERAND (val, 0))
2718 || walk_tree (&type, contains_remapped_vars, root, NULL))
2719 break;
2720 }
2721
2722 if (var == NULL_TREE)
2723 return;
2724
2725 memset (&id, 0, sizeof (id));
2726 id.cb.copy_decl = nesting_copy_decl;
2727 id.cb.decl_map = new hash_map<tree, tree>;
2728 id.root = root;
2729
2730 for (; var; var = DECL_CHAIN (var))
2731 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2732 {
2733 struct nesting_info *i;
2734 tree newt, context;
2735
2736 val = DECL_VALUE_EXPR (var);
2737 type = TREE_TYPE (var);
2738
2739 if (!(TREE_CODE (val) == INDIRECT_REF
2740 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2741 && variably_modified_type_p (type, NULL)))
2742 continue;
2743
2744 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
2745 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2746 continue;
2747
2748 context = decl_function_context (var);
2749 for (i = root; i; i = i->outer)
2750 if (i->context == context)
2751 break;
2752
2753 if (i == NULL)
2754 continue;
2755
2756 /* Fully expand value expressions. This avoids having debug variables
2757 that are only referenced from them and that could be swept during GC. */
2758 if (slot)
2759 {
2760 tree t = (tree) *slot;
2761 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2762 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2763 }
2764
2765 id.cb.src_fn = i->context;
2766 id.cb.dst_fn = i->context;
2767 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2768
2769 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2770 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2771 {
2772 newt = TREE_TYPE (newt);
2773 type = TREE_TYPE (type);
2774 }
2775 if (TYPE_NAME (newt)
2776 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2777 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2778 && newt != type
2779 && TYPE_NAME (newt) == TYPE_NAME (type))
2780 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2781
2782 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2783 if (val != DECL_VALUE_EXPR (var))
2784 SET_DECL_VALUE_EXPR (var, val);
2785 }
2786
2787 delete id.cb.decl_map;
2788 }
2789
2790 /* Fold the MEM_REF *E. */
2791 bool
2792 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2793 {
2794 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2795 *ref_p = fold (*ref_p);
2796 return true;
2797 }
2798
2799 /* Do "everything else" to clean up or complete state collected by the various
2800 walking passes -- create a field to hold the frame base address, lay out the
2801 types and decls, generate code to initialize the frame decl, store critical
2802 expressions in the struct function for rtl to find. */
2803
2804 static void
2805 finalize_nesting_tree_1 (struct nesting_info *root)
2806 {
2807 gimple_seq stmt_list;
2808 gimple *stmt;
2809 tree context = root->context;
2810 struct function *sf;
2811
2812 stmt_list = NULL;
2813
2814 /* If we created a non-local frame type or decl, we need to lay them
2815 out at this time. */
2816 if (root->frame_type)
2817 {
2818 /* Debugging information needs to compute the frame base address of the
2819 parent frame out of the static chain from the nested frame.
2820
2821 The static chain is the address of the FRAME record, so one could
2822 imagine it would be possible to compute the frame base address just
2823 by adding a constant offset to this address. Unfortunately, this is not
2824 possible: if the FRAME object has alignment constraints that are
2825 stronger than the stack, then the offset between the frame base and
2826 the FRAME object will be dynamic.
2827
2828 What we do instead is to append a field to the FRAME object that holds
2829 the frame base address: then debug info just has to fetch this
2830 field. */
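	/* For instance, a (hypothetical) frame record laid out as

	       struct FRAME.outer { int x; void *FRAME_BASE.PARENT; };

	   lets the debugger load the trailing field to recover the
	   parent frame base, instead of assuming a constant offset from
	   the static chain value.  */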
2831
2832 /* Debugging information will refer to the CFA as the frame base
2833 address: we will do the same here. */
2834 const tree frame_addr_fndecl
2835 = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
2836
2837 /* Create a field in the FRAME record to hold the frame base address for
2838 this stack frame. Since it will be used only by the debugger, put it
2839 at the end of the record in order not to shift all other offsets. */
2840 tree fb_decl = make_node (FIELD_DECL);
2841
2842 DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
2843 TREE_TYPE (fb_decl) = ptr_type_node;
2844 TREE_ADDRESSABLE (fb_decl) = 1;
2845 DECL_CONTEXT (fb_decl) = root->frame_type;
2846 TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
2847 fb_decl);
2848
2849 /* In some cases the frame type will trigger the -Wpadded warning.
2850 This is not helpful; suppress it. */
2851 int save_warn_padded = warn_padded;
2852 warn_padded = 0;
2853 layout_type (root->frame_type);
2854 warn_padded = save_warn_padded;
2855 layout_decl (root->frame_decl, 0);
2856
2857 /* Initialize the frame base address field. If the builtin we need is
2858 not available, set it to NULL so that debugging information does not
2859 reference junk. */
2860 tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
2861 root->frame_decl, fb_decl, NULL_TREE);
2862 tree fb_tmp;
2863
2864 if (frame_addr_fndecl != NULL_TREE)
2865 {
2866 gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
2867 integer_zero_node);
2868 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2869
2870 fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
2871 }
2872 else
2873 fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
2874 gimple_seq_add_stmt (&stmt_list,
2875 gimple_build_assign (fb_ref, fb_tmp));
2876
2877 /* Remove root->frame_decl from root->new_local_var_chain, so
2878 that we can declare it also in the lexical blocks, which
2879 helps ensure virtual regs that end up appearing in its RTL
2880 expression get substituted in instantiate_virtual_regs(). */
2881 tree *adjust;
2882 for (adjust = &root->new_local_var_chain;
2883 *adjust != root->frame_decl;
2884 adjust = &DECL_CHAIN (*adjust))
2885 gcc_assert (DECL_CHAIN (*adjust));
2886 *adjust = DECL_CHAIN (*adjust);
2887
2888 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2889 declare_vars (root->frame_decl,
2890 gimple_seq_first_stmt (gimple_body (context)), true);
2891 }
2892
2893 /* If any parameters were referenced non-locally, then we need to
2894 insert a copy. Likewise, if any variables were referenced by
2895 pointer, we need to initialize the address. */
2896 if (root->any_parm_remapped)
2897 {
2898 tree p;
2899 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2900 {
2901 tree field, x, y;
2902
2903 field = lookup_field_for_decl (root, p, NO_INSERT);
2904 if (!field)
2905 continue;
2906
2907 if (use_pointer_in_frame (p))
2908 x = build_addr (p);
2909 else
2910 x = p;
2911
2912 /* If the assignment is from a non-register, the stmt is
2913 not valid GIMPLE. Make it so by using a temporary instead. */
2914 if (!is_gimple_reg (x)
2915 && is_gimple_reg_type (TREE_TYPE (x)))
2916 {
2917 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2918 x = init_tmp_var (root, x, &gsi);
2919 }
2920
2921 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2922 root->frame_decl, field, NULL_TREE);
2923 stmt = gimple_build_assign (y, x);
2924 gimple_seq_add_stmt (&stmt_list, stmt);
2925 }
2926 }
2927
2928 /* If a chain_field was created, then it needs to be initialized
2929 from chain_decl. */
2930 if (root->chain_field)
2931 {
2932 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2933 root->frame_decl, root->chain_field, NULL_TREE);
2934 stmt = gimple_build_assign (x, get_chain_decl (root));
2935 gimple_seq_add_stmt (&stmt_list, stmt);
2936 }
2937
2938 /* If trampolines were created, then we need to initialize them. */
2939 if (root->any_tramp_created)
2940 {
2941 struct nesting_info *i;
2942 for (i = root->inner; i ; i = i->next)
2943 {
2944 tree arg1, arg2, arg3, x, field;
2945
2946 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2947 if (!field)
2948 continue;
2949
2950 gcc_assert (DECL_STATIC_CHAIN (i->context));
2951 arg3 = build_addr (root->frame_decl);
2952
2953 arg2 = build_addr (i->context);
2954
2955 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2956 root->frame_decl, field, NULL_TREE);
2957 arg1 = build_addr (x);
2958
2959 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2960 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2961 gimple_seq_add_stmt (&stmt_list, stmt);
2962 }
2963 }
2964
2965 /* If we created initialization statements, insert them. */
2966 if (stmt_list)
2967 {
2968 gbind *bind;
2969 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2970 bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
2971 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2972 gimple_bind_set_body (bind, stmt_list);
2973 }
2974
2975 /* If a chain_decl was created, then it needs to be registered with
2976 struct function so that it gets initialized from the static chain
2977 register at the beginning of the function. */
2978 sf = DECL_STRUCT_FUNCTION (root->context);
2979 sf->static_chain_decl = root->chain_decl;
2980
2981 /* Similarly for the non-local goto save area. */
2982 if (root->nl_goto_field)
2983 {
2984 sf->nonlocal_goto_save_area
2985 = get_frame_field (root, context, root->nl_goto_field, NULL);
2986 sf->has_nonlocal_label = 1;
2987 }
2988
2989 /* Make sure all new local variables get inserted into the
2990 proper BIND_EXPR. */
2991 if (root->new_local_var_chain)
2992 declare_vars (root->new_local_var_chain,
2993 gimple_seq_first_stmt (gimple_body (root->context)),
2994 false);
2995
2996 if (root->debug_var_chain)
2997 {
2998 tree debug_var;
2999 gbind *scope;
3000
3001 remap_vla_decls (DECL_INITIAL (root->context), root);
3002
3003 for (debug_var = root->debug_var_chain; debug_var;
3004 debug_var = DECL_CHAIN (debug_var))
3005 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3006 break;
3007
3008 /* If there are any debug decls with variable length types,
3009 remap those types using other debug_var_chain variables. */
3010 if (debug_var)
3011 {
3012 struct nesting_copy_body_data id;
3013
3014 memset (&id, 0, sizeof (id));
3015 id.cb.copy_decl = nesting_copy_decl;
3016 id.cb.decl_map = new hash_map<tree, tree>;
3017 id.root = root;
3018
3019 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3020 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3021 {
3022 tree type = TREE_TYPE (debug_var);
3023 tree newt, t = type;
3024 struct nesting_info *i;
3025
3026 for (i = root; i; i = i->outer)
3027 if (variably_modified_type_p (type, i->context))
3028 break;
3029
3030 if (i == NULL)
3031 continue;
3032
3033 id.cb.src_fn = i->context;
3034 id.cb.dst_fn = i->context;
3035 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3036
3037 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3038 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3039 {
3040 newt = TREE_TYPE (newt);
3041 t = TREE_TYPE (t);
3042 }
3043 if (TYPE_NAME (newt)
3044 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3045 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3046 && newt != t
3047 && TYPE_NAME (newt) == TYPE_NAME (t))
3048 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3049 }
3050
3051 delete id.cb.decl_map;
3052 }
3053
3054 scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3055 if (gimple_bind_block (scope))
3056 declare_vars (root->debug_var_chain, scope, true);
3057 else
3058 BLOCK_VARS (DECL_INITIAL (root->context))
3059 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3060 root->debug_var_chain);
3061 }
3062
3063 /* Fold the rewritten MEM_REF trees. */
3064 root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3065
3066 /* Dump the translated tree function. */
3067 if (dump_file)
3068 {
3069 fputs ("\n\n", dump_file);
3070 dump_function_to_file (root->context, dump_file, dump_flags);
3071 }
3072 }
3073
3074 static void
3075 finalize_nesting_tree (struct nesting_info *root)
3076 {
3077 struct nesting_info *n;
3078 FOR_EACH_NEST_INFO (n, root)
3079 finalize_nesting_tree_1 (n);
3080 }
3081
3082 /* Unnest the nodes and pass them to cgraph. */
3083
3084 static void
3085 unnest_nesting_tree_1 (struct nesting_info *root)
3086 {
3087 struct cgraph_node *node = cgraph_node::get (root->context);
3088
3089 /* For nested functions, update the cgraph to reflect unnesting.
3090 We also delay finalizing these functions until this point. */
3091 if (node->origin)
3092 {
3093 node->unnest ();
3094 cgraph_node::finalize_function (root->context, true);
3095 }
3096 }
3097
3098 static void
3099 unnest_nesting_tree (struct nesting_info *root)
3100 {
3101 struct nesting_info *n;
3102 FOR_EACH_NEST_INFO (n, root)
3103 unnest_nesting_tree_1 (n);
3104 }
3105
3106 /* Free the data structures allocated during this pass. */
3107
3108 static void
3109 free_nesting_tree (struct nesting_info *root)
3110 {
3111 struct nesting_info *node, *next;
3112
3113 node = iter_nestinfo_start (root);
3114 do
3115 {
3116 next = iter_nestinfo_next (node);
3117 delete node->var_map;
3118 delete node->field_map;
3119 delete node->mem_refs;
3120 free (node);
3121 node = next;
3122 }
3123 while (node);
3124 }
3125
3126 /* Gimplify a function and all its nested functions. */
3127 static void
3128 gimplify_all_functions (struct cgraph_node *root)
3129 {
3130 struct cgraph_node *iter;
3131 if (!gimple_body (root->decl))
3132 gimplify_function_tree (root->decl);
3133 for (iter = root->nested; iter; iter = iter->next_nested)
3134 gimplify_all_functions (iter);
3135 }
3136
3137 /* Main entry point for this pass. Process FNDECL and all of its nested
3138 subroutines and turn them into something less tightly bound. */
3139
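/* A hedged end-to-end example (hypothetical):

       int outer (int n)
       {
	 int inner (void) { return n; }
	 return inner ();
       }

   After this pass, OUTER builds a frame object containing N, the
   call to INNER passes &FRAME as the static chain, and INNER reads
   N as CHAIN->n.  */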
3140 void
3141 lower_nested_functions (tree fndecl)
3142 {
3143 struct cgraph_node *cgn;
3144 struct nesting_info *root;
3145
3146 /* If there are no nested functions, there's nothing to do. */
3147 cgn = cgraph_node::get (fndecl);
3148 if (!cgn->nested)
3149 return;
3150
3151 gimplify_all_functions (cgn);
3152
3153 dump_file = dump_begin (TDI_nested, &dump_flags);
3154 if (dump_file)
3155 fprintf (dump_file, "\n;; Function %s\n\n",
3156 lang_hooks.decl_printable_name (fndecl, 2));
3157
3158 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3159 root = create_nesting_tree (cgn);
3160
3161 walk_all_functions (convert_nonlocal_reference_stmt,
3162 convert_nonlocal_reference_op,
3163 root);
3164 walk_all_functions (convert_local_reference_stmt,
3165 convert_local_reference_op,
3166 root);
3167 walk_all_functions (convert_nl_goto_reference, NULL, root);
3168 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3169
3170 convert_all_function_calls (root);
3171 finalize_nesting_tree (root);
3172 unnest_nesting_tree (root);
3173
3174 free_nesting_tree (root);
3175 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3176
3177 if (dump_file)
3178 {
3179 dump_end (TDI_nested, dump_file);
3180 dump_file = NULL;
3181 }
3182 }
3183
3184 #include "gt-tree-nested.h"