]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-nested.c
tree-nested.c (build_simple_mem_ref_notrap): New function.
[thirdparty/gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45
46
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
51
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
59 to compiling the outer function. Such delicate ordering requirements
60 makes it difficult to do whole translation unit optimizations
61 involving such functions.
62
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
73 inlinable.
74
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
78 examine inner functions first, so that we can avoid making changes
79 to outer functions which are unnecessary.
80
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
85
86
/* Per-function record for the nesting tree.  One is built for every
   function (and thunk) in the lexical nesting hierarchy.  */

struct nesting_info
{
  struct nesting_info *outer;	/* Lexically enclosing function.  */
  struct nesting_info *inner;	/* First lexically nested function.  */
  struct nesting_info *next;	/* Next sibling at the same depth.  */

  /* Maps a non-locally referenced decl to its FIELD_DECL in this
     (declaring) context's frame struct.  */
  hash_map<tree, tree> *field_map;
  /* Maps a decl to its local replacement: a debug decl, or for nested
     function decls a TREE_LIST holding trampoline/descriptor fields.  */
  hash_map<tree, tree> *var_map;
  /* Addresses of already-rewritten memory references, so they are not
     processed twice.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs for which frame expansion is suppressed.  */
  bitmap suppress_expansion;

  tree context;			/* The FUNCTION_DECL this record describes.  */
  tree new_local_var_chain;	/* New temporaries to declare at finalization.  */
  tree debug_var_chain;		/* Debug replacement decls to declare.  */
  tree frame_type;		/* The non-local frame RECORD_TYPE.  */
  tree frame_decl;		/* Local VAR_DECL holding that frame.  */
  tree chain_field;		/* FIELD_DECL pointing to the outer frame.  */
  tree chain_decl;		/* Incoming static chain PARM_DECL.  */
  tree nl_goto_field;		/* FIELD_DECL for the nonlocal-goto buffer.  */

  bool thunk_p;			/* True if CONTEXT is a thunk.  */
  bool any_parm_remapped;	/* A PARM_DECL was moved into the frame.  */
  bool any_tramp_created;	/* A trampoline field was created.  */
  bool any_descr_created;	/* A descriptor field was created.  */
  /* Flag bits: 1 if the own frame's address was taken, 2 if the incoming
     static chain was used.  */
  char static_chain_added;
};
113
114
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
116
117 static inline struct nesting_info *
118 iter_nestinfo_start (struct nesting_info *root)
119 {
120 while (root->inner)
121 root = root->inner;
122 return root;
123 }
124
125 static inline struct nesting_info *
126 iter_nestinfo_next (struct nesting_info *node)
127 {
128 if (node->next)
129 return iter_nestinfo_start (node->next);
130 return node->outer;
131 }
132
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
135
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack;
138
139
140 /* We're working in so many different function contexts simultaneously,
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143
144 /* Like create_tmp_var, except record the variable for registration at
145 the given nesting level. */
146
147 static tree
148 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
149 {
150 tree tmp_var;
151
152 /* If the type is of variable size or a type which must be created by the
153 frontend, something is wrong. Note that we explicitly allow
154 incomplete types here, since we create them ourselves here. */
155 gcc_assert (!TREE_ADDRESSABLE (type));
156 gcc_assert (!TYPE_SIZE_UNIT (type)
157 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
158
159 tmp_var = create_tmp_var_raw (type, prefix);
160 DECL_CONTEXT (tmp_var) = info->context;
161 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163 if (TREE_CODE (type) == COMPLEX_TYPE
164 || TREE_CODE (type) == VECTOR_TYPE)
165 DECL_GIMPLE_REG_P (tmp_var) = 1;
166
167 info->new_local_var_chain = tmp_var;
168
169 return tmp_var;
170 }
171
172 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
173
174 static tree
175 build_simple_mem_ref_notrap (tree ptr)
176 {
177 tree t = build_simple_mem_ref (ptr);
178 TREE_THIS_NOTRAP (t) = 1;
179 return t;
180 }
181
182 /* Take the address of EXP to be used within function CONTEXT.
183 Mark it for addressability as necessary. */
184
185 tree
186 build_addr (tree exp)
187 {
188 mark_addressable (exp);
189 return build_fold_addr_expr (exp);
190 }
191
192 /* Insert FIELD into TYPE, sorted by alignment requirements. */
193
194 void
195 insert_field_into_struct (tree type, tree field)
196 {
197 tree *p;
198
199 DECL_CONTEXT (field) = type;
200
201 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
202 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
203 break;
204
205 DECL_CHAIN (field) = *p;
206 *p = field;
207
208 /* Set correct alignment for frame struct type. */
209 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
210 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
211 }
212
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the type "FRAME.<function-name>".  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;

      /* Do not put info->frame_decl on info->new_local_var_chain,
	 so that we can declare it in the lexical blocks, which
	 makes sure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs.  */
      info->frame_decl = create_tmp_var_raw (type, "FRAME");
      DECL_CONTEXT (info->frame_decl) = info->context;
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }

  return type;
}
258
259 /* Return true if DECL should be referenced by pointer in the non-local frame
260 structure. */
261
262 static bool
263 use_pointer_in_frame (tree decl)
264 {
265 if (TREE_CODE (decl) == PARM_DECL)
266 {
267 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268 sized DECLs, and inefficient to copy large aggregates. Don't bother
269 moving anything but scalar parameters. */
270 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
271 }
272 else
273 {
274 /* Variable-sized DECLs can only come from OMP clauses at this point
275 since the gimplifier has already turned the regular variables into
276 pointers. Do the same as the gimplifier. */
277 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
278 }
279 }
280
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  /* The field belongs in the frame of the function declaring DECL.  */
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame stores only a pointer to DECL, so the field need
	     not mirror DECL's own attributes.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame stores DECL itself; mirror its type, alignment,
	     addressability and volatility on the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      /* Record that a parameter has been moved into the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
355
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The static chain is a pointer to the enclosing function's
	 frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      /* The function now formally takes a static chain.  */
      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
401
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      /* The field is a pointer to the enclosing function's frame.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      /* The function now formally takes a static chain.  */
      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
435
436 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
437
438 static tree
439 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
440 gcall *call)
441 {
442 tree t;
443
444 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
445 gimple_call_set_lhs (call, t);
446 if (! gsi_end_p (*gsi))
447 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
448 gsi_insert_before (gsi, call, GSI_SAME_STMT);
449
450 return t;
451 }
452
453
454 /* Copy EXP into a temporary. Allocate the temporary in the context of
455 INFO and insert the initialization statement before GSI. */
456
457 static tree
458 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
459 {
460 tree t;
461 gimple *stmt;
462
463 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
464 stmt = gimple_build_assign (t, exp);
465 if (! gsi_end_p (*gsi))
466 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
467 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
468
469 return t;
470 }
471
472
473 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
474
475 static tree
476 gsi_gimplify_val (struct nesting_info *info, tree exp,
477 gimple_stmt_iterator *gsi)
478 {
479 if (is_gimple_val (exp))
480 return exp;
481 else
482 return init_tmp_var (info, exp, gsi);
483 }
484
485 /* Similarly, but copy from the temporary and insert the statement
486 after the iterator. */
487
488 static tree
489 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
490 {
491 tree t;
492 gimple *stmt;
493
494 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
495 stmt = gimple_build_assign (exp, t);
496 if (! gsi_end_p (*gsi))
497 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
498 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
499
500 return t;
501 }
502
503 /* Build or return the type used to represent a nested function trampoline. */
504
505 static GTY(()) tree trampoline_type;
506
/* Build the cached trampoline RECORD_TYPE: a single char-array field
   named __data of TRAMPOLINE_SIZE bytes (possibly padded) aligned to
   TRAMPOLINE_ALIGNMENT (possibly capped at STACK_BOUNDARY).  */

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  /* The type is target-dependent, not function-dependent, so it is
     built only once and cached.  */
  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Build the __data field: char[size] with the required alignment.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
542
543 /* Build or return the type used to represent a nested function descriptor. */
544
545 static GTY(()) tree descriptor_type;
546
/* Build the cached descriptor RECORD_TYPE: a single two-element
   pointer-array field named __data, aligned at least as a function.  */

static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  /* The type is target-dependent, not function-dependent, so it is
     built only once and cached.  */
  if (descriptor_type)
    return descriptor_type;

  /* Build the __data field: an array of two pointers.  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
572
573 /* Given DECL, a nested function, find or create an element in the
574 var map for this function. */
575
576 static tree
577 lookup_element_for_decl (struct nesting_info *info, tree decl,
578 enum insert_option insert)
579 {
580 if (insert == NO_INSERT)
581 {
582 tree *slot = info->var_map->get (decl);
583 return slot ? *slot : NULL_TREE;
584 }
585
586 tree *slot = &info->var_map->get_or_insert (decl);
587 if (!*slot)
588 *slot = build_tree_list (NULL_TREE, NULL_TREE);
589
590 return (tree) *slot;
591 }
592
593 /* Given DECL, a nested function, create a field in the non-local
594 frame structure for this function. */
595
596 static tree
597 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
598 {
599 tree field = make_node (FIELD_DECL);
600 DECL_NAME (field) = DECL_NAME (decl);
601 TREE_TYPE (field) = type;
602 TREE_ADDRESSABLE (field) = 1;
603 insert_field_into_struct (get_frame_type (info), field);
604 return field;
605 }
606
607 /* Given DECL, a nested function, find or create a field in the non-local
608 frame structure for a trampoline for this function. */
609
610 static tree
611 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
612 enum insert_option insert)
613 {
614 tree elt, field;
615
616 elt = lookup_element_for_decl (info, decl, insert);
617 if (!elt)
618 return NULL_TREE;
619
620 field = TREE_PURPOSE (elt);
621
622 if (!field && insert == INSERT)
623 {
624 field = create_field_for_decl (info, decl, get_trampoline_type (info));
625 TREE_PURPOSE (elt) = field;
626 info->any_tramp_created = true;
627 }
628
629 return field;
630 }
631
632 /* Given DECL, a nested function, find or create a field in the non-local
633 frame structure for a descriptor for this function. */
634
635 static tree
636 lookup_descr_for_decl (struct nesting_info *info, tree decl,
637 enum insert_option insert)
638 {
639 tree elt, field;
640
641 elt = lookup_element_for_decl (info, decl, insert);
642 if (!elt)
643 return NULL_TREE;
644
645 field = TREE_VALUE (elt);
646
647 if (!field && insert == INSERT)
648 {
649 field = create_field_for_decl (info, decl, get_descriptor_type (info));
650 TREE_VALUE (elt) = field;
651 info->any_descr_created = true;
652 }
653
654 return field;
655 }
656
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Pointer-sized words in the save area, plus one for the frame
	 pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      /* The buffer is referenced by address (see the head comment).  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
701
702 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
703
704 static void
705 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
706 struct nesting_info *info, gimple_seq *pseq)
707 {
708 struct walk_stmt_info wi;
709
710 memset (&wi, 0, sizeof (wi));
711 wi.info = info;
712 wi.val_only = true;
713 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
714 }
715
716
717 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
718
719 static inline void
720 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
721 struct nesting_info *info)
722 {
723 gimple_seq body = gimple_body (info->context);
724 walk_body (callback_stmt, callback_op, info, &body);
725 gimple_set_body (info->context, body);
726 }
727
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* The pre-body is an ordinary statement sequence.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Position the iterator on an initially empty sequence, so that any
     statements the callbacks insert accumulate in SEQ.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index and the first incr operand are walked with val_only
	 clear; the other operands with val_only set.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
	         &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is asserted to be a binary expression; walk each
	 operand separately.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* If the callbacks created statements, append them to the pre-body,
     annotated with the loop's location.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
780
781 /* Similarly for ROOT and all functions nested underneath, depth first. */
782
783 static void
784 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
785 struct nesting_info *root)
786 {
787 struct nesting_info *n;
788 FOR_EACH_NEST_INFO (n, root)
789 walk_function (callback_stmt, callback_op, n);
790 }
791
792
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
795 function. So if any are variably-sized, they will get remapped when the
796 enclosing function is inlined. But that remapping would also have to be
797 done in the types of the PARM_DECLs of the nested function, meaning the
798 argument types of that function will disagree with the arguments in the
799 calls to that function. So we'd either have to make a copy of the nested
800 function corresponding to each time the enclosing function was inlined or
801 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
802 function. The former is not practical. The latter would still require
803 detecting this case to know when to add the conversions. So, for now at
804 least, we don't inline such an enclosing function.
805
806 We have to do that check recursively, so here return indicating whether
807 FNDECL has such a nested function. ORIG_FN is the function we were
808 trying to inline to use for checking whether any argument is variably
809 modified by anything in it.
810
811 It would be better to do this in tree-inline.c so that we could give
812 the appropriate warning for why a function can't be inlined, but that's
813 too late since the nesting structure has already been flattened and
814 adding a flag just to record this fact seems a waste of a flag. */
815
816 static bool
817 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
818 {
819 struct cgraph_node *cgn = cgraph_node::get (fndecl);
820 tree arg;
821
822 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
823 {
824 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
825 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
826 return true;
827
828 if (check_for_nested_with_variably_modified (cgn->decl,
829 orig_fndecl))
830 return true;
831 }
832
833 return false;
834 }
835
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;
  info->thunk_p = cgn->thunk.thunk_p;

  /* Recursively build the subtree for each nested function, pushing
     each child onto the front of our inner list.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See the discussion at check_for_nested_with_variably_modified for
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
865
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target is ourselves: the chain is simply the address of
	 our own frame.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and dereference one
	 __chain field per intervening nesting level, materializing a
	 temporary at each step.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
898
899
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Walk up the static chain, one temporary per intervening
	 nesting level, then dereference the final frame pointer.
	 (The inner FIELD intentionally shadows the parameter.)  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref_notrap (x);
    }

  /* Finally select FIELD from the target frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
938
939 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
940
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  /* Reuse the replacement decl if one was already created for DECL.  */
  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Walk up the static chain, one dereference per nesting level.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref_notrap (x);
    }

  /* Select DECL's field in the declaring frame, with an extra
     dereference if the frame only holds a pointer to DECL.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref_notrap (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl is just a name for the frame access expression.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* NOTE(review): presumably this makes the VLA type's size expressions
     available locally when not optimizing — confirm against
     note_nonlocal_vla_type.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
1018
1019
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	if (info->context == target_context)
	  break;

	wi->changed = true;

	/* Decls marked in suppress_expansion (set up by the OMP clause
	   converters) are replaced by a debug decl whose DECL_VALUE_EXPR
	   carries the frame access; otherwise build the access here.  */
	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	/* In value context, load into (or store via) a temporary so the
	   resulting operand is a simple SSA-able variable.  */
	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address context (!val_only) so a nonlocal
	   decl is rewritten as an lvalue and not loaded into a temp.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size may all mention
		 nonlocal decls (e.g. VLA bounds).  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1179
1180 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1181 struct walk_stmt_info *);
1182
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   Returns true if the clauses require the static chain to be passed
   into the construct (the caller then adds a firstprivate/map clause
   for the chain decl).  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Collect suppressed decls into a copy; the caller saves and restores
     info->suppress_expansion around the construct's body.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Placeholder sequences are rewritten in a second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  /* Replace a nonlocal clause decl by its debug decl and suppress
	     the normal frame-access expansion for it inside the body.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      /* PRIVATE makes a fresh copy, so no chain access needed.  */
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* Both the size expression and the (possibly non-decl) mapped
	     object may reference nonlocal variables.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* These clauses reference no decls and need no rewriting.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: rewrite the GIMPLE sequences attached to clauses, now
     that suppress_expansion reflects the clause decls above.  The
     placeholder decls are temporarily given this function's context so
     their references are treated as local during the walk.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1444
1445 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1446
1447 static void
1448 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1449 {
1450 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1451 type = TREE_TYPE (type);
1452
1453 if (TYPE_NAME (type)
1454 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1455 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1456 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1457
1458 while (POINTER_TYPE_P (type)
1459 || TREE_CODE (type) == VECTOR_TYPE
1460 || TREE_CODE (type) == FUNCTION_TYPE
1461 || TREE_CODE (type) == METHOD_TYPE)
1462 type = TREE_TYPE (type);
1463
1464 if (TREE_CODE (type) == ARRAY_TYPE)
1465 {
1466 tree domain, t;
1467
1468 note_nonlocal_vla_type (info, TREE_TYPE (type));
1469 domain = TYPE_DOMAIN (type);
1470 if (domain)
1471 {
1472 t = TYPE_MIN_VALUE (domain);
1473 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1474 && decl_function_context (t) != info->context)
1475 get_nonlocal_debug_decl (info, t);
1476 t = TYPE_MAX_VALUE (domain);
1477 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1478 && decl_function_context (t) != info->context)
1479 get_nonlocal_debug_decl (info, t);
1480 }
1481 }
1482 }
1483
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams are handled like the simple constructs below;
	 host teams fall through to the parallel/task handling.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  /* The body needs the static chain: hand it in through a
	     firstprivate clause prepended to the clause list.  */
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Temporaries created while walking the body must be declared
	 inside the construct, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	  	 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	  	 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded targets behave like the simple constructs; an
	 offloaded region instead receives the chain via a map clause.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* No clauses to convert; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	  	 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (VAR_P (decl)
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a nonlocal decl is meaningless here; drop it
	 rather than rewriting it into a frame access.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1697
1698
1699 /* A subroutine of convert_local_reference. Create a local variable
1700 in the parent function with DECL_VALUE_EXPR set to reference the
1701 field in FRAME. This is used both for debug info and in OMP
1702 lowering. */
1703
1704 static tree
1705 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1706 {
1707 tree x, new_decl;
1708
1709 tree *slot = &info->var_map->get_or_insert (decl);
1710 if (*slot)
1711 return *slot;
1712
1713 /* Make sure frame_decl gets created. */
1714 (void) get_frame_type (info);
1715 x = info->frame_decl;
1716 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1717
1718 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1719 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1720 DECL_CONTEXT (new_decl) = info->context;
1721 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1722 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1723 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1724 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1725 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1726 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1727 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1728 if ((TREE_CODE (decl) == PARM_DECL
1729 || TREE_CODE (decl) == RESULT_DECL
1730 || VAR_P (decl))
1731 && DECL_BY_REFERENCE (decl))
1732 DECL_BY_REFERENCE (new_decl) = 1;
1733
1734 SET_DECL_VALUE_EXPR (new_decl, x);
1735 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1736 *slot = new_decl;
1737
1738 DECL_CHAIN (new_decl) = info->debug_var_chain;
1739 info->debug_var_chain = new_decl;
1740
1741 /* Do not emit debug info twice. */
1742 DECL_IGNORED_P (decl) = 1;
1743
1744 return new_decl;
1745 }
1746
1747
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Suppressed decls (from OMP clause handling) get a debug decl
	     whose DECL_VALUE_EXPR does the frame access; otherwise build
	     the frame access directly.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, t, field);
	  else
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In value context, go through a temporary variable.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size may all reference
		 frame variables (e.g. VLA bounds).  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1899
1900 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1901 struct walk_stmt_info *);
1902
1903 /* Helper for convert_local_reference. Convert all the references in
1904 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1905
1906 static bool
1907 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1908 {
1909 struct nesting_info *const info = (struct nesting_info *) wi->info;
1910 bool need_frame = false, need_stmts = false;
1911 tree clause, decl;
1912 int dummy;
1913 bitmap new_suppress;
1914
1915 new_suppress = BITMAP_GGC_ALLOC ();
1916 bitmap_copy (new_suppress, info->suppress_expansion);
1917
1918 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1919 {
1920 switch (OMP_CLAUSE_CODE (clause))
1921 {
1922 case OMP_CLAUSE_REDUCTION:
1923 case OMP_CLAUSE_IN_REDUCTION:
1924 case OMP_CLAUSE_TASK_REDUCTION:
1925 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1926 need_stmts = true;
1927 goto do_decl_clause;
1928
1929 case OMP_CLAUSE_LASTPRIVATE:
1930 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1931 need_stmts = true;
1932 goto do_decl_clause;
1933
1934 case OMP_CLAUSE_LINEAR:
1935 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1936 need_stmts = true;
1937 wi->val_only = true;
1938 wi->is_lhs = false;
1939 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1940 wi);
1941 goto do_decl_clause;
1942
1943 case OMP_CLAUSE_PRIVATE:
1944 case OMP_CLAUSE_FIRSTPRIVATE:
1945 case OMP_CLAUSE_COPYPRIVATE:
1946 case OMP_CLAUSE_SHARED:
1947 case OMP_CLAUSE_TO_DECLARE:
1948 case OMP_CLAUSE_LINK:
1949 case OMP_CLAUSE_USE_DEVICE_PTR:
1950 case OMP_CLAUSE_IS_DEVICE_PTR:
1951 do_decl_clause:
1952 decl = OMP_CLAUSE_DECL (clause);
1953 if (VAR_P (decl)
1954 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1955 break;
1956 if (decl_function_context (decl) == info->context
1957 && !use_pointer_in_frame (decl))
1958 {
1959 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1960 if (field)
1961 {
1962 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1963 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1964 bitmap_set_bit (new_suppress, DECL_UID (decl));
1965 OMP_CLAUSE_DECL (clause)
1966 = get_local_debug_decl (info, decl, field);
1967 need_frame = true;
1968 }
1969 }
1970 break;
1971
1972 case OMP_CLAUSE_SCHEDULE:
1973 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1974 break;
1975 /* FALLTHRU */
1976 case OMP_CLAUSE_FINAL:
1977 case OMP_CLAUSE_IF:
1978 case OMP_CLAUSE_NUM_THREADS:
1979 case OMP_CLAUSE_DEPEND:
1980 case OMP_CLAUSE_DEVICE:
1981 case OMP_CLAUSE_NUM_TEAMS:
1982 case OMP_CLAUSE_THREAD_LIMIT:
1983 case OMP_CLAUSE_SAFELEN:
1984 case OMP_CLAUSE_SIMDLEN:
1985 case OMP_CLAUSE_PRIORITY:
1986 case OMP_CLAUSE_GRAINSIZE:
1987 case OMP_CLAUSE_NUM_TASKS:
1988 case OMP_CLAUSE_HINT:
1989 case OMP_CLAUSE_NUM_GANGS:
1990 case OMP_CLAUSE_NUM_WORKERS:
1991 case OMP_CLAUSE_VECTOR_LENGTH:
1992 case OMP_CLAUSE_GANG:
1993 case OMP_CLAUSE_WORKER:
1994 case OMP_CLAUSE_VECTOR:
1995 case OMP_CLAUSE_ASYNC:
1996 case OMP_CLAUSE_WAIT:
1997 /* Several OpenACC clauses have optional arguments. Check if they
1998 are present. */
1999 if (OMP_CLAUSE_OPERAND (clause, 0))
2000 {
2001 wi->val_only = true;
2002 wi->is_lhs = false;
2003 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2004 &dummy, wi);
2005 }
2006
2007 /* The gang clause accepts two arguments. */
2008 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2009 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2010 {
2011 wi->val_only = true;
2012 wi->is_lhs = false;
2013 convert_nonlocal_reference_op
2014 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2015 }
2016 break;
2017
2018 case OMP_CLAUSE_DIST_SCHEDULE:
2019 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2020 {
2021 wi->val_only = true;
2022 wi->is_lhs = false;
2023 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2024 &dummy, wi);
2025 }
2026 break;
2027
2028 case OMP_CLAUSE_MAP:
2029 case OMP_CLAUSE_TO:
2030 case OMP_CLAUSE_FROM:
2031 if (OMP_CLAUSE_SIZE (clause))
2032 {
2033 wi->val_only = true;
2034 wi->is_lhs = false;
2035 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2036 &dummy, wi);
2037 }
2038 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2039 goto do_decl_clause;
2040 wi->val_only = true;
2041 wi->is_lhs = false;
2042 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2043 wi, NULL);
2044 break;
2045
2046 case OMP_CLAUSE_ALIGNED:
2047 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2048 {
2049 wi->val_only = true;
2050 wi->is_lhs = false;
2051 convert_local_reference_op
2052 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2053 }
2054 /* FALLTHRU */
2055 case OMP_CLAUSE_NONTEMPORAL:
2056 /* Like do_decl_clause, but don't add any suppression. */
2057 decl = OMP_CLAUSE_DECL (clause);
2058 if (VAR_P (decl)
2059 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2060 break;
2061 if (decl_function_context (decl) == info->context
2062 && !use_pointer_in_frame (decl))
2063 {
2064 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2065 if (field)
2066 {
2067 OMP_CLAUSE_DECL (clause)
2068 = get_local_debug_decl (info, decl, field);
2069 need_frame = true;
2070 }
2071 }
2072 break;
2073
2074 case OMP_CLAUSE_NOWAIT:
2075 case OMP_CLAUSE_ORDERED:
2076 case OMP_CLAUSE_DEFAULT:
2077 case OMP_CLAUSE_COPYIN:
2078 case OMP_CLAUSE_COLLAPSE:
2079 case OMP_CLAUSE_TILE:
2080 case OMP_CLAUSE_UNTIED:
2081 case OMP_CLAUSE_MERGEABLE:
2082 case OMP_CLAUSE_PROC_BIND:
2083 case OMP_CLAUSE_NOGROUP:
2084 case OMP_CLAUSE_THREADS:
2085 case OMP_CLAUSE_SIMD:
2086 case OMP_CLAUSE_DEFAULTMAP:
2087 case OMP_CLAUSE_ORDER:
2088 case OMP_CLAUSE_SEQ:
2089 case OMP_CLAUSE_INDEPENDENT:
2090 case OMP_CLAUSE_AUTO:
2091 case OMP_CLAUSE_IF_PRESENT:
2092 case OMP_CLAUSE_FINALIZE:
2093 case OMP_CLAUSE__CONDTEMP_:
2094 case OMP_CLAUSE__SCANTEMP_:
2095 break;
2096
2097 /* The following clause belongs to the OpenACC cache directive, which
2098 is discarded during gimplification. */
2099 case OMP_CLAUSE__CACHE_:
2100 /* The following clauses are only allowed in the OpenMP declare simd
2101 directive, so not seen here. */
2102 case OMP_CLAUSE_UNIFORM:
2103 case OMP_CLAUSE_INBRANCH:
2104 case OMP_CLAUSE_NOTINBRANCH:
2105 /* The following clauses are only allowed on OpenMP cancel and
2106 cancellation point directives, which at this point have already
2107 been lowered into a function call. */
2108 case OMP_CLAUSE_FOR:
2109 case OMP_CLAUSE_PARALLEL:
2110 case OMP_CLAUSE_SECTIONS:
2111 case OMP_CLAUSE_TASKGROUP:
2112 /* The following clauses are only added during OMP lowering; nested
2113 function decomposition happens before that. */
2114 case OMP_CLAUSE__LOOPTEMP_:
2115 case OMP_CLAUSE__REDUCTEMP_:
2116 case OMP_CLAUSE__SIMDUID_:
2117 case OMP_CLAUSE__GRIDDIM_:
2118 case OMP_CLAUSE__SIMT_:
2119 /* Anything else. */
2120 default:
2121 gcc_unreachable ();
2122 }
2123 }
2124
2125 info->suppress_expansion = new_suppress;
2126
2127 if (need_stmts)
2128 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2129 switch (OMP_CLAUSE_CODE (clause))
2130 {
2131 case OMP_CLAUSE_REDUCTION:
2132 case OMP_CLAUSE_IN_REDUCTION:
2133 case OMP_CLAUSE_TASK_REDUCTION:
2134 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2135 {
2136 tree old_context
2137 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2138 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2139 = info->context;
2140 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2141 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2142 = info->context;
2143 walk_body (convert_local_reference_stmt,
2144 convert_local_reference_op, info,
2145 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2146 walk_body (convert_local_reference_stmt,
2147 convert_local_reference_op, info,
2148 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2149 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2150 = old_context;
2151 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2152 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2153 = old_context;
2154 }
2155 break;
2156
2157 case OMP_CLAUSE_LASTPRIVATE:
2158 walk_body (convert_local_reference_stmt,
2159 convert_local_reference_op, info,
2160 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2161 break;
2162
2163 case OMP_CLAUSE_LINEAR:
2164 walk_body (convert_local_reference_stmt,
2165 convert_local_reference_op, info,
2166 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2167 break;
2168
2169 default:
2170 break;
2171 }
2172
2173 return need_frame;
2174 }
2175
2176
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* Non-host teams constructs only need their clauses and body
	 walked; host teams fall through to the parallel/task path.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If converting the clauses referenced the frame, share FRAME.*
	 with the construct's body via an artificial SHARED clause.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the body with fresh bookkeeping so we can tell what the
	 body itself introduced.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* If walking the body made the frame live (bit 4) and no SHARED
	 clause was added above, add one now.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Declare any temporaries created by the walk at the head of the
	 construct's body, then restore the saved bookkeeping.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions only need clauses and body walked.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* For offloaded regions the frame is mapped (GOMP_MAP_TOFROM)
	 rather than merely shared.  */
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* As in the parallel/task case: add the MAP clause late if the
	 body walk turned out to need the frame.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* The condition is an rvalue context; let the operand walker
	 process it with val_only set.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  /* Drop clobbers of decls that have been given a frame field;
	     replace the statement with a nop.  */
	  if (DECL_P (lhs)
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		/* Same test as do_decl_clause: only rewrite decls of
		   this context that got a frame field.  */
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2409
2410
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  /* Only direct gotos to a LABEL_DECL are of interest here.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label of the current function needs no rewriting.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the outer nesting_info whose context owns LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2477
2478
2479 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2480 are referenced via nonlocal goto from a nested function. The rewrite
2481 will involve installing a newly generated DECL_NONLOCAL label, and
2482 (potentially) a branch around the rtl gunk that is assumed to be
2483 attached to such a label. */
2484
2485 static tree
2486 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2487 struct walk_stmt_info *wi)
2488 {
2489 struct nesting_info *const info = (struct nesting_info *) wi->info;
2490 tree label, new_label;
2491 gimple_stmt_iterator tmp_gsi;
2492 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2493
2494 if (!stmt)
2495 {
2496 *handled_ops_p = false;
2497 return NULL_TREE;
2498 }
2499
2500 label = gimple_label_label (stmt);
2501
2502 tree *slot = info->var_map->get (label);
2503 if (!slot)
2504 {
2505 *handled_ops_p = false;
2506 return NULL_TREE;
2507 }
2508
2509 /* If there's any possibility that the previous statement falls through,
2510 then we must branch around the new non-local label. */
2511 tmp_gsi = wi->gsi;
2512 gsi_prev (&tmp_gsi);
2513 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2514 {
2515 gimple *stmt = gimple_build_goto (label);
2516 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2517 }
2518
2519 new_label = (tree) *slot;
2520 stmt = gimple_build_label (new_label);
2521 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2522
2523 *handled_ops_p = true;
2524 return NULL_TREE;
2525 }
2526
2527
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  /* Assume no subtree needs walking; the default case re-enables it.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline.  */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      /* This creates (or reuses) the field in the parent's frame.  */
      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2611
2612
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams need only the generic operand walk.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      do_parallel:
      {
	/* Walk the construct's own operands, then its body with fresh
	   bookkeeping so we can see what the body introduced.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* i == 0 handles FRAME.*, i == 1 handles CHAIN.*.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions take a MAP clause instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2717
2718
2719
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Nothing to do if a static chain operand is already present,
	 or if this is an indirect call.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: FRAME.* became live (callee's parent is this context);
	     bit 1: CHAIN.* became live (callee is nested deeper).  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the body with fresh bookkeeping, then reflect any new
	 FRAME.*/CHAIN.* uses in the construct's clause list.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      /* i == 0 handles FRAME.*, i == 1 handles CHAIN.*.  */
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Like the parallel/task case, but offloaded regions take MAP
	 clauses rather than SHARED/FIRSTPRIVATE.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2864
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Thunks copy the static-chain setting from the function they alias.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = cgraph_node::get (decl)->thunk.alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  /* Iterate to a fixed point: stop once a pass no longer changes the
     number of functions whose DECL_STATIC_CHAIN is set.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate the aliased setting into thunks after each pass.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = cgraph_node::get (decl)->thunk.alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2954
/* Wrapper around copy_body_data carrying the nesting_info being processed.
   CB must be the first member so that the copy_body_data pointer handed to
   callbacks (see nesting_copy_decl) can be cast back to this structure.  */
struct nesting_copy_body_data
{
  copy_body_data cb;
  /* The nesting_info whose var_map drives the remapping.  */
  struct nesting_info *root;
};
2960
2961 /* A helper subroutine for debug_var_chain type remapping. */
2962
2963 static tree
2964 nesting_copy_decl (tree decl, copy_body_data *id)
2965 {
2966 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2967 tree *slot = nid->root->var_map->get (decl);
2968
2969 if (slot)
2970 return (tree) *slot;
2971
2972 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2973 {
2974 tree new_decl = copy_decl_no_change (decl, id);
2975 DECL_ORIGINAL_TYPE (new_decl)
2976 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2977 return new_decl;
2978 }
2979
2980 if (VAR_P (decl)
2981 || TREE_CODE (decl) == PARM_DECL
2982 || TREE_CODE (decl) == RESULT_DECL)
2983 return decl;
2984
2985 return copy_decl_no_change (decl, id);
2986 }
2987
2988 /* A helper function for remap_vla_decls. See if *TP contains
2989 some remapped variables. */
2990
2991 static tree
2992 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2993 {
2994 struct nesting_info *root = (struct nesting_info *) data;
2995 tree t = *tp;
2996
2997 if (DECL_P (t))
2998 {
2999 *walk_subtrees = 0;
3000 tree *slot = root->var_map->get (t);
3001
3002 if (slot)
3003 return *slot;
3004 }
3005 return NULL;
3006 }
3007
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  VLA variables carry a DECL_VALUE_EXPR of the form *TMP;
   when TMP or the variably modified type mentions a variable that was
   moved into a frame record, both the value expression and the type
   must be rewritten in terms of the replacement decls in ROOT's
   var_map.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Depth-first: handle all nested lexical blocks before this one.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap first scan: stop at the first VLA-style variable whose value
     expression or type involves a remapped variable.  If there is none,
     return without setting up the remapping machinery.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *TMP value expressions of variably modified variables are
	   of interest here.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Set up a copy_body_data whose copy_decl hook resolves decls through
     ROOT's var_map (see nesting_copy_decl).  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: actually rewrite, resuming from the variable the first
     scan stopped at.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level whose context declared VAR; skip the
	   variable if it belongs to a function outside this chain.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip unnamed pointer layers in lockstep so the name comparison
	   below is done on the innermost named types.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If remapping produced a distinct type that still shares the old
	   TYPE_DECL name, remap the name too.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3105
3106 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3107 involved. */
3108
3109 static void
3110 fixup_vla_decls (tree block)
3111 {
3112 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3113 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3114 {
3115 tree val = DECL_VALUE_EXPR (var);
3116
3117 if (!(TREE_CODE (val) == INDIRECT_REF
3118 && VAR_P (TREE_OPERAND (val, 0))
3119 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3120 continue;
3121
3122 /* Fully expand value expressions. This avoids having debug variables
3123 only referenced from them and that can be swept during GC. */
3124 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3125 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3126 SET_DECL_VALUE_EXPR (var, val);
3127 }
3128
3129 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3130 fixup_vla_decls (sub);
3131 }
3132
3133 /* Fold the MEM_REF *E. */
3134 bool
3135 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3136 {
3137 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3138 *ref_p = fold (*ref_p);
3139 return true;
3140 }
3141
3142 /* Given DECL, a nested function, build an initialization call for FIELD,
3143 the trampoline or descriptor for DECL, using FUNC as the function. */
3144
3145 static gcall *
3146 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3147 tree func)
3148 {
3149 tree arg1, arg2, arg3, x;
3150
3151 gcc_assert (DECL_STATIC_CHAIN (decl));
3152 arg3 = build_addr (info->frame_decl);
3153
3154 arg2 = build_addr (decl);
3155
3156 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3157 info->frame_decl, field, NULL_TREE);
3158 arg1 = build_addr (x);
3159
3160 return gimple_build_call (func, 3, arg1, arg2, arg3);
3161 }
3162
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks carry no body of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
	= builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the new statements to the body of the function's
	 outermost bind: append the old bind body to STMT_LIST, then make
	 STMT_LIST the new bind body.  */
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the nesting level whose context the type varies
		   with; skip the decl if there is none in this chain.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Strip unnamed pointer layers in lockstep so the name
		   comparison below is done on the innermost named types.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* If remapping produced a distinct type still sharing the
		   old TYPE_DECL name, remap the name too.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Attach the debug variables to the outermost bind's block if it
	 has one, otherwise to the function-level BLOCK directly.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3436
3437 static void
3438 finalize_nesting_tree (struct nesting_info *root)
3439 {
3440 struct nesting_info *n;
3441 FOR_EACH_NEST_INFO (n, root)
3442 finalize_nesting_tree_1 (n);
3443 }
3444
3445 /* Unnest the nodes and pass them to cgraph. */
3446
3447 static void
3448 unnest_nesting_tree_1 (struct nesting_info *root)
3449 {
3450 struct cgraph_node *node = cgraph_node::get (root->context);
3451
3452 /* For nested functions update the cgraph to reflect unnesting.
3453 We also delay finalizing of these functions up to this point. */
3454 if (node->origin)
3455 {
3456 node->unnest ();
3457 if (!root->thunk_p)
3458 cgraph_node::finalize_function (root->context, true);
3459 }
3460 }
3461
3462 static void
3463 unnest_nesting_tree (struct nesting_info *root)
3464 {
3465 struct nesting_info *n;
3466 FOR_EACH_NEST_INFO (n, root)
3467 unnest_nesting_tree_1 (n);
3468 }
3469
3470 /* Free the data structures allocated during this pass. */
3471
3472 static void
3473 free_nesting_tree (struct nesting_info *root)
3474 {
3475 struct nesting_info *node, *next;
3476
3477 node = iter_nestinfo_start (root);
3478 do
3479 {
3480 next = iter_nestinfo_next (node);
3481 delete node->var_map;
3482 delete node->field_map;
3483 delete node->mem_refs;
3484 free (node);
3485 node = next;
3486 }
3487 while (node);
3488 }
3489
3490 /* Gimplify a function and all its nested functions. */
3491 static void
3492 gimplify_all_functions (struct cgraph_node *root)
3493 {
3494 struct cgraph_node *iter;
3495 if (!gimple_body (root->decl))
3496 gimplify_function_tree (root->decl);
3497 for (iter = root->nested; iter; iter = iter->next_nested)
3498 if (!iter->thunk.thunk_p)
3499 gimplify_all_functions (iter);
3500 }
3501
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* The walks below operate on GIMPLE; gimplify all bodies up front.  */
  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references: first nonlocal references (from inner functions
     to outer entities), then local references, then non-local gotos and
     their receivers.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Rewrite the call sites, materialize the frame objects, and hand the
     now-independent functions over to cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3548
3549 #include "gt-tree-nested.h"