]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-nested.c
Move thunks out of cgraph_node
[thirdparty/gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
49
50 /* Summary of nested functions. */
51 static function_summary <nested_function_info *>
52 *nested_function_sum = NULL;
53
54 /* Return nested_function_info, if available. */
55 nested_function_info *
56 nested_function_info::get (cgraph_node *node)
57 {
58 if (!nested_function_sum)
59 return NULL;
60 return nested_function_sum->get (node);
61 }
62
63 /* Return nested_function_info possibly creating new one. */
64 nested_function_info *
65 nested_function_info::get_create (cgraph_node *node)
66 {
67 if (!nested_function_sum)
68 nested_function_sum = new function_summary <nested_function_info *>
69 (symtab);
70 return nested_function_sum->get_create (node);
71 }
72
/* NODE is no longer a nested function; unlink it from the singly-linked
   list of functions nested inside its origin and drop its summary entry.  */
void
unnest_function (cgraph_node *node)
{
  nested_function_info *info = nested_function_info::get (node);
  /* Address of the link currently pointing at NODE, starting at the
     head of the origin's nested-function list.  */
  cgraph_node **node2 = &nested_function_info::get
			 (nested_function_origin (node))->nested;

  gcc_checking_assert (info->origin);
  /* Walk the chain until NODE2 is the link referring to NODE ...  */
  while (*node2 != node)
    node2 = &nested_function_info::get (*node2)->next_nested;
  /* ... and splice NODE out of the list.  */
  *node2 = info->next_nested;
  info->next_nested = NULL;
  info->origin = NULL;
  nested_function_sum->remove (node);
}
89
/* Destructor: unlink function from nested function lists.  */
nested_function_info::~nested_function_info ()
{
  cgraph_node *next;
  /* Detach every function nested inside this one; afterwards they are
     no longer on any nesting list.  */
  for (cgraph_node *n = nested; n; n = next)
    {
      nested_function_info *info = nested_function_info::get (n);
      next = info->next_nested;
      info->origin = NULL;
      info->next_nested = NULL;
    }
  nested = NULL;
  /* If this function is itself nested, splice it out of its origin's
     list of nested functions.  */
  if (origin)
    {
      cgraph_node **node2
	 = &nested_function_info::get (origin)->nested;

      nested_function_info *info;
      /* Advance until *NODE2's info is this very object (the INFO
	 null-check guards against a list not containing us).  */
      while ((info = nested_function_info::get (*node2)) != this && info)
	node2 = &info->next_nested;
      *node2 = next_nested;
    }
}
113
114 /* Free nested function info summaries. */
115 void
116 nested_function_info::release ()
117 {
118 if (nested_function_sum)
119 delete (nested_function_sum);
120 nested_function_sum = NULL;
121 }
122
123 /* If NODE is nested function, record it. */
124 void
125 maybe_record_nested_function (cgraph_node *node)
126 {
127 if (DECL_CONTEXT (node->decl)
128 && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
129 {
130 cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
131 nested_function_info *info = nested_function_info::get_create (node);
132 nested_function_info *origin_info
133 = nested_function_info::get_create (origin);
134
135 info->origin = origin;
136 info->next_nested = origin_info->nested;
137 origin_info->nested = node;
138 }
139 }
140
141 /* The object of this pass is to lower the representation of a set of nested
142 functions in order to expose all of the gory details of the various
143 nonlocal references. We want to do this sooner rather than later, in
144 order to give us more freedom in emitting all of the functions in question.
145
146 Back in olden times, when gcc was young, we developed an insanely
147 complicated scheme whereby variables which were referenced nonlocally
148 were forced to live in the stack of the declaring function, and then
149 the nested functions magically discovered where these variables were
150 placed. In order for this scheme to function properly, it required
151 that the outer function be partially expanded, then we switch to
152 compiling the inner function, and once done with those we switch back
   to compiling the outer function.  Such delicate ordering requirements
   make it difficult to do whole translation unit optimizations
155 involving such functions.
156
157 The implementation here is much more direct. Everything that can be
158 referenced by an inner function is a member of an explicitly created
159 structure herein called the "nonlocal frame struct". The incoming
160 static chain for a nested function is a pointer to this struct in
161 the parent. In this way, we settle on known offsets from a known
162 base, and so are decoupled from the logic that places objects in the
163 function's stack frame. More importantly, we don't have to wait for
164 that to happen -- since the compilation of the inner function is no
165 longer tied to a real stack frame, the nonlocal frame struct can be
166 allocated anywhere. Which means that the outer function is now
167 inlinable.
168
169 Theory of operation here is very simple. Iterate over all the
170 statements in all the functions (depth first) several times,
171 allocating structures and fields on demand. In general we want to
172 examine inner functions first, so that we can avoid making changes
173 to outer functions which are unnecessary.
174
175 The order of the passes matters a bit, in that later passes will be
176 skipped if it is discovered that the functions don't actually interact
177 at all. That is, they're nested in the lexical sense but could have
178 been written as independent functions without change. */
179
180
/* Per-function state used while lowering nested functions.  One node
   exists for every function in the nesting tree.  */
struct nesting_info
{
  /* Tree links: enclosing function, first contained function, and next
     sibling nested within the same enclosing function.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Maps a local DECL to the FIELD_DECL created for it in the
     non-local frame struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps DECLs to their replacement elements, e.g. the TREE_LIST
     holding trampoline/descriptor fields for a nested function
     (see lookup_element_for_decl) or nonlocal debug decls.  */
  hash_map<tree, tree> *var_map;
  /* Set of memory-reference operand addresses; populated by code
     outside this chunk -- NOTE(review): confirm exact use.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap allocated on nesting_info_bitmap_obstack; presumably ids
     of decls whose expansion is suppressed -- used elsewhere.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this node describes.  */
  tree context;
  /* Chain of new temporaries to register in CONTEXT (see
     create_tmp_var_for).  */
  tree new_local_var_chain;
  /* Chain of debug-only variables; populated outside this chunk.  */
  tree debug_var_chain;
  /* RECORD_TYPE of the non-local frame struct and the VAR_DECL holding
     the frame instance (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* FIELD_DECL in our frame pointing to the parent frame (see
     get_chain_field) and the PARM_DECL receiving the incoming static
     chain (see get_chain_decl).  */
  tree chain_field;
  tree chain_decl;
  /* FIELD_DECL holding the non-local goto "jmp_buf" (see
     get_nl_goto_field).  */
  tree nl_goto_field;

  /* True if CONTEXT is a thunk (copied from cgraph_node::thunk).  */
  bool thunk_p;
  /* True once some PARM_DECL got a field in the frame.  */
  bool any_parm_remapped;
  /* True once a trampoline resp. descriptor field was created.  */
  bool any_tramp_created;
  bool any_descr_created;
  /* Bit 1: frame address used as a static chain; bit 2: the incoming
     chain decl was used (see get_static_chain).  */
  char static_chain_added;
};
207
208
209 /* Iterate over the nesting tree, starting with ROOT, depth first. */
210
211 static inline struct nesting_info *
212 iter_nestinfo_start (struct nesting_info *root)
213 {
214 while (root->inner)
215 root = root->inner;
216 return root;
217 }
218
219 static inline struct nesting_info *
220 iter_nestinfo_next (struct nesting_info *node)
221 {
222 if (node->next)
223 return iter_nestinfo_start (node->next);
224 return node->outer;
225 }
226
227 #define FOR_EACH_NEST_INFO(I, ROOT) \
228 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
229
230 /* Obstack used for the bitmaps in the struct above. */
231 static struct bitmap_obstack nesting_info_bitmap_obstack;
232
233
234 /* We're working in so many different function contexts simultaneously,
235 that create_tmp_var is dangerous. Prevent mishap. */
236 #define create_tmp_var cant_use_create_tmp_var_here_dummy
237
238 /* Like create_tmp_var, except record the variable for registration at
239 the given nesting level. */
240
241 static tree
242 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
243 {
244 tree tmp_var;
245
246 /* If the type is of variable size or a type which must be created by the
247 frontend, something is wrong. Note that we explicitly allow
248 incomplete types here, since we create them ourselves here. */
249 gcc_assert (!TREE_ADDRESSABLE (type));
250 gcc_assert (!TYPE_SIZE_UNIT (type)
251 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
252
253 tmp_var = create_tmp_var_raw (type, prefix);
254 DECL_CONTEXT (tmp_var) = info->context;
255 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
256 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
257
258 info->new_local_var_chain = tmp_var;
259
260 return tmp_var;
261 }
262
263 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
264
265 static tree
266 build_simple_mem_ref_notrap (tree ptr)
267 {
268 tree t = build_simple_mem_ref (ptr);
269 TREE_THIS_NOTRAP (t) = 1;
270 return t;
271 }
272
/* Take the address of EXP, marking it addressable as necessary.  */

tree
build_addr (tree exp)
{
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}
282
283 /* Insert FIELD into TYPE, sorted by alignment requirements. */
284
285 void
286 insert_field_into_struct (tree type, tree field)
287 {
288 tree *p;
289
290 DECL_CONTEXT (field) = type;
291
292 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
293 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
294 break;
295
296 DECL_CHAIN (field) = *p;
297 *p = field;
298
299 /* Set correct alignment for frame struct type. */
300 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
301 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
302 }
303
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  /* Created lazily on first use and cached in INFO.  */
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the frame struct after the enclosing function:
	 "FRAME.<function-name>".  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;

      /* Do not put info->frame_decl on info->new_local_var_chain,
	 so that we can declare it in the lexical blocks, which
	 makes sure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs.  */
      info->frame_decl = create_tmp_var_raw (type, "FRAME");
      DECL_CONTEXT (info->frame_decl) = info->context;
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }

  return type;
}
349
350 /* Return true if DECL should be referenced by pointer in the non-local frame
351 structure. */
352
353 static bool
354 use_pointer_in_frame (tree decl)
355 {
356 if (TREE_CODE (decl) == PARM_DECL)
357 {
358 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
359 sized DECLs, and inefficient to copy large aggregates. Don't bother
360 moving anything but scalar parameters. */
361 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
362 }
363 else
364 {
365 /* Variable-sized DECLs can only come from OMP clauses at this point
366 since the gimplifier has already turned the regular variables into
367 pointers. Do the same as the gimplifier. */
368 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
369 }
370 }
371
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return NULL_TREE when no field exists yet.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The object stays where it is; the frame only records a
	     pointer to it.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object itself lives in the frame; mirror the DECL's
	     relevant attributes onto the new field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_NO_WARNING (field) = TREE_NO_WARNING (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
447
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  /* Created lazily on first use and cached in INFO.  */
  if (!decl)
    {
      tree type;

      /* The chain is a pointer to the enclosing function's frame.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Report the first time we decide CONTEXT needs a static chain.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
493
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  /* Created lazily on first use and cached in INFO.  */
  if (!field)
    {
      /* The field points to the enclosing function's frame.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      /* Report the first time we decide CONTEXT needs a static chain.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
527
528 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
529
530 static tree
531 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
532 gcall *call)
533 {
534 tree t;
535
536 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
537 gimple_call_set_lhs (call, t);
538 if (! gsi_end_p (*gsi))
539 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
540 gsi_insert_before (gsi, call, GSI_SAME_STMT);
541
542 return t;
543 }
544
545
546 /* Copy EXP into a temporary. Allocate the temporary in the context of
547 INFO and insert the initialization statement before GSI. */
548
549 static tree
550 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
551 {
552 tree t;
553 gimple *stmt;
554
555 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
556 stmt = gimple_build_assign (t, exp);
557 if (! gsi_end_p (*gsi))
558 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
559 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
560
561 return t;
562 }
563
564
565 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
566
567 static tree
568 gsi_gimplify_val (struct nesting_info *info, tree exp,
569 gimple_stmt_iterator *gsi)
570 {
571 if (is_gimple_val (exp))
572 return exp;
573 else
574 return init_tmp_var (info, exp, gsi);
575 }
576
577 /* Similarly, but copy from the temporary and insert the statement
578 after the iterator. */
579
580 static tree
581 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
582 {
583 tree t;
584 gimple *stmt;
585
586 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
587 stmt = gimple_build_assign (exp, t);
588 if (! gsi_end_p (*gsi))
589 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
590 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
591
592 return t;
593 }
594
/* Build or return the type used to represent a nested function trampoline.
   The type is target-wide, hence the file-scope cache.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* The trampoline is a RECORD_TYPE with a single "__data" field:
     an array of SIZE bytes aligned to ALIGN.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
634
/* Build or return the type used to represent a nested function descriptor.
   The type is target-wide, hence the file-scope cache.  */

static GTY(()) tree descriptor_type;

static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  if (descriptor_type)
    return descriptor_type;

  /* The descriptor is a RECORD_TYPE with a single "__data" field:
     an array of two pointers.  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
664
665 /* Given DECL, a nested function, find or create an element in the
666 var map for this function. */
667
668 static tree
669 lookup_element_for_decl (struct nesting_info *info, tree decl,
670 enum insert_option insert)
671 {
672 if (insert == NO_INSERT)
673 {
674 tree *slot = info->var_map->get (decl);
675 return slot ? *slot : NULL_TREE;
676 }
677
678 tree *slot = &info->var_map->get_or_insert (decl);
679 if (!*slot)
680 *slot = build_tree_list (NULL_TREE, NULL_TREE);
681
682 return (tree) *slot;
683 }
684
685 /* Given DECL, a nested function, create a field in the non-local
686 frame structure for this function. */
687
688 static tree
689 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
690 {
691 tree field = make_node (FIELD_DECL);
692 DECL_NAME (field) = DECL_NAME (decl);
693 TREE_TYPE (field) = type;
694 TREE_ADDRESSABLE (field) = 1;
695 insert_field_into_struct (get_frame_type (info), field);
696 return field;
697 }
698
699 /* Given DECL, a nested function, find or create a field in the non-local
700 frame structure for a trampoline for this function. */
701
702 static tree
703 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
704 enum insert_option insert)
705 {
706 tree elt, field;
707
708 elt = lookup_element_for_decl (info, decl, insert);
709 if (!elt)
710 return NULL_TREE;
711
712 field = TREE_PURPOSE (elt);
713
714 if (!field && insert == INSERT)
715 {
716 field = create_field_for_decl (info, decl, get_trampoline_type (info));
717 TREE_PURPOSE (elt) = field;
718 info->any_tramp_created = true;
719 }
720
721 return field;
722 }
723
724 /* Given DECL, a nested function, find or create a field in the non-local
725 frame structure for a descriptor for this function. */
726
727 static tree
728 lookup_descr_for_decl (struct nesting_info *info, tree decl,
729 enum insert_option insert)
730 {
731 tree elt, field;
732
733 elt = lookup_element_for_decl (info, decl, insert);
734 if (!elt)
735 return NULL_TREE;
736
737 field = TREE_VALUE (elt);
738
739 if (!field && insert == INSERT)
740 {
741 field = create_field_for_decl (info, decl, get_descriptor_type (info));
742 TREE_VALUE (elt) = field;
743 info->any_descr_created = true;
744 }
745
746 return field;
747 }
748
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  /* Created lazily on first use and cached in INFO.  */
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Number of pointer-sized words covering the save area, plus one
	 word for the frame pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
793
794 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
795
796 static void
797 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
798 struct nesting_info *info, gimple_seq *pseq)
799 {
800 struct walk_stmt_info wi;
801
802 memset (&wi, 0, sizeof (wi));
803 wi.info = info;
804 wi.val_only = true;
805 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
806 }
807
808
809 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
810
811 static inline void
812 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
813 struct nesting_info *info)
814 {
815 gimple_seq body = gimple_body (info->context);
816 walk_body (callback_stmt, callback_op, info, &body);
817 gimple_set_body (info->context, body);
818 }
819
/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's pre-body and
   on its index, initial, final and increment expressions.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* The pre-body is an ordinary statement sequence.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the clause operands with an iterator over an initially empty
     sequence, so any statements the callback inserts are collected
     there rather than in the loop body.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue; don't force it into an rvalue temp.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression; walk its two operands
	 individually.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Append any statements emitted during the walk to the pre-body, so
     they execute before the loop.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
872
873 /* Similarly for ROOT and all functions nested underneath, depth first. */
874
875 static void
876 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
877 struct nesting_info *root)
878 {
879 struct nesting_info *n;
880 FOR_EACH_NEST_INFO (n, root)
881 walk_function (callback_stmt, callback_op, n);
882 }
883
884
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Look at every function nested directly inside FNDECL ...  */
  for (cgn = first_nested_function (cgn); cgn;
       cgn = next_nested_function (cgn))
    {
      /* ... check whether any of its parameter types is variably
	 modified by something in ORIG_FNDECL ...  */
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      /* ... and recurse into functions nested more deeply.  */
      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
928
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;
  info->thunk_p = cgn->thunk;

  /* Recursively build subtrees for the nested functions, pushing each
     at the head of INFO's list of inner functions.  */
  for (cgn = first_nested_function (cgn); cgn;
       cgn = next_nested_function (cgn))
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See the discussion at check_for_nested_with_variably_modified for
     why this check has to be done here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
959
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target frame is our own; its address is the chain.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and follow the __chain
	 fields up one nesting level at a time until we reach the frame
	 of TARGET_CONTEXT.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
992
993
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and follow the __chain
	 fields up one nesting level at a time until we reach the frame
	 of TARGET_CONTEXT.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      /* Dereference the final pointer to get at the frame itself.  */
      x = build_simple_mem_ref_notrap (x);
    }

  /* Finally select FIELD within the frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
1032
static void note_nonlocal_vla_type (struct nesting_info *info, tree type);

/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  /* Reuse a previously created debug decl for DECL, if we made one.  */
  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref_notrap (x);
    }

  /* I is now the nesting_info whose frame holds DECL; build the frame
     component reference, with an extra indirection if DECL is stored in
     the frame by pointer.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref_notrap (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  /* Mirror the relevant flags of the original decl on the replacement.  */
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl evaluates to the frame field built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* At -O0, also create debug decls for any nonlocal VLA bounds referenced
     by DECL's type, so the debugger can evaluate the type.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
1112
1113
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* References to this function's own decls need no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	/* OMP clause processing may have asked for a debug decl instead
	   of a frame reference (see convert_nonlocal_omp_clauses).  */
	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	/* In a context that wants a simple value, load (or for an LHS,
	   store through) a temporary.  */
	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* TP now points at the base object; convert it without forcing
	 a value.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1273
static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  Returns true if any
   rewritten clause requires the static chain to be passed in (i.e. the
   enclosing construct needs a firstprivate'd chain decl).  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  /* Decls named directly in clauses are replaced with debug decls rather
     than frame references; record them so convert_nonlocal_reference_op
     suppresses its usual expansion for them inside the region.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  /* For array reductions the decl is wrapped in a MEM_REF;
	     dig down to the underlying decl.  */
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (TREE_CODE (*pdecl) == INDIRECT_REF
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression is an operand too; rewrite it first.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  /* Common handling for clauses that name a decl: replace a
	     nonlocal decl with a debug decl and suppress its expansion.  */
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      *pdecl = get_nonlocal_debug_decl (info, decl);
	      /* private doesn't need the outer value, so no chain.  */
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* Rewrite the size expression, then either treat a plain decl
	     like the decl clauses above or walk an arbitrary expression.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* These clauses reference no decls or expressions.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses, with the reduction placeholder decls
     temporarily re-parented into this function.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      tree save_local_var_chain = info->new_local_var_chain;
	      info->new_local_var_chain = NULL;
	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = NULL;
	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = save_local_var_chain;
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      declare_vars (info->new_local_var_chain,
			    gimple_seq_first_stmt (*seq), false);
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	case OMP_CLAUSE_LINEAR:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      declare_vars (info->new_local_var_chain,
			    gimple_seq_first_stmt (*seq), false);
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	default:
	  break;
	}

  return need_chain;
}
1576
1577 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1578
1579 static void
1580 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1581 {
1582 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1583 type = TREE_TYPE (type);
1584
1585 if (TYPE_NAME (type)
1586 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1587 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1588 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1589
1590 while (POINTER_TYPE_P (type)
1591 || TREE_CODE (type) == VECTOR_TYPE
1592 || TREE_CODE (type) == FUNCTION_TYPE
1593 || TREE_CODE (type) == METHOD_TYPE)
1594 type = TREE_TYPE (type);
1595
1596 if (TREE_CODE (type) == ARRAY_TYPE)
1597 {
1598 tree domain, t;
1599
1600 note_nonlocal_vla_type (info, TREE_TYPE (type));
1601 domain = TYPE_DOMAIN (type);
1602 if (domain)
1603 {
1604 t = TYPE_MIN_VALUE (domain);
1605 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1606 && decl_function_context (t) != info->context)
1607 get_nonlocal_debug_decl (info, t);
1608 t = TYPE_MAX_VALUE (domain);
1609 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1610 && decl_function_context (t) != info->context)
1611 get_nonlocal_debug_decl (info, t);
1612 }
1613 }
1614 }
1615
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* A host teams construct needs no chain decl; only the offloaded
	 form (fallthrough below) is treated like parallel/task.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If the clauses referenced the chain, firstprivate the chain decl
	 into the region.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      /* Temporaries created inside the region must be declared there.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions are handled like the simple
	 constructs above; offloaded ones need the chain mapped in.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* No clauses to convert; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (VAR_P (decl)
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a nonlocal decl is meaningless here; drop it rather
	 than rewriting it into a frame store.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1829
1830
/* A subroutine of convert_local_reference.  Create a local variable
   in the parent function with DECL_VALUE_EXPR set to reference the
   field in FRAME.  This is used both for debug info and in OMP
   lowering.  */

static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;

  /* Reuse a previously created debug decl for DECL, if we made one.  */
  tree *slot = &info->var_map->get_or_insert (decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  /* Mirror the relevant flags of the original decl on the replacement.  */
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl evaluates to the frame field built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
1878
1879
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* OMP clause handling asked for a debug decl instead of a
	     direct frame reference.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, t, field);
	  else
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a context that wants a simple value, go through a
	     temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* TP now points at the base object; convert it without forcing
	 a value.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
2031
2032 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2033 struct walk_stmt_info *);
2034
2035 /* Helper for convert_local_reference. Convert all the references in
2036 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
2037
2038 static bool
2039 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
2040 {
2041 struct nesting_info *const info = (struct nesting_info *) wi->info;
2042 bool need_frame = false, need_stmts = false;
2043 tree clause, decl, *pdecl;
2044 int dummy;
2045 bitmap new_suppress;
2046
2047 new_suppress = BITMAP_GGC_ALLOC ();
2048 bitmap_copy (new_suppress, info->suppress_expansion);
2049
2050 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2051 {
2052 pdecl = NULL;
2053 switch (OMP_CLAUSE_CODE (clause))
2054 {
2055 case OMP_CLAUSE_REDUCTION:
2056 case OMP_CLAUSE_IN_REDUCTION:
2057 case OMP_CLAUSE_TASK_REDUCTION:
2058 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2059 need_stmts = true;
2060 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
2061 {
2062 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
2063 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
2064 pdecl = &TREE_OPERAND (*pdecl, 0);
2065 if (TREE_CODE (*pdecl) == INDIRECT_REF
2066 || TREE_CODE (*pdecl) == ADDR_EXPR)
2067 pdecl = &TREE_OPERAND (*pdecl, 0);
2068 }
2069 goto do_decl_clause;
2070
2071 case OMP_CLAUSE_LASTPRIVATE:
2072 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
2073 need_stmts = true;
2074 goto do_decl_clause;
2075
2076 case OMP_CLAUSE_LINEAR:
2077 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
2078 need_stmts = true;
2079 wi->val_only = true;
2080 wi->is_lhs = false;
2081 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
2082 wi);
2083 goto do_decl_clause;
2084
2085 case OMP_CLAUSE_PRIVATE:
2086 case OMP_CLAUSE_FIRSTPRIVATE:
2087 case OMP_CLAUSE_COPYPRIVATE:
2088 case OMP_CLAUSE_SHARED:
2089 case OMP_CLAUSE_TO_DECLARE:
2090 case OMP_CLAUSE_LINK:
2091 case OMP_CLAUSE_USE_DEVICE_PTR:
2092 case OMP_CLAUSE_USE_DEVICE_ADDR:
2093 case OMP_CLAUSE_IS_DEVICE_PTR:
2094 do_decl_clause:
2095 if (pdecl == NULL)
2096 pdecl = &OMP_CLAUSE_DECL (clause);
2097 decl = *pdecl;
2098 if (VAR_P (decl)
2099 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2100 break;
2101 if (decl_function_context (decl) == info->context
2102 && !use_pointer_in_frame (decl))
2103 {
2104 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2105 if (field)
2106 {
2107 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
2108 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
2109 bitmap_set_bit (new_suppress, DECL_UID (decl));
2110 *pdecl = get_local_debug_decl (info, decl, field);
2111 need_frame = true;
2112 }
2113 }
2114 break;
2115
2116 case OMP_CLAUSE_SCHEDULE:
2117 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
2118 break;
2119 /* FALLTHRU */
2120 case OMP_CLAUSE_FINAL:
2121 case OMP_CLAUSE_IF:
2122 case OMP_CLAUSE_NUM_THREADS:
2123 case OMP_CLAUSE_DEPEND:
2124 case OMP_CLAUSE_DEVICE:
2125 case OMP_CLAUSE_NUM_TEAMS:
2126 case OMP_CLAUSE_THREAD_LIMIT:
2127 case OMP_CLAUSE_SAFELEN:
2128 case OMP_CLAUSE_SIMDLEN:
2129 case OMP_CLAUSE_PRIORITY:
2130 case OMP_CLAUSE_GRAINSIZE:
2131 case OMP_CLAUSE_NUM_TASKS:
2132 case OMP_CLAUSE_HINT:
2133 case OMP_CLAUSE_NUM_GANGS:
2134 case OMP_CLAUSE_NUM_WORKERS:
2135 case OMP_CLAUSE_VECTOR_LENGTH:
2136 case OMP_CLAUSE_GANG:
2137 case OMP_CLAUSE_WORKER:
2138 case OMP_CLAUSE_VECTOR:
2139 case OMP_CLAUSE_ASYNC:
2140 case OMP_CLAUSE_WAIT:
2141 /* Several OpenACC clauses have optional arguments. Check if they
2142 are present. */
2143 if (OMP_CLAUSE_OPERAND (clause, 0))
2144 {
2145 wi->val_only = true;
2146 wi->is_lhs = false;
2147 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2148 &dummy, wi);
2149 }
2150
2151 /* The gang clause accepts two arguments. */
2152 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2153 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2154 {
2155 wi->val_only = true;
2156 wi->is_lhs = false;
2157 convert_nonlocal_reference_op
2158 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2159 }
2160 break;
2161
2162 case OMP_CLAUSE_DIST_SCHEDULE:
2163 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2164 {
2165 wi->val_only = true;
2166 wi->is_lhs = false;
2167 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2168 &dummy, wi);
2169 }
2170 break;
2171
2172 case OMP_CLAUSE_MAP:
2173 case OMP_CLAUSE_TO:
2174 case OMP_CLAUSE_FROM:
2175 if (OMP_CLAUSE_SIZE (clause))
2176 {
2177 wi->val_only = true;
2178 wi->is_lhs = false;
2179 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2180 &dummy, wi);
2181 }
2182 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2183 goto do_decl_clause;
2184 wi->val_only = true;
2185 wi->is_lhs = false;
2186 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2187 wi, NULL);
2188 break;
2189
2190 case OMP_CLAUSE_ALIGNED:
2191 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2192 {
2193 wi->val_only = true;
2194 wi->is_lhs = false;
2195 convert_local_reference_op
2196 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2197 }
2198 /* FALLTHRU */
2199 case OMP_CLAUSE_NONTEMPORAL:
2200 /* Like do_decl_clause, but don't add any suppression. */
2201 decl = OMP_CLAUSE_DECL (clause);
2202 if (VAR_P (decl)
2203 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2204 break;
2205 if (decl_function_context (decl) == info->context
2206 && !use_pointer_in_frame (decl))
2207 {
2208 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2209 if (field)
2210 {
2211 OMP_CLAUSE_DECL (clause)
2212 = get_local_debug_decl (info, decl, field);
2213 need_frame = true;
2214 }
2215 }
2216 break;
2217
2218 case OMP_CLAUSE_NOWAIT:
2219 case OMP_CLAUSE_ORDERED:
2220 case OMP_CLAUSE_DEFAULT:
2221 case OMP_CLAUSE_COPYIN:
2222 case OMP_CLAUSE_COLLAPSE:
2223 case OMP_CLAUSE_TILE:
2224 case OMP_CLAUSE_UNTIED:
2225 case OMP_CLAUSE_MERGEABLE:
2226 case OMP_CLAUSE_PROC_BIND:
2227 case OMP_CLAUSE_NOGROUP:
2228 case OMP_CLAUSE_THREADS:
2229 case OMP_CLAUSE_SIMD:
2230 case OMP_CLAUSE_DEFAULTMAP:
2231 case OMP_CLAUSE_ORDER:
2232 case OMP_CLAUSE_SEQ:
2233 case OMP_CLAUSE_INDEPENDENT:
2234 case OMP_CLAUSE_AUTO:
2235 case OMP_CLAUSE_IF_PRESENT:
2236 case OMP_CLAUSE_FINALIZE:
2237 case OMP_CLAUSE__CONDTEMP_:
2238 case OMP_CLAUSE__SCANTEMP_:
2239 break;
2240
2241 /* The following clause belongs to the OpenACC cache directive, which
2242 is discarded during gimplification. */
2243 case OMP_CLAUSE__CACHE_:
2244 /* The following clauses are only allowed in the OpenMP declare simd
2245 directive, so not seen here. */
2246 case OMP_CLAUSE_UNIFORM:
2247 case OMP_CLAUSE_INBRANCH:
2248 case OMP_CLAUSE_NOTINBRANCH:
2249 /* The following clauses are only allowed on OpenMP cancel and
2250 cancellation point directives, which at this point have already
2251 been lowered into a function call. */
2252 case OMP_CLAUSE_FOR:
2253 case OMP_CLAUSE_PARALLEL:
2254 case OMP_CLAUSE_SECTIONS:
2255 case OMP_CLAUSE_TASKGROUP:
2256 /* The following clauses are only added during OMP lowering; nested
2257 function decomposition happens before that. */
2258 case OMP_CLAUSE__LOOPTEMP_:
2259 case OMP_CLAUSE__REDUCTEMP_:
2260 case OMP_CLAUSE__SIMDUID_:
2261 case OMP_CLAUSE__SIMT_:
2262 /* Anything else. */
2263 default:
2264 gcc_unreachable ();
2265 }
2266 }
2267
2268 info->suppress_expansion = new_suppress;
2269
2270 if (need_stmts)
2271 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2272 switch (OMP_CLAUSE_CODE (clause))
2273 {
2274 case OMP_CLAUSE_REDUCTION:
2275 case OMP_CLAUSE_IN_REDUCTION:
2276 case OMP_CLAUSE_TASK_REDUCTION:
2277 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2278 {
2279 tree old_context
2280 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2281 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2282 = info->context;
2283 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2284 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2285 = info->context;
2286 walk_body (convert_local_reference_stmt,
2287 convert_local_reference_op, info,
2288 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2289 walk_body (convert_local_reference_stmt,
2290 convert_local_reference_op, info,
2291 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2292 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2293 = old_context;
2294 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2295 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2296 = old_context;
2297 }
2298 break;
2299
2300 case OMP_CLAUSE_LASTPRIVATE:
2301 walk_body (convert_local_reference_stmt,
2302 convert_local_reference_op, info,
2303 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2304 break;
2305
2306 case OMP_CLAUSE_LINEAR:
2307 walk_body (convert_local_reference_stmt,
2308 convert_local_reference_op, info,
2309 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2310 break;
2311
2312 default:
2313 break;
2314 }
2315
2316 return need_frame;
2317 }
2318
2319
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   GSI points at the statement being visited; WI->info is the nesting_info
   for the current function.  Sets *HANDLED_OPS_P to true when this function
   has walked the statement's operands itself (so the generic walker must
   not), false when the walker should traverse them.  Always returns
   NULL_TREE (never aborts the walk).  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* Host teams (inside target) are handled like other simple OMP
	 constructs; offloaded teams fall through to the taskreg path.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If any clause needed the frame, share FRAME.* with the construct.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Track frame/chain additions made inside the body separately.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* If the body itself needed the frame and we did not already add the
	 SHARED clause above, add it now.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while walking the body belong to the body.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  /* NOTE(review): unlike the other non-offloaded cases the
	     suppression bitmap is restored *before* walking the body here —
	     presumably intentional, so clause suppressions do not apply
	     inside a non-offloaded target body; confirm against the
	     nonlocal counterpart.  */
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* For offloaded regions map the whole frame to/from the device.  */
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* Body needed the frame but no MAP clause was added above.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* No clauses to convert; just recurse into the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands must be rvalues.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  /* A clobber of a variable that has been moved into the frame is
	     meaningless for the original decl; drop the statement.  */
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2552
2553
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.

   Sets *HANDLED_OPS_P and always returns NULL_TREE, per the
   walk_gimple_stmt callback contract.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos have a non-LABEL_DECL destination; leave them alone.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label of the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the enclosing function that owns the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2620
2621
2622 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2623 are referenced via nonlocal goto from a nested function. The rewrite
2624 will involve installing a newly generated DECL_NONLOCAL label, and
2625 (potentially) a branch around the rtl gunk that is assumed to be
2626 attached to such a label. */
2627
2628 static tree
2629 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2630 struct walk_stmt_info *wi)
2631 {
2632 struct nesting_info *const info = (struct nesting_info *) wi->info;
2633 tree label, new_label;
2634 gimple_stmt_iterator tmp_gsi;
2635 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2636
2637 if (!stmt)
2638 {
2639 *handled_ops_p = false;
2640 return NULL_TREE;
2641 }
2642
2643 label = gimple_label_label (stmt);
2644
2645 tree *slot = info->var_map->get (label);
2646 if (!slot)
2647 {
2648 *handled_ops_p = false;
2649 return NULL_TREE;
2650 }
2651
2652 /* If there's any possibility that the previous statement falls through,
2653 then we must branch around the new non-local label. */
2654 tmp_gsi = wi->gsi;
2655 gsi_prev (&tmp_gsi);
2656 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2657 {
2658 gimple *stmt = gimple_build_goto (label);
2659 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2660 }
2661
2662 new_label = (tree) *slot;
2663 stmt = gimple_build_label (new_label);
2664 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2665
2666 *handled_ops_p = true;
2667 return NULL_TREE;
2668 }
2669
2670
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference a trampoline generated for the occasion.

   TP points at the tree being visited; DATA is the walk_stmt_info whose
   info field holds the current function's nesting_info.  Clears
   *WALK_SUBTREES except where traversal should continue.  Always returns
   NULL_TREE.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline.
	 Descriptors are used when the target supports them and the user
	 has not forced trampolines with -ftrampolines.  */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2754
2755
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference a trampoline
   generated for the occasion.

   Sets *HANDLED_OPS_P and always returns NULL_TREE, per the
   walk_gimple_stmt callback contract.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Host teams need no special treatment; only offloaded teams take
	 the parallel/task path below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    do_parallel:
      {
	/* Walk the construct's own operands, then its body with fresh
	   var-chain and static-chain-added tracking, so temporaries and
	   CHAIN/FRAME clause additions attach to this construct.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 of static_chain_added tracks FRAME.*, bit 1 CHAIN.*.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    /* NOTE(review): gimple_omp_taskreg_clauses is also reached here
	       when STMT is a GIMPLE_OMP_TARGET (via the fallthru above) —
	       verify the accessor accepts target statements in this tree.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* Parallel/task/teams: share FRAME.*, firstprivatize
		   CHAIN.*.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target: map the decl to the device instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2860
2861
2862
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.

   Also propagates FRAME.*/CHAIN.* requirements out of OMP constructs by
   adding SHARED/FIRSTPRIVATE (or MAP, for offloaded targets) clauses.
   Sets *HANDLED_OPS_P and always returns NULL_TREE.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A chain is already set (e.g. an indirect call through an adjusted
	 trampoline); nothing to do.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Record whether we needed our own frame (bit 0) or the parent's
	     chain (bit 1).  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Track chain requirements of the body separately, then surface them
	 as clauses on this construct.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Offloaded target: the decl must be mapped rather than
		 shared.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
3007
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks are handled in a separate pass below, by copying the
	 answer from the function they forward to.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  /* No use of the static chain has been recorded so far; guess
	     it is unneeded.  The iteration below revisits this guess.  */
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* A thunk needs a static chain exactly when its alias (the function it
     forwards to) does.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate the static-chain decision to thunks after each
	 round, since their aliases may have changed.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  /* Iterate until the number of functions needing a static chain reaches
     a fixed point.  */
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
3097
/* Payload threaded through the tree-inline copy_body machinery by
   nesting_copy_decl: the standard copy_body_data plus the nesting_info
   whose var_map drives the decl remapping.  */

struct nesting_copy_body_data
{
  copy_body_data cb;	/* Must be first: callbacks cast back from &cb.  */
  struct nesting_info *root;
};
3103
3104 /* A helper subroutine for debug_var_chain type remapping. */
3105
3106 static tree
3107 nesting_copy_decl (tree decl, copy_body_data *id)
3108 {
3109 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
3110 tree *slot = nid->root->var_map->get (decl);
3111
3112 if (slot)
3113 return (tree) *slot;
3114
3115 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
3116 {
3117 tree new_decl = copy_decl_no_change (decl, id);
3118 DECL_ORIGINAL_TYPE (new_decl)
3119 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
3120 return new_decl;
3121 }
3122
3123 if (VAR_P (decl)
3124 || TREE_CODE (decl) == PARM_DECL
3125 || TREE_CODE (decl) == RESULT_DECL)
3126 return decl;
3127
3128 return copy_decl_no_change (decl, id);
3129 }
3130
3131 /* A helper function for remap_vla_decls. See if *TP contains
3132 some remapped variables. */
3133
3134 static tree
3135 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3136 {
3137 struct nesting_info *root = (struct nesting_info *) data;
3138 tree t = *tp;
3139
3140 if (DECL_P (t))
3141 {
3142 *walk_subtrees = 0;
3143 tree *slot = root->var_map->get (t);
3144
3145 if (slot)
3146 return *slot;
3147 }
3148 return NULL;
3149 }
3150
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process inner scopes first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap pre-scan: find the first VLA-like variable whose value
     expression or type mentions a remapped variable.  If none exists we
     can return without setting up the copy_body machinery at all.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *ptr-style value expressions of variably modified type
	   are of interest here.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Continue from the variable found above; earlier ones were already
     rejected by the pre-scan.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Locate the nesting level the variable belongs to; skip it if
	   it does not belong to this tree at all.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Walk through anonymous pointer layers in parallel so the
	   TYPE_NAME comparison below is made at matching depth.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If remapping left the typedef decl shared with the old type,
	   remap it too so debug info refers to the new copy.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3248
3249 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3250 involved. */
3251
3252 static void
3253 fixup_vla_decls (tree block)
3254 {
3255 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3256 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3257 {
3258 tree val = DECL_VALUE_EXPR (var);
3259
3260 if (!(TREE_CODE (val) == INDIRECT_REF
3261 && VAR_P (TREE_OPERAND (val, 0))
3262 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3263 continue;
3264
3265 /* Fully expand value expressions. This avoids having debug variables
3266 only referenced from them and that can be swept during GC. */
3267 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3268 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3269 SET_DECL_VALUE_EXPR (var, val);
3270 }
3271
3272 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3273 fixup_vla_decls (sub);
3274 }
3275
3276 /* Fold the MEM_REF *E. */
3277 bool
3278 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3279 {
3280 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3281 *ref_p = fold (*ref_p);
3282 return true;
3283 }
3284
3285 /* Given DECL, a nested function, build an initialization call for FIELD,
3286 the trampoline or descriptor for DECL, using FUNC as the function. */
3287
3288 static gcall *
3289 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3290 tree func)
3291 {
3292 tree arg1, arg2, arg3, x;
3293
3294 gcc_assert (DECL_STATIC_CHAIN (decl));
3295 arg3 = build_addr (info->frame_decl);
3296
3297 arg2 = build_addr (decl);
3298
3299 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3300 info->frame_decl, field, NULL_TREE);
3301 arg1 = build_addr (x);
3302
3303 return gimple_build_call (func, 3, arg1, arg2, arg3);
3304 }
3305
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks carry no body of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
	= builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      /* Prepend the initializations to the existing body of the
	 outermost bind.  */
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Find the first debug decl with a variable-length type, if any.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the nesting level whose context the type depends
		   on; skip decls not tied to this nesting tree.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Walk through anonymous pointer layers in parallel so
		   the TYPE_NAME comparison below is at matching depth.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* Remap a typedef decl still shared with the old type.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3579
3580 static void
3581 finalize_nesting_tree (struct nesting_info *root)
3582 {
3583 struct nesting_info *n;
3584 FOR_EACH_NEST_INFO (n, root)
3585 finalize_nesting_tree_1 (n);
3586 }
3587
3588 /* Unnest the nodes and pass them to cgraph. */
3589
3590 static void
3591 unnest_nesting_tree_1 (struct nesting_info *root)
3592 {
3593 struct cgraph_node *node = cgraph_node::get (root->context);
3594
3595 /* For nested functions update the cgraph to reflect unnesting.
3596 We also delay finalizing of these functions up to this point. */
3597 if (nested_function_info::get (node)->origin)
3598 {
3599 unnest_function (node);
3600 if (!root->thunk_p)
3601 cgraph_node::finalize_function (root->context, true);
3602 }
3603 }
3604
3605 static void
3606 unnest_nesting_tree (struct nesting_info *root)
3607 {
3608 struct nesting_info *n;
3609 FOR_EACH_NEST_INFO (n, root)
3610 unnest_nesting_tree_1 (n);
3611 }
3612
3613 /* Free the data structures allocated during this pass. */
3614
3615 static void
3616 free_nesting_tree (struct nesting_info *root)
3617 {
3618 struct nesting_info *node, *next;
3619
3620 node = iter_nestinfo_start (root);
3621 do
3622 {
3623 next = iter_nestinfo_next (node);
3624 delete node->var_map;
3625 delete node->field_map;
3626 delete node->mem_refs;
3627 free (node);
3628 node = next;
3629 }
3630 while (node);
3631 }
3632
3633 /* Gimplify a function and all its nested functions. */
3634 static void
3635 gimplify_all_functions (struct cgraph_node *root)
3636 {
3637 struct cgraph_node *iter;
3638 if (!gimple_body (root->decl))
3639 gimplify_function_tree (root->decl);
3640 for (iter = first_nested_function (root); iter;
3641 iter = next_nested_function (iter))
3642 if (!iter->thunk)
3643 gimplify_all_functions (iter);
3644 }
3645
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!first_nested_function (cgn))
    return;

  /* Make sure FNDECL and every function nested in it have GIMPLE bodies
     before the statement walks below run.  */
  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite references: first uses of outer-function state from within
     nested functions, then local references to frame variables, then
     non-local gotos and their receivers.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Then convert calls/trampolines, lay out the frames, and hand the
     now-unnested functions to the cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3692
3693 #include "gt-tree-nested.h"