]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-nested.c
tree-nested: Fix handling of *reduction clauses with C array sections [PR93566]
[thirdparty/gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45
46
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
51
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
59 to compiling the outer function. Such delicate ordering requirements
60 makes it difficult to do whole translation unit optimizations
61 involving such functions.
62
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
73 inlinable.
74
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
78 examine inner functions first, so that we can avoid making changes
79 to outer functions which are unnecessary.
80
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
85
86
/* Per-function record for the nested-function lowering pass; one is
   allocated for every function in the nesting tree.  */

struct nesting_info
{
  /* Links forming the nesting tree: the enclosing function, the first
     function nested inside this one, and the next sibling at the same
     nesting level.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a non-locally referenced DECL to its FIELD_DECL in the
     non-local frame struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Map from a DECL to its local replacement: either a debug decl
     (get_nonlocal_debug_decl) or, for a nested function, a TREE_LIST
     holding its trampoline/descriptor fields (lookup_element_for_decl).  */
  hash_map<tree, tree> *var_map;
  /* Addresses of memory references already processed — presumably used
     to avoid rewriting a reference twice; uses are outside this part of
     the file.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs whose expansion is suppressed; checked before rewriting a
     non-local reference.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created while rewriting this function
     (see create_tmp_var_for).  */
  tree new_local_var_chain;
  /* Chain of debug-only replacement variables
     (see get_nonlocal_debug_decl).  */
  tree debug_var_chain;
  /* The RECORD_TYPE of the non-local frame struct and the VAR_DECL
     instantiating it in this function (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* Field in the frame struct holding the parent's static chain, and
     the PARM_DECL-like variable holding this function's incoming
     static chain.  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the non-local goto save area, if one was needed.  */
  tree nl_goto_field;

  /* True if CONTEXT is a thunk.  */
  bool thunk_p;
  /* True if any PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline (resp. descriptor) field was created for any
     nested function.  */
  bool any_tramp_created;
  bool any_descr_created;
  /* Bitmask: 1 = the frame's address was taken in this function,
     2 = the static chain decl was used (see get_static_chain).  */
  char static_chain_added;
};
113
114
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
116
117 static inline struct nesting_info *
118 iter_nestinfo_start (struct nesting_info *root)
119 {
120 while (root->inner)
121 root = root->inner;
122 return root;
123 }
124
125 static inline struct nesting_info *
126 iter_nestinfo_next (struct nesting_info *node)
127 {
128 if (node->next)
129 return iter_nestinfo_start (node->next);
130 return node->outer;
131 }
132
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
135
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack;
138
139
140 /* We're working in so many different function contexts simultaneously,
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143
/* Like create_tmp_var, except record the variable for registration at
   the given nesting level INFO, and return it.  TYPE is the variable's
   type and PREFIX (possibly NULL) seeds its name.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  DECL_CONTEXT (tmp_var) = info->context;
  /* Chain the temporary onto INFO->new_local_var_chain so it can be
     registered with the function later.  */
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
171
172 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
173
174 static tree
175 build_simple_mem_ref_notrap (tree ptr)
176 {
177 tree t = build_simple_mem_ref (ptr);
178 TREE_THIS_NOTRAP (t) = 1;
179 return t;
180 }
181
/* Take the address of EXP, marking EXP addressable as necessary, and
   return the folded ADDR_EXPR.  */

tree
build_addr (tree exp)
{
  /* GIMPLE requires an object to be marked addressable before its
     address may be taken.  */
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}
191
/* Insert FIELD into TYPE, keeping the field list sorted by decreasing
   alignment so the record packs without padding where possible.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  /* Find the first existing field with alignment no greater than
     FIELD's and link FIELD in before it.  */
  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    SET_TYPE_ALIGN (type, DECL_ALIGN (field));
}
212
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function-name>" for dumps and debug.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;

      /* Do not put info->frame_decl on info->new_local_var_chain,
	 so that we can declare it in the lexical blocks, which
	 makes sure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs.  */
      info->frame_decl = create_tmp_var_raw (type, "FRAME");
      DECL_CONTEXT (info->frame_decl) = info->context;
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }

  return type;
}
258
259 /* Return true if DECL should be referenced by pointer in the non-local frame
260 structure. */
261
262 static bool
263 use_pointer_in_frame (tree decl)
264 {
265 if (TREE_CODE (decl) == PARM_DECL)
266 {
267 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268 sized DECLs, and inefficient to copy large aggregates. Don't bother
269 moving anything but scalar parameters. */
270 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
271 }
272 else
273 {
274 /* Variable-sized DECLs can only come from OMP clauses at this point
275 since the gimplifier has already turned the regular variables into
276 pointers. Do the same as the gimplifier. */
277 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
278 }
279 }
280
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context INFO.
   With NO_INSERT, return NULL_TREE when no field exists yet.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer to DECL.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame holds DECL itself; mirror its attributes onto
	     the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
355
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The static chain is a pointer to the enclosing function's
	 non-local frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
401
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      /* The field's type is a pointer to the enclosing function's
	 frame struct.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
435
/* Initialize a new temporary in INFO's context with the GIMPLE_CALL CALL,
   inserting the call before GSI, and return the temporary.  */

static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
			gcall *call)
{
  tree t;

  t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
  gimple_call_set_lhs (call, t);
  /* Borrow the location of the statement we are inserting before, when
     there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before (gsi, call, GSI_SAME_STMT);

  return t;
}
452
453
/* Copy EXP into a temporary.  Allocate the temporary in the context of
   INFO and insert the initialization statement before GSI.  Return the
   temporary.  */

static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple *stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  stmt = gimple_build_assign (t, exp);
  /* Borrow the location of the statement we are inserting before, when
     there is one.  */
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
471
472
473 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
474
475 static tree
476 gsi_gimplify_val (struct nesting_info *info, tree exp,
477 gimple_stmt_iterator *gsi)
478 {
479 if (is_gimple_val (exp))
480 return exp;
481 else
482 return init_tmp_var (info, exp, gsi);
483 }
484
/* Similarly to init_tmp_var, but copy from the temporary into EXP and
   insert the statement after the iterator GSI.  Return the temporary.  */

static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
  tree t;
  gimple *stmt;

  t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
  /* Note the reversed assignment direction: EXP = T.  */
  stmt = gimple_build_assign (exp, t);
  if (! gsi_end_p (*gsi))
    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);

  return t;
}
502
503 /* Build or return the type used to represent a nested function trampoline. */
504
505 static GTY(()) tree trampoline_type;
506
/* Return the singleton RECORD_TYPE used to represent a nested function
   trampoline: a suitably aligned opaque byte array.  INFO supplies only
   the source location for the field decl.  */

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Model the trampoline as struct { char __data[size]; }.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
542
543 /* Build or return the type used to represent a nested function descriptor. */
544
545 static GTY(()) tree descriptor_type;
546
/* Return the singleton RECORD_TYPE used to represent a nested function
   descriptor: a two-pointer array.  INFO supplies only the source
   location for the field decl.  */

static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  if (descriptor_type)
    return descriptor_type;

  /* Model the descriptor as struct { void *__data[2]; }.  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
572
/* Given DECL, a nested function, find or create an element in the
   var map for this function.  The element is a TREE_LIST whose
   TREE_PURPOSE/TREE_VALUE hold the trampoline/descriptor fields
   (see lookup_tramp_for_decl and lookup_descr_for_decl).  */

static tree
lookup_element_for_decl (struct nesting_info *info, tree decl,
			 enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    *slot = build_tree_list (NULL_TREE, NULL_TREE);

  return (tree) *slot;
}
592
/* Given DECL, a nested function, create a field of type TYPE in the
   non-local frame structure for this function and return it.  */

static tree
create_field_for_decl (struct nesting_info *info, tree decl, tree type)
{
  tree field = make_node (FIELD_DECL);
  DECL_NAME (field) = DECL_NAME (decl);
  TREE_TYPE (field) = type;
  /* The field will be pointed to (e.g. trampoline data), so it must be
     addressable.  */
  TREE_ADDRESSABLE (field) = 1;
  insert_field_into_struct (get_frame_type (info), field);
  return field;
}
606
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  Return NULL_TREE
   when no field exists and INSERT is NO_INSERT.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  tree elt, field;

  elt = lookup_element_for_decl (info, decl, insert);
  if (!elt)
    return NULL_TREE;

  /* The trampoline field lives in the TREE_PURPOSE slot of the
     element (the descriptor uses TREE_VALUE).  */
  field = TREE_PURPOSE (elt);

  if (!field && insert == INSERT)
    {
      field = create_field_for_decl (info, decl, get_trampoline_type (info));
      TREE_PURPOSE (elt) = field;
      info->any_tramp_created = true;
    }

  return field;
}
631
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a descriptor for this function.  Return NULL_TREE
   when no field exists and INSERT is NO_INSERT.  */

static tree
lookup_descr_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  tree elt, field;

  elt = lookup_element_for_decl (info, decl, insert);
  if (!elt)
    return NULL_TREE;

  /* The descriptor field lives in the TREE_VALUE slot of the element
     (the trampoline uses TREE_PURPOSE).  */
  field = TREE_VALUE (elt);

  if (!field && insert == INSERT)
    {
      field = create_field_for_decl (info, decl, get_descriptor_type (info));
      TREE_VALUE (elt) = field;
      info->any_descr_created = true;
    }

  return field;
}
656
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Words in the save area, plus one for the frame pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
701
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence
   *PSEQ, passing INFO through walk_stmt_info.info.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  /* Start each walk expecting rvalue context.  */
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}
715
716
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT,
   storing the possibly-modified body back into the function.  */

static inline void
walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	       struct nesting_info *info)
{
  gimple_seq body = gimple_body (info->context);
  walk_body (callback_stmt, callback_op, info, &body);
  gimple_set_body (info->context, body);
}
727
/* Invoke CALLBACK_STMT/CALLBACK_OP on a GIMPLE_OMP_FOR's init, cond,
   incr and pre-body.  Any statements the callbacks generate while
   rewriting the loop-control expressions are appended to the loop's
   pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Point WI.GSI at an initially-empty sequence so the callbacks have
     somewhere to insert statements they create for the expressions
     walked below.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue; the bounds and increment operands are
	 rvalues.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Anything the callbacks inserted becomes part of the pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
780
/* Invoke CALLBACK_STMT/CALLBACK_OP on ROOT and all functions nested
   underneath, depth first.  */

static void
walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		    struct nesting_info *root)
{
  struct nesting_info *n;
  FOR_EACH_NEST_INFO (n, root)
    walk_function (callback_stmt, callback_op, n);
}
791
792
/* We have to check for a fairly pathological case.  The operands of function
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Walk every function nested directly in FNDECL, and recurse into
     their own nested functions.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
835
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN, and return it.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;
  info->thunk_p = cgn->thunk.thunk_p;

  /* Recurse into the nested functions, pushing each child onto the
     front of INFO's inner list.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
865
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target is this very function: its chain is the address of
	 its own frame struct.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Otherwise start from our incoming chain and follow the
	 __chain fields up one nesting level at a time.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
898
899
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Walk the static chain up to TARGET_CONTEXT's frame, one
	 __chain dereference per nesting level.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref_notrap (x);
    }

  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
938
939 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
940
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  /* Reuse a previously created replacement.  */
  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref_notrap (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  /* If the frame holds only a pointer to DECL, dereference it.  */
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref_notrap (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The replacement evaluates to the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
1018
1019
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* Decls belonging to the current function need no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	/* Decls already handled by OMP clause processing get the
	   debug stand-in rather than a frame access.  */
	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		/* The frame holds only a pointer to the object; load it
		   into a temporary and dereference.  */
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	/* In value context, reduce the reference to a simple temporary;
	   for an lhs, arrange for the store-back instead.  */
	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address context so a bare decl is not
	   replaced by a value temporary.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size operands may each
		 reference nonlocal decls.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* The base of the reference chain must stay addressable.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}

static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
					     struct walk_stmt_info *);
1182
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   Returns true if any rewritten clause (other than OMP_CLAUSE_PRIVATE)
   needs the static chain in the outlined region.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  /* Record decls rewritten here so that the generic operand walker
     does not expand them to frame accesses a second time.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  /* Reductions over array sections wrap the decl in a MEM_REF;
	     dig through POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR
	     to find the underlying decl to rewrite in place.  */
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (TREE_CODE (*pdecl) == INDIRECT_REF
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression is an rvalue; convert it directly.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      /* Replace the nonlocal decl by its debug stand-in and
		 suppress later expansion by the operand walker.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      *pdecl = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  /* Non-decl map operands (e.g. array sections) get the full
	     operand walk instead.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* Clauses with no operands that could reference decls.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: convert references inside the GIMPLE sequences some
     clauses carry, with placeholder decls temporarily re-parented into
     the current context.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1457
1458 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1459
1460 static void
1461 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1462 {
1463 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1464 type = TREE_TYPE (type);
1465
1466 if (TYPE_NAME (type)
1467 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1468 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1469 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1470
1471 while (POINTER_TYPE_P (type)
1472 || TREE_CODE (type) == VECTOR_TYPE
1473 || TREE_CODE (type) == FUNCTION_TYPE
1474 || TREE_CODE (type) == METHOD_TYPE)
1475 type = TREE_TYPE (type);
1476
1477 if (TREE_CODE (type) == ARRAY_TYPE)
1478 {
1479 tree domain, t;
1480
1481 note_nonlocal_vla_type (info, TREE_TYPE (type));
1482 domain = TYPE_DOMAIN (type);
1483 if (domain)
1484 {
1485 t = TYPE_MIN_VALUE (domain);
1486 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1487 && decl_function_context (t) != info->context)
1488 get_nonlocal_debug_decl (info, t);
1489 t = TYPE_MAX_VALUE (domain);
1490 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1491 && decl_function_context (t) != info->context)
1492 get_nonlocal_debug_decl (info, t);
1493 }
1494 }
1495 }
1496
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams constructs are handled like the simple OMP regions
	 below; offloaded ones fall through to the parallel/task path.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If clause conversion needs the static chain, pass it to the
	 outlined region as a firstprivate.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created inside the region separately so
	 they are declared inside it.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions keep running on the host, so only
	 the clauses need conversion before walking the body.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      /* Offloaded regions that need the static chain map it to the
	 device explicitly.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (VAR_P (decl)
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* Clobbers of nonlocal decls would become clobbers of the frame
	 field and confuse later passes; drop them instead.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1710
1711
1712 /* A subroutine of convert_local_reference. Create a local variable
1713 in the parent function with DECL_VALUE_EXPR set to reference the
1714 field in FRAME. This is used both for debug info and in OMP
1715 lowering. */
1716
1717 static tree
1718 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1719 {
1720 tree x, new_decl;
1721
1722 tree *slot = &info->var_map->get_or_insert (decl);
1723 if (*slot)
1724 return *slot;
1725
1726 /* Make sure frame_decl gets created. */
1727 (void) get_frame_type (info);
1728 x = info->frame_decl;
1729 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1730
1731 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1732 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1733 DECL_CONTEXT (new_decl) = info->context;
1734 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1735 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1736 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1737 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1738 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1739 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1740 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1741 if ((TREE_CODE (decl) == PARM_DECL
1742 || TREE_CODE (decl) == RESULT_DECL
1743 || VAR_P (decl))
1744 && DECL_BY_REFERENCE (decl))
1745 DECL_BY_REFERENCE (new_decl) = 1;
1746
1747 SET_DECL_VALUE_EXPR (new_decl, x);
1748 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1749 *slot = new_decl;
1750
1751 DECL_CHAIN (new_decl) = info->debug_var_chain;
1752 info->debug_var_chain = new_decl;
1753
1754 /* Do not emit debug info twice. */
1755 DECL_IGNORED_P (decl) = 1;
1756
1757 return new_decl;
1758 }
1759
1760
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Decls already handled by OMP clause processing get the debug
	     stand-in; otherwise access the frame field directly.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, t, field);
	  else
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In value context, reduce to a simple temporary; for an lhs,
	     arrange for the store-back instead.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* Walk the operand in address context so a bare decl is not
	 replaced by a value temporary.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size operands may each
		 reference frame variables.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* The base of the reference chain must stay addressable.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}

static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
					  struct walk_stmt_info *);
1915
1916 /* Helper for convert_local_reference. Convert all the references in
1917 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1918
1919 static bool
1920 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1921 {
1922 struct nesting_info *const info = (struct nesting_info *) wi->info;
1923 bool need_frame = false, need_stmts = false;
1924 tree clause, decl, *pdecl;
1925 int dummy;
1926 bitmap new_suppress;
1927
1928 new_suppress = BITMAP_GGC_ALLOC ();
1929 bitmap_copy (new_suppress, info->suppress_expansion);
1930
1931 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1932 {
1933 pdecl = NULL;
1934 switch (OMP_CLAUSE_CODE (clause))
1935 {
1936 case OMP_CLAUSE_REDUCTION:
1937 case OMP_CLAUSE_IN_REDUCTION:
1938 case OMP_CLAUSE_TASK_REDUCTION:
1939 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1940 need_stmts = true;
1941 if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
1942 {
1943 pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
1944 if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
1945 pdecl = &TREE_OPERAND (*pdecl, 0);
1946 if (TREE_CODE (*pdecl) == INDIRECT_REF
1947 || TREE_CODE (*pdecl) == ADDR_EXPR)
1948 pdecl = &TREE_OPERAND (*pdecl, 0);
1949 }
1950 goto do_decl_clause;
1951
1952 case OMP_CLAUSE_LASTPRIVATE:
1953 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1954 need_stmts = true;
1955 goto do_decl_clause;
1956
1957 case OMP_CLAUSE_LINEAR:
1958 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1959 need_stmts = true;
1960 wi->val_only = true;
1961 wi->is_lhs = false;
1962 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1963 wi);
1964 goto do_decl_clause;
1965
1966 case OMP_CLAUSE_PRIVATE:
1967 case OMP_CLAUSE_FIRSTPRIVATE:
1968 case OMP_CLAUSE_COPYPRIVATE:
1969 case OMP_CLAUSE_SHARED:
1970 case OMP_CLAUSE_TO_DECLARE:
1971 case OMP_CLAUSE_LINK:
1972 case OMP_CLAUSE_USE_DEVICE_PTR:
1973 case OMP_CLAUSE_USE_DEVICE_ADDR:
1974 case OMP_CLAUSE_IS_DEVICE_PTR:
1975 do_decl_clause:
1976 if (pdecl == NULL)
1977 pdecl = &OMP_CLAUSE_DECL (clause);
1978 decl = *pdecl;
1979 if (VAR_P (decl)
1980 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1981 break;
1982 if (decl_function_context (decl) == info->context
1983 && !use_pointer_in_frame (decl))
1984 {
1985 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1986 if (field)
1987 {
1988 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1989 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1990 bitmap_set_bit (new_suppress, DECL_UID (decl));
1991 *pdecl = get_local_debug_decl (info, decl, field);
1992 need_frame = true;
1993 }
1994 }
1995 break;
1996
1997 case OMP_CLAUSE_SCHEDULE:
1998 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1999 break;
2000 /* FALLTHRU */
2001 case OMP_CLAUSE_FINAL:
2002 case OMP_CLAUSE_IF:
2003 case OMP_CLAUSE_NUM_THREADS:
2004 case OMP_CLAUSE_DEPEND:
2005 case OMP_CLAUSE_DEVICE:
2006 case OMP_CLAUSE_NUM_TEAMS:
2007 case OMP_CLAUSE_THREAD_LIMIT:
2008 case OMP_CLAUSE_SAFELEN:
2009 case OMP_CLAUSE_SIMDLEN:
2010 case OMP_CLAUSE_PRIORITY:
2011 case OMP_CLAUSE_GRAINSIZE:
2012 case OMP_CLAUSE_NUM_TASKS:
2013 case OMP_CLAUSE_HINT:
2014 case OMP_CLAUSE_NUM_GANGS:
2015 case OMP_CLAUSE_NUM_WORKERS:
2016 case OMP_CLAUSE_VECTOR_LENGTH:
2017 case OMP_CLAUSE_GANG:
2018 case OMP_CLAUSE_WORKER:
2019 case OMP_CLAUSE_VECTOR:
2020 case OMP_CLAUSE_ASYNC:
2021 case OMP_CLAUSE_WAIT:
2022 /* Several OpenACC clauses have optional arguments. Check if they
2023 are present. */
2024 if (OMP_CLAUSE_OPERAND (clause, 0))
2025 {
2026 wi->val_only = true;
2027 wi->is_lhs = false;
2028 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2029 &dummy, wi);
2030 }
2031
2032 /* The gang clause accepts two arguments. */
2033 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2034 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2035 {
2036 wi->val_only = true;
2037 wi->is_lhs = false;
2038 convert_nonlocal_reference_op
2039 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2040 }
2041 break;
2042
2043 case OMP_CLAUSE_DIST_SCHEDULE:
2044 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2045 {
2046 wi->val_only = true;
2047 wi->is_lhs = false;
2048 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2049 &dummy, wi);
2050 }
2051 break;
2052
2053 case OMP_CLAUSE_MAP:
2054 case OMP_CLAUSE_TO:
2055 case OMP_CLAUSE_FROM:
2056 if (OMP_CLAUSE_SIZE (clause))
2057 {
2058 wi->val_only = true;
2059 wi->is_lhs = false;
2060 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2061 &dummy, wi);
2062 }
2063 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2064 goto do_decl_clause;
2065 wi->val_only = true;
2066 wi->is_lhs = false;
2067 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2068 wi, NULL);
2069 break;
2070
2071 case OMP_CLAUSE_ALIGNED:
2072 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2073 {
2074 wi->val_only = true;
2075 wi->is_lhs = false;
2076 convert_local_reference_op
2077 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2078 }
2079 /* FALLTHRU */
2080 case OMP_CLAUSE_NONTEMPORAL:
2081 /* Like do_decl_clause, but don't add any suppression. */
2082 decl = OMP_CLAUSE_DECL (clause);
2083 if (VAR_P (decl)
2084 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2085 break;
2086 if (decl_function_context (decl) == info->context
2087 && !use_pointer_in_frame (decl))
2088 {
2089 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2090 if (field)
2091 {
2092 OMP_CLAUSE_DECL (clause)
2093 = get_local_debug_decl (info, decl, field);
2094 need_frame = true;
2095 }
2096 }
2097 break;
2098
2099 case OMP_CLAUSE_NOWAIT:
2100 case OMP_CLAUSE_ORDERED:
2101 case OMP_CLAUSE_DEFAULT:
2102 case OMP_CLAUSE_COPYIN:
2103 case OMP_CLAUSE_COLLAPSE:
2104 case OMP_CLAUSE_TILE:
2105 case OMP_CLAUSE_UNTIED:
2106 case OMP_CLAUSE_MERGEABLE:
2107 case OMP_CLAUSE_PROC_BIND:
2108 case OMP_CLAUSE_NOGROUP:
2109 case OMP_CLAUSE_THREADS:
2110 case OMP_CLAUSE_SIMD:
2111 case OMP_CLAUSE_DEFAULTMAP:
2112 case OMP_CLAUSE_ORDER:
2113 case OMP_CLAUSE_SEQ:
2114 case OMP_CLAUSE_INDEPENDENT:
2115 case OMP_CLAUSE_AUTO:
2116 case OMP_CLAUSE_IF_PRESENT:
2117 case OMP_CLAUSE_FINALIZE:
2118 case OMP_CLAUSE__CONDTEMP_:
2119 case OMP_CLAUSE__SCANTEMP_:
2120 break;
2121
2122 /* The following clause belongs to the OpenACC cache directive, which
2123 is discarded during gimplification. */
2124 case OMP_CLAUSE__CACHE_:
2125 /* The following clauses are only allowed in the OpenMP declare simd
2126 directive, so not seen here. */
2127 case OMP_CLAUSE_UNIFORM:
2128 case OMP_CLAUSE_INBRANCH:
2129 case OMP_CLAUSE_NOTINBRANCH:
2130 /* The following clauses are only allowed on OpenMP cancel and
2131 cancellation point directives, which at this point have already
2132 been lowered into a function call. */
2133 case OMP_CLAUSE_FOR:
2134 case OMP_CLAUSE_PARALLEL:
2135 case OMP_CLAUSE_SECTIONS:
2136 case OMP_CLAUSE_TASKGROUP:
2137 /* The following clauses are only added during OMP lowering; nested
2138 function decomposition happens before that. */
2139 case OMP_CLAUSE__LOOPTEMP_:
2140 case OMP_CLAUSE__REDUCTEMP_:
2141 case OMP_CLAUSE__SIMDUID_:
2142 case OMP_CLAUSE__GRIDDIM_:
2143 case OMP_CLAUSE__SIMT_:
2144 /* Anything else. */
2145 default:
2146 gcc_unreachable ();
2147 }
2148 }
2149
2150 info->suppress_expansion = new_suppress;
2151
2152 if (need_stmts)
2153 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2154 switch (OMP_CLAUSE_CODE (clause))
2155 {
2156 case OMP_CLAUSE_REDUCTION:
2157 case OMP_CLAUSE_IN_REDUCTION:
2158 case OMP_CLAUSE_TASK_REDUCTION:
2159 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2160 {
2161 tree old_context
2162 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2163 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2164 = info->context;
2165 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2166 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2167 = info->context;
2168 walk_body (convert_local_reference_stmt,
2169 convert_local_reference_op, info,
2170 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2171 walk_body (convert_local_reference_stmt,
2172 convert_local_reference_op, info,
2173 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2174 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2175 = old_context;
2176 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2177 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2178 = old_context;
2179 }
2180 break;
2181
2182 case OMP_CLAUSE_LASTPRIVATE:
2183 walk_body (convert_local_reference_stmt,
2184 convert_local_reference_op, info,
2185 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2186 break;
2187
2188 case OMP_CLAUSE_LINEAR:
2189 walk_body (convert_local_reference_stmt,
2190 convert_local_reference_op, info,
2191 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2192 break;
2193
2194 default:
2195 break;
2196 }
2197
2198 return need_frame;
2199 }
2200
2201
2202 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2203 and PARM_DECLs that were referenced by inner nested functions.
2204 The rewrite will be a structure reference to the local frame variable. */
2205
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* Non-host teams regions only need their clauses and body walked;
	 host teams fall through and are handled like parallel/task.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If rewriting the clauses made FRAME.* live inside the region,
	 share it with the region via an artificial SHARED clause.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the body with fresh new_local_var_chain / static_chain_added
	 state so we can tell which temporaries and chain uses the region
	 itself introduced.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* The body walk may also have started using the frame; add the
	 SHARED clause now if the clause walk above did not already.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while walking the body belong at the head of
	 the region's body, not in the enclosing function.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The loop bounds/steps need the same treatment as the body.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions behave like the simple constructs
	 above; offloaded ones additionally need FRAME.* mapped.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* As in the parallel/task case, but an offloaded region receives
	 the frame through an explicit tofrom MAP clause.  */
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* Add the MAP clause if the body walk started using the frame.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* No clauses to convert; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* The condition is a genuine rvalue use; let the operand walker
	 rewrite it with val_only semantics.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* Drop clobbers of variables that have been moved into the frame;
	 the decl itself no longer holds the value.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		/* Statics and externals are never moved into the frame.  */
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			/* Point the namelist at the debug decl that
			   aliases the frame field.  */
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2434
2435
2436 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2437 that reference labels from outer functions. The rewrite will be a
2438 call to __builtin_nonlocal_goto. */
2439
static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  /* Only GIMPLE_GOTO statements are of interest here.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos (destination is not a LABEL_DECL) are left alone.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label in the current function needs no rewriting.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the enclosing function that defines LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be use for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2502
2503
2504 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2505 are referenced via nonlocal goto from a nested function. The rewrite
2506 will involve installing a newly generated DECL_NONLOCAL label, and
2507 (potentially) a branch around the rtl gunk that is assumed to be
2508 attached to such a label. */
2509
static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  /* Only GIMPLE_LABEL statements are of interest.  */
  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels recorded in var_map by convert_nl_goto_reference need a
     non-local receiver.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple *stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Insert the DECL_NONLOCAL label (created during the first pass and
     stashed in var_map) just before the original user label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
2551
2552
2553 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2554 of nested functions that require the use of trampolines. The rewrite
2555 will involve a reference a trampoline generated for the occasion. */
2556
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline.  */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      /* Reserve (or reuse) the frame field holding the trampoline or
	 descriptor in the parent's frame.  */
      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &fndecl with the adjusted pointer.  */
      *tp = x;
      break;

    default:
      /* Do not recurse into types or decls; anything else may still
	 contain an ADDR_EXPR of a nested function.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2636
2637
2638 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2639 to addresses of nested functions that require the use of
2640 trampolines. The rewrite will involve a reference a trampoline
2641 generated for the occasion. */
2642
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Only host teams need the region treatment below; otherwise let
	 the generic walker handle operands and body.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      do_parallel:
      {
	/* Walk the region body with fresh new_local_var_chain and
	   static_chain_added state so we can see what the region itself
	   introduced.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 tracks use of FRAME.*, bit 1 use of CHAIN.*.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* CHAIN.* is firstprivatized; FRAME.* is shared.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions receive the object through an
		   explicit MAP clause instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2742
2743
2744
2745 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2746 that reference nested functions to make sure that the static chain
2747 is set up properly for the call. */
2748
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Calls that already carry a static chain, and indirect calls,
	 are left alone.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0: the callee is our own nested function (frame used);
	     bit 1: the chain came from an outer context.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams only need their body walked; host teams are
	 handled like parallel/task below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Track which chains the region body started using, then make them
	 available inside the region via data-sharing clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      /* Bit 0 covers FRAME.*, bit 1 covers CHAIN.*.  */
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Offloaded regions receive the object via an explicit
		 MAP clause rather than data sharing.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      /* These constructs do not isolate chains; just walk the body.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2889
2890 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2891 call expressions. At the same time, determine if a nested function
2892 actually uses its static chain; if not, remember that. */
2893
static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks are handled after their alias target, below.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* A thunk uses a static chain iff its alias target does.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = cgraph_node::get (decl)->thunk.alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point of the number of functions that need
	 a static chain.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate the alias's decision to each thunk.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = cgraph_node::get (decl)->thunk.alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2979
/* Extension of copy_body_data used when remapping types and debug decls
   of a nested function; ROOT supplies the var_map consulted by
   nesting_copy_decl.  */

struct nesting_copy_body_data
{
  copy_body_data cb;		/* Must be first: callbacks receive a
				   copy_body_data * and cast back.  */
  struct nesting_info *root;	/* Nesting info whose var_map drives the
				   remapping.  */
};
2985
/* A helper subroutine for debug_var_chain type remapping: the
   copy_body_data::copy_decl hook.  Return the decl to use in place of
   DECL.  Decls that ROOT->var_map already maps are replaced by their
   mapping; a TYPE_DECL with an original type is copied with that
   original type remapped too; variables, parameters and results are
   kept as-is (they are remapped elsewhere, via var_map); anything else
   is copied without change.  */

static tree
nesting_copy_decl (tree decl, copy_body_data *id)
{
  /* ID is really the cb member of a nesting_copy_body_data.  */
  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  tree *slot = nid->root->var_map->get (decl);

  if (slot)
    return (tree) *slot;

  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
    {
      /* Copy the TYPE_DECL and remap its original (pre-typedef) type so
	 debug info keeps pointing at a consistent type tree.  */
      tree new_decl = copy_decl_no_change (decl, id);
      DECL_ORIGINAL_TYPE (new_decl)
	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
      return new_decl;
    }

  /* Variables, parameters and results not in var_map stay untouched.  */
  if (VAR_P (decl)
      || TREE_CODE (decl) == PARM_DECL
      || TREE_CODE (decl) == RESULT_DECL)
    return decl;

  return copy_decl_no_change (decl, id);
}
3012
3013 /* A helper function for remap_vla_decls. See if *TP contains
3014 some remapped variables. */
3015
3016 static tree
3017 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3018 {
3019 struct nesting_info *root = (struct nesting_info *) data;
3020 tree t = *tp;
3021
3022 if (DECL_P (t))
3023 {
3024 *walk_subtrees = 0;
3025 tree *slot = root->var_map->get (t);
3026
3027 if (slot)
3028 return *slot;
3029 }
3030 return NULL;
3031 }
3032
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  A VLA decl of interest has a value expression of the form
   *tmp where tmp is an artificial VAR_DECL, and a variably modified
   type; when tmp or some size expression in the type was remapped into
   ROOT's frame, both the type and the value expression must be
   rewritten in terms of the remapped decls.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process inner scopes first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: cheap scan for the first VLA decl that actually needs
     remapping, so we only pay for the copy_body machinery when
     something will use it.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  /* Nothing in this block needs remapping.  */
  if (var == NULL_TREE)
    return;

  /* Set up the remapping context shared by all decls in the chain.  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: starting at the decl found above, remap each affected
     VLA decl's type and value expression.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting_info for the function VAR belongs to; decls
	   from functions outside this nest are not our business.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the (variably modified) type itself.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Walk through anonymous pointer layers in parallel over the old
	   and new type to reach a named type, if any.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If the remapped type still shares the old TYPE_DECL name, remap
	   the name decl too so debug info stays consistent.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	/* Rewrite the value expression in terms of remapped decls.  */
	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3130
3131 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3132 involved. */
3133
3134 static void
3135 fixup_vla_decls (tree block)
3136 {
3137 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3138 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3139 {
3140 tree val = DECL_VALUE_EXPR (var);
3141
3142 if (!(TREE_CODE (val) == INDIRECT_REF
3143 && VAR_P (TREE_OPERAND (val, 0))
3144 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3145 continue;
3146
3147 /* Fully expand value expressions. This avoids having debug variables
3148 only referenced from them and that can be swept during GC. */
3149 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3150 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3151 SET_DECL_VALUE_EXPR (var, val);
3152 }
3153
3154 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3155 fixup_vla_decls (sub);
3156 }
3157
3158 /* Fold the MEM_REF *E. */
3159 bool
3160 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3161 {
3162 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3163 *ref_p = fold (*ref_p);
3164 return true;
3165 }
3166
3167 /* Given DECL, a nested function, build an initialization call for FIELD,
3168 the trampoline or descriptor for DECL, using FUNC as the function. */
3169
3170 static gcall *
3171 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3172 tree func)
3173 {
3174 tree arg1, arg2, arg3, x;
3175
3176 gcc_assert (DECL_STATIC_CHAIN (decl));
3177 arg3 = build_addr (info->frame_decl);
3178
3179 arg2 = build_addr (decl);
3180
3181 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3182 info->frame_decl, field, NULL_TREE);
3183 arg1 = build_addr (x);
3184
3185 return gimple_build_call (func, 3, arg1, arg2, arg3);
3186 }
3187
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks have nothing of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
	= builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     frame, depending on how the field was created.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  They go at
     the front of the outermost bind of the function body.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Look for a debug decl with a variably modified type; only then
	 do we need the remapping machinery below.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing function the type's size
		   expressions depend on.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Skip anonymous pointer layers in parallel over the old
		   and new type to reach a named type, if any.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* Remap the type's name decl too if the remapped type
		   still shares it with the original.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Attach the debug decls to the outermost bind's block if it has
	 one, otherwise to the function-level block directly.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3461
3462 static void
3463 finalize_nesting_tree (struct nesting_info *root)
3464 {
3465 struct nesting_info *n;
3466 FOR_EACH_NEST_INFO (n, root)
3467 finalize_nesting_tree_1 (n);
3468 }
3469
3470 /* Unnest the nodes and pass them to cgraph. */
3471
3472 static void
3473 unnest_nesting_tree_1 (struct nesting_info *root)
3474 {
3475 struct cgraph_node *node = cgraph_node::get (root->context);
3476
3477 /* For nested functions update the cgraph to reflect unnesting.
3478 We also delay finalizing of these functions up to this point. */
3479 if (node->origin)
3480 {
3481 node->unnest ();
3482 if (!root->thunk_p)
3483 cgraph_node::finalize_function (root->context, true);
3484 }
3485 }
3486
3487 static void
3488 unnest_nesting_tree (struct nesting_info *root)
3489 {
3490 struct nesting_info *n;
3491 FOR_EACH_NEST_INFO (n, root)
3492 unnest_nesting_tree_1 (n);
3493 }
3494
3495 /* Free the data structures allocated during this pass. */
3496
3497 static void
3498 free_nesting_tree (struct nesting_info *root)
3499 {
3500 struct nesting_info *node, *next;
3501
3502 node = iter_nestinfo_start (root);
3503 do
3504 {
3505 next = iter_nestinfo_next (node);
3506 delete node->var_map;
3507 delete node->field_map;
3508 delete node->mem_refs;
3509 free (node);
3510 node = next;
3511 }
3512 while (node);
3513 }
3514
3515 /* Gimplify a function and all its nested functions. */
3516 static void
3517 gimplify_all_functions (struct cgraph_node *root)
3518 {
3519 struct cgraph_node *iter;
3520 if (!gimple_body (root->decl))
3521 gimplify_function_tree (root->decl);
3522 for (iter = root->nested; iter; iter = iter->next_nested)
3523 if (!iter->thunk.thunk_p)
3524 gimplify_all_functions (iter);
3525 }
3526
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* All bodies must be in GIMPLE form before the walks below.  */
  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* The walk order matters: nonlocal references are rewritten first,
     then local references to frame variables, then nonlocal gotos.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Iterate static-chain and trampoline conversion to a fixed point,
     then materialize frames and hand the functions to cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3573
3574 #include "gt-tree-nested.h"