]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-nested.c
generalized IPA predicate on parameter
[thirdparty/gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45
46
47 /* The object of this pass is to lower the representation of a set of nested
48 functions in order to expose all of the gory details of the various
49 nonlocal references. We want to do this sooner rather than later, in
50 order to give us more freedom in emitting all of the functions in question.
51
52 Back in olden times, when gcc was young, we developed an insanely
53 complicated scheme whereby variables which were referenced nonlocally
54 were forced to live in the stack of the declaring function, and then
55 the nested functions magically discovered where these variables were
56 placed. In order for this scheme to function properly, it required
57 that the outer function be partially expanded, then we switch to
58 compiling the inner function, and once done with those we switch back
     to compiling the outer function.  Such delicate ordering requirements
     make it difficult to do whole translation unit optimizations
     involving such functions.
62
63 The implementation here is much more direct. Everything that can be
64 referenced by an inner function is a member of an explicitly created
65 structure herein called the "nonlocal frame struct". The incoming
66 static chain for a nested function is a pointer to this struct in
67 the parent. In this way, we settle on known offsets from a known
68 base, and so are decoupled from the logic that places objects in the
69 function's stack frame. More importantly, we don't have to wait for
70 that to happen -- since the compilation of the inner function is no
71 longer tied to a real stack frame, the nonlocal frame struct can be
72 allocated anywhere. Which means that the outer function is now
73 inlinable.
74
75 Theory of operation here is very simple. Iterate over all the
76 statements in all the functions (depth first) several times,
77 allocating structures and fields on demand. In general we want to
78 examine inner functions first, so that we can avoid making changes
79 to outer functions which are unnecessary.
80
81 The order of the passes matters a bit, in that later passes will be
82 skipped if it is discovered that the functions don't actually interact
83 at all. That is, they're nested in the lexical sense but could have
84 been written as independent functions without change. */
85
86
87 struct nesting_info
88 {
89 struct nesting_info *outer;
90 struct nesting_info *inner;
91 struct nesting_info *next;
92
93 hash_map<tree, tree> *field_map;
94 hash_map<tree, tree> *var_map;
95 hash_set<tree *> *mem_refs;
96 bitmap suppress_expansion;
97
98 tree context;
99 tree new_local_var_chain;
100 tree debug_var_chain;
101 tree frame_type;
102 tree frame_decl;
103 tree chain_field;
104 tree chain_decl;
105 tree nl_goto_field;
106
107 bool thunk_p;
108 bool any_parm_remapped;
109 bool any_tramp_created;
110 bool any_descr_created;
111 char static_chain_added;
112 };
113
114
115 /* Iterate over the nesting tree, starting with ROOT, depth first. */
116
117 static inline struct nesting_info *
118 iter_nestinfo_start (struct nesting_info *root)
119 {
120 while (root->inner)
121 root = root->inner;
122 return root;
123 }
124
125 static inline struct nesting_info *
126 iter_nestinfo_next (struct nesting_info *node)
127 {
128 if (node->next)
129 return iter_nestinfo_start (node->next);
130 return node->outer;
131 }
132
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
135
136 /* Obstack used for the bitmaps in the struct above. */
137 static struct bitmap_obstack nesting_info_bitmap_obstack;
138
139
140 /* We're working in so many different function contexts simultaneously,
141 that create_tmp_var is dangerous. Prevent mishap. */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143
144 /* Like create_tmp_var, except record the variable for registration at
145 the given nesting level. */
146
147 static tree
148 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
149 {
150 tree tmp_var;
151
152 /* If the type is of variable size or a type which must be created by the
153 frontend, something is wrong. Note that we explicitly allow
154 incomplete types here, since we create them ourselves here. */
155 gcc_assert (!TREE_ADDRESSABLE (type));
156 gcc_assert (!TYPE_SIZE_UNIT (type)
157 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
158
159 tmp_var = create_tmp_var_raw (type, prefix);
160 DECL_CONTEXT (tmp_var) = info->context;
161 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163 if (TREE_CODE (type) == COMPLEX_TYPE
164 || TREE_CODE (type) == VECTOR_TYPE)
165 DECL_GIMPLE_REG_P (tmp_var) = 1;
166
167 info->new_local_var_chain = tmp_var;
168
169 return tmp_var;
170 }
171
172 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */
173
174 static tree
175 build_simple_mem_ref_notrap (tree ptr)
176 {
177 tree t = build_simple_mem_ref (ptr);
178 TREE_THIS_NOTRAP (t) = 1;
179 return t;
180 }
181
182 /* Take the address of EXP to be used within function CONTEXT.
183 Mark it for addressability as necessary. */
184
185 tree
186 build_addr (tree exp)
187 {
188 mark_addressable (exp);
189 return build_fold_addr_expr (exp);
190 }
191
192 /* Insert FIELD into TYPE, sorted by alignment requirements. */
193
194 void
195 insert_field_into_struct (tree type, tree field)
196 {
197 tree *p;
198
199 DECL_CONTEXT (field) = type;
200
201 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
202 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
203 break;
204
205 DECL_CHAIN (field) = *p;
206 *p = field;
207
208 /* Set correct alignment for frame struct type. */
209 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
210 SET_TYPE_ALIGN (type, DECL_ALIGN (field));
211 }
212
213 /* Build or return the RECORD_TYPE that describes the frame state that is
214 shared between INFO->CONTEXT and its nested functions. This record will
215 not be complete until finalize_nesting_tree; up until that point we'll
216 be adding fields as necessary.
217
218 We also build the DECL that represents this frame in the function. */
219
220 static tree
221 get_frame_type (struct nesting_info *info)
222 {
223 tree type = info->frame_type;
224 if (!type)
225 {
226 char *name;
227
228 type = make_node (RECORD_TYPE);
229
230 name = concat ("FRAME.",
231 IDENTIFIER_POINTER (DECL_NAME (info->context)),
232 NULL);
233 TYPE_NAME (type) = get_identifier (name);
234 free (name);
235
236 info->frame_type = type;
237
238 /* Do not put info->frame_decl on info->new_local_var_chain,
239 so that we can declare it in the lexical blocks, which
240 makes sure virtual regs that end up appearing in its RTL
241 expression get substituted in instantiate_virtual_regs. */
242 info->frame_decl = create_tmp_var_raw (type, "FRAME");
243 DECL_CONTEXT (info->frame_decl) = info->context;
244 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
245 DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
246
247 /* ??? Always make it addressable for now, since it is meant to
248 be pointed to by the static chain pointer. This pessimizes
249 when it turns out that no static chains are needed because
250 the nested functions referencing non-local variables are not
251 reachable, but the true pessimization is to create the non-
252 local frame structure in the first place. */
253 TREE_ADDRESSABLE (info->frame_decl) = 1;
254 }
255
256 return type;
257 }
258
259 /* Return true if DECL should be referenced by pointer in the non-local frame
260 structure. */
261
262 static bool
263 use_pointer_in_frame (tree decl)
264 {
265 if (TREE_CODE (decl) == PARM_DECL)
266 {
267 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
268 sized DECLs, and inefficient to copy large aggregates. Don't bother
269 moving anything but scalar parameters. */
270 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
271 }
272 else
273 {
274 /* Variable-sized DECLs can only come from OMP clauses at this point
275 since the gimplifier has already turned the regular variables into
276 pointers. Do the same as the gimplifier. */
277 return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
278 }
279 }
280
281 /* Given DECL, a non-locally accessed variable, find or create a field
282 in the non-local frame structure for the given nesting context. */
283
284 static tree
285 lookup_field_for_decl (struct nesting_info *info, tree decl,
286 enum insert_option insert)
287 {
288 gcc_checking_assert (decl_function_context (decl) == info->context);
289
290 if (insert == NO_INSERT)
291 {
292 tree *slot = info->field_map->get (decl);
293 return slot ? *slot : NULL_TREE;
294 }
295
296 tree *slot = &info->field_map->get_or_insert (decl);
297 if (!*slot)
298 {
299 tree type = get_frame_type (info);
300 tree field = make_node (FIELD_DECL);
301 DECL_NAME (field) = DECL_NAME (decl);
302
303 if (use_pointer_in_frame (decl))
304 {
305 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
306 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
307 DECL_NONADDRESSABLE_P (field) = 1;
308 }
309 else
310 {
311 TREE_TYPE (field) = TREE_TYPE (decl);
312 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
313 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
314 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
315 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
316 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
317 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
318
319 /* Declare the transformation and adjust the original DECL. For a
320 variable or for a parameter when not optimizing, we make it point
321 to the field in the frame directly. For a parameter, we don't do
322 it when optimizing because the variable tracking pass will already
323 do the job, */
324 if (VAR_P (decl) || !optimize)
325 {
326 tree x
327 = build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
328 field, NULL_TREE);
329
330 /* If the next declaration is a PARM_DECL pointing to the DECL,
331 we need to adjust its VALUE_EXPR directly, since chains of
332 VALUE_EXPRs run afoul of garbage collection. This occurs
333 in Ada for Out parameters that aren't copied in. */
334 tree next = DECL_CHAIN (decl);
335 if (next
336 && TREE_CODE (next) == PARM_DECL
337 && DECL_HAS_VALUE_EXPR_P (next)
338 && DECL_VALUE_EXPR (next) == decl)
339 SET_DECL_VALUE_EXPR (next, x);
340
341 SET_DECL_VALUE_EXPR (decl, x);
342 DECL_HAS_VALUE_EXPR_P (decl) = 1;
343 }
344 }
345
346 insert_field_into_struct (type, field);
347 *slot = field;
348
349 if (TREE_CODE (decl) == PARM_DECL)
350 info->any_parm_remapped = true;
351 }
352
353 return *slot;
354 }
355
356 /* Build or return the variable that holds the static chain within
357 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
358
359 static tree
360 get_chain_decl (struct nesting_info *info)
361 {
362 tree decl = info->chain_decl;
363
364 if (!decl)
365 {
366 tree type;
367
368 type = get_frame_type (info->outer);
369 type = build_pointer_type (type);
370
371 /* Note that this variable is *not* entered into any BIND_EXPR;
372 the construction of this variable is handled specially in
373 expand_function_start and initialize_inlined_parameters.
374 Note also that it's represented as a parameter. This is more
375 close to the truth, since the initial value does come from
376 the caller. */
377 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
378 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
379 DECL_ARTIFICIAL (decl) = 1;
380 DECL_IGNORED_P (decl) = 1;
381 TREE_USED (decl) = 1;
382 DECL_CONTEXT (decl) = info->context;
383 DECL_ARG_TYPE (decl) = type;
384
385 /* Tell tree-inline.c that we never write to this variable, so
386 it can copy-prop the replacement value immediately. */
387 TREE_READONLY (decl) = 1;
388
389 info->chain_decl = decl;
390
391 if (dump_file
392 && (dump_flags & TDF_DETAILS)
393 && !DECL_STATIC_CHAIN (info->context))
394 fprintf (dump_file, "Setting static-chain for %s\n",
395 lang_hooks.decl_printable_name (info->context, 2));
396
397 DECL_STATIC_CHAIN (info->context) = 1;
398 }
399 return decl;
400 }
401
402 /* Build or return the field within the non-local frame state that holds
403 the static chain for INFO->CONTEXT. This is the way to walk back up
404 multiple nesting levels. */
405
406 static tree
407 get_chain_field (struct nesting_info *info)
408 {
409 tree field = info->chain_field;
410
411 if (!field)
412 {
413 tree type = build_pointer_type (get_frame_type (info->outer));
414
415 field = make_node (FIELD_DECL);
416 DECL_NAME (field) = get_identifier ("__chain");
417 TREE_TYPE (field) = type;
418 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
419 DECL_NONADDRESSABLE_P (field) = 1;
420
421 insert_field_into_struct (get_frame_type (info), field);
422
423 info->chain_field = field;
424
425 if (dump_file
426 && (dump_flags & TDF_DETAILS)
427 && !DECL_STATIC_CHAIN (info->context))
428 fprintf (dump_file, "Setting static-chain for %s\n",
429 lang_hooks.decl_printable_name (info->context, 2));
430
431 DECL_STATIC_CHAIN (info->context) = 1;
432 }
433 return field;
434 }
435
436 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
437
438 static tree
439 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
440 gcall *call)
441 {
442 tree t;
443
444 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
445 gimple_call_set_lhs (call, t);
446 if (! gsi_end_p (*gsi))
447 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
448 gsi_insert_before (gsi, call, GSI_SAME_STMT);
449
450 return t;
451 }
452
453
454 /* Copy EXP into a temporary. Allocate the temporary in the context of
455 INFO and insert the initialization statement before GSI. */
456
457 static tree
458 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
459 {
460 tree t;
461 gimple *stmt;
462
463 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
464 stmt = gimple_build_assign (t, exp);
465 if (! gsi_end_p (*gsi))
466 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
467 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
468
469 return t;
470 }
471
472
473 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
474
475 static tree
476 gsi_gimplify_val (struct nesting_info *info, tree exp,
477 gimple_stmt_iterator *gsi)
478 {
479 if (is_gimple_val (exp))
480 return exp;
481 else
482 return init_tmp_var (info, exp, gsi);
483 }
484
485 /* Similarly, but copy from the temporary and insert the statement
486 after the iterator. */
487
488 static tree
489 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
490 {
491 tree t;
492 gimple *stmt;
493
494 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
495 stmt = gimple_build_assign (exp, t);
496 if (! gsi_end_p (*gsi))
497 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
498 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
499
500 return t;
501 }
502
503 /* Build or return the type used to represent a nested function trampoline. */
504
505 static GTY(()) tree trampoline_type;
506
507 static tree
508 get_trampoline_type (struct nesting_info *info)
509 {
510 unsigned align, size;
511 tree t;
512
513 if (trampoline_type)
514 return trampoline_type;
515
516 align = TRAMPOLINE_ALIGNMENT;
517 size = TRAMPOLINE_SIZE;
518
519 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
520 then allocate extra space so that we can do dynamic alignment. */
521 if (align > STACK_BOUNDARY)
522 {
523 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
524 align = STACK_BOUNDARY;
525 }
526
527 t = build_index_type (size_int (size - 1));
528 t = build_array_type (char_type_node, t);
529 t = build_decl (DECL_SOURCE_LOCATION (info->context),
530 FIELD_DECL, get_identifier ("__data"), t);
531 SET_DECL_ALIGN (t, align);
532 DECL_USER_ALIGN (t) = 1;
533
534 trampoline_type = make_node (RECORD_TYPE);
535 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
536 TYPE_FIELDS (trampoline_type) = t;
537 layout_type (trampoline_type);
538 DECL_CONTEXT (t) = trampoline_type;
539
540 return trampoline_type;
541 }
542
543 /* Build or return the type used to represent a nested function descriptor. */
544
545 static GTY(()) tree descriptor_type;
546
547 static tree
548 get_descriptor_type (struct nesting_info *info)
549 {
550 /* The base alignment is that of a function. */
551 const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
552 tree t;
553
554 if (descriptor_type)
555 return descriptor_type;
556
557 t = build_index_type (integer_one_node);
558 t = build_array_type (ptr_type_node, t);
559 t = build_decl (DECL_SOURCE_LOCATION (info->context),
560 FIELD_DECL, get_identifier ("__data"), t);
561 SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
562 DECL_USER_ALIGN (t) = 1;
563
564 descriptor_type = make_node (RECORD_TYPE);
565 TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
566 TYPE_FIELDS (descriptor_type) = t;
567 layout_type (descriptor_type);
568 DECL_CONTEXT (t) = descriptor_type;
569
570 return descriptor_type;
571 }
572
573 /* Given DECL, a nested function, find or create an element in the
574 var map for this function. */
575
576 static tree
577 lookup_element_for_decl (struct nesting_info *info, tree decl,
578 enum insert_option insert)
579 {
580 if (insert == NO_INSERT)
581 {
582 tree *slot = info->var_map->get (decl);
583 return slot ? *slot : NULL_TREE;
584 }
585
586 tree *slot = &info->var_map->get_or_insert (decl);
587 if (!*slot)
588 *slot = build_tree_list (NULL_TREE, NULL_TREE);
589
590 return (tree) *slot;
591 }
592
593 /* Given DECL, a nested function, create a field in the non-local
594 frame structure for this function. */
595
596 static tree
597 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
598 {
599 tree field = make_node (FIELD_DECL);
600 DECL_NAME (field) = DECL_NAME (decl);
601 TREE_TYPE (field) = type;
602 TREE_ADDRESSABLE (field) = 1;
603 insert_field_into_struct (get_frame_type (info), field);
604 return field;
605 }
606
607 /* Given DECL, a nested function, find or create a field in the non-local
608 frame structure for a trampoline for this function. */
609
610 static tree
611 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
612 enum insert_option insert)
613 {
614 tree elt, field;
615
616 elt = lookup_element_for_decl (info, decl, insert);
617 if (!elt)
618 return NULL_TREE;
619
620 field = TREE_PURPOSE (elt);
621
622 if (!field && insert == INSERT)
623 {
624 field = create_field_for_decl (info, decl, get_trampoline_type (info));
625 TREE_PURPOSE (elt) = field;
626 info->any_tramp_created = true;
627 }
628
629 return field;
630 }
631
632 /* Given DECL, a nested function, find or create a field in the non-local
633 frame structure for a descriptor for this function. */
634
635 static tree
636 lookup_descr_for_decl (struct nesting_info *info, tree decl,
637 enum insert_option insert)
638 {
639 tree elt, field;
640
641 elt = lookup_element_for_decl (info, decl, insert);
642 if (!elt)
643 return NULL_TREE;
644
645 field = TREE_VALUE (elt);
646
647 if (!field && insert == INSERT)
648 {
649 field = create_field_for_decl (info, decl, get_descriptor_type (info));
650 TREE_VALUE (elt) = field;
651 info->any_descr_created = true;
652 }
653
654 return field;
655 }
656
657 /* Build or return the field within the non-local frame state that holds
658 the non-local goto "jmp_buf". The buffer itself is maintained by the
659 rtl middle-end as dynamic stack space is allocated. */
660
661 static tree
662 get_nl_goto_field (struct nesting_info *info)
663 {
664 tree field = info->nl_goto_field;
665 if (!field)
666 {
667 unsigned size;
668 tree type;
669
670 /* For __builtin_nonlocal_goto, we need N words. The first is the
671 frame pointer, the rest is for the target's stack pointer save
672 area. The number of words is controlled by STACK_SAVEAREA_MODE;
673 not the best interface, but it'll do for now. */
674 if (Pmode == ptr_mode)
675 type = ptr_type_node;
676 else
677 type = lang_hooks.types.type_for_mode (Pmode, 1);
678
679 scalar_int_mode mode
680 = as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
681 size = GET_MODE_SIZE (mode);
682 size = size / GET_MODE_SIZE (Pmode);
683 size = size + 1;
684
685 type = build_array_type
686 (type, build_index_type (size_int (size)));
687
688 field = make_node (FIELD_DECL);
689 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
690 TREE_TYPE (field) = type;
691 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
692 TREE_ADDRESSABLE (field) = 1;
693
694 insert_field_into_struct (get_frame_type (info), field);
695
696 info->nl_goto_field = field;
697 }
698
699 return field;
700 }
701
702 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
703
704 static void
705 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
706 struct nesting_info *info, gimple_seq *pseq)
707 {
708 struct walk_stmt_info wi;
709
710 memset (&wi, 0, sizeof (wi));
711 wi.info = info;
712 wi.val_only = true;
713 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
714 }
715
716
717 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
718
719 static inline void
720 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
721 struct nesting_info *info)
722 {
723 gimple_seq body = gimple_body (info->context);
724 walk_body (callback_stmt, callback_op, info, &body);
725 gimple_set_body (info->context, body);
726 }
727
728 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
729
730 static void
731 walk_gimple_omp_for (gomp_for *for_stmt,
732 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
733 struct nesting_info *info)
734 {
735 struct walk_stmt_info wi;
736 gimple_seq seq;
737 tree t;
738 size_t i;
739
740 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
741
742 seq = NULL;
743 memset (&wi, 0, sizeof (wi));
744 wi.info = info;
745 wi.gsi = gsi_last (seq);
746
747 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
748 {
749 wi.val_only = false;
750 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
751 &wi, NULL);
752 wi.val_only = true;
753 wi.is_lhs = false;
754 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
755 &wi, NULL);
756
757 wi.val_only = true;
758 wi.is_lhs = false;
759 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
760 &wi, NULL);
761
762 t = gimple_omp_for_incr (for_stmt, i);
763 gcc_assert (BINARY_CLASS_P (t));
764 wi.val_only = false;
765 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
766 wi.val_only = true;
767 wi.is_lhs = false;
768 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
769 }
770
771 seq = gsi_seq (wi.gsi);
772 if (!gimple_seq_empty_p (seq))
773 {
774 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
775 annotate_all_with_location (seq, gimple_location (for_stmt));
776 gimple_seq_add_seq (&pre_body, seq);
777 gimple_omp_for_set_pre_body (for_stmt, pre_body);
778 }
779 }
780
781 /* Similarly for ROOT and all functions nested underneath, depth first. */
782
783 static void
784 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
785 struct nesting_info *root)
786 {
787 struct nesting_info *n;
788 FOR_EACH_NEST_INFO (n, root)
789 walk_function (callback_stmt, callback_op, n);
790 }
791
792
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
796 enclosing function is inlined. But that remapping would also have to be
797 done in the types of the PARM_DECLs of the nested function, meaning the
798 argument types of that function will disagree with the arguments in the
799 calls to that function. So we'd either have to make a copy of the nested
800 function corresponding to each time the enclosing function was inlined or
801 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
802 function. The former is not practical. The latter would still require
803 detecting this case to know when to add the conversions. So, for now at
804 least, we don't inline such an enclosing function.
805
806 We have to do that check recursively, so here return indicating whether
807 FNDECL has such a nested function. ORIG_FN is the function we were
808 trying to inline to use for checking whether any argument is variably
809 modified by anything in it.
810
811 It would be better to do this in tree-inline.c so that we could give
812 the appropriate warning for why a function can't be inlined, but that's
813 too late since the nesting structure has already been flattened and
814 adding a flag just to record this fact seems a waste of a flag. */
815
816 static bool
817 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
818 {
819 struct cgraph_node *cgn = cgraph_node::get (fndecl);
820 tree arg;
821
822 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
823 {
824 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
825 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
826 return true;
827
828 if (check_for_nested_with_variably_modified (cgn->decl,
829 orig_fndecl))
830 return true;
831 }
832
833 return false;
834 }
835
836 /* Construct our local datastructure describing the function nesting
837 tree rooted by CGN. */
838
839 static struct nesting_info *
840 create_nesting_tree (struct cgraph_node *cgn)
841 {
842 struct nesting_info *info = XCNEW (struct nesting_info);
843 info->field_map = new hash_map<tree, tree>;
844 info->var_map = new hash_map<tree, tree>;
845 info->mem_refs = new hash_set<tree *>;
846 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
847 info->context = cgn->decl;
848 info->thunk_p = cgn->thunk.thunk_p;
849
850 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
851 {
852 struct nesting_info *sub = create_nesting_tree (cgn);
853 sub->outer = info;
854 sub->next = info->inner;
855 info->inner = sub;
856 }
857
858 /* See discussion at check_for_nested_with_variably_modified for a
859 discussion of why this has to be here. */
860 if (check_for_nested_with_variably_modified (info->context, info->context))
861 DECL_UNINLINABLE (info->context) = true;
862
863 return info;
864 }
865
866 /* Return an expression computing the static chain for TARGET_CONTEXT
867 from INFO->CONTEXT. Insert any necessary computations before TSI. */
868
869 static tree
870 get_static_chain (struct nesting_info *info, tree target_context,
871 gimple_stmt_iterator *gsi)
872 {
873 struct nesting_info *i;
874 tree x;
875
876 if (info->context == target_context)
877 {
878 x = build_addr (info->frame_decl);
879 info->static_chain_added |= 1;
880 }
881 else
882 {
883 x = get_chain_decl (info);
884 info->static_chain_added |= 2;
885
886 for (i = info->outer; i->context != target_context; i = i->outer)
887 {
888 tree field = get_chain_field (i);
889
890 x = build_simple_mem_ref_notrap (x);
891 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
892 x = init_tmp_var (info, x, gsi);
893 }
894 }
895
896 return x;
897 }
898
899
900 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
901 frame as seen from INFO->CONTEXT. Insert any necessary computations
902 before GSI. */
903
904 static tree
905 get_frame_field (struct nesting_info *info, tree target_context,
906 tree field, gimple_stmt_iterator *gsi)
907 {
908 struct nesting_info *i;
909 tree x;
910
911 if (info->context == target_context)
912 {
913 /* Make sure frame_decl gets created. */
914 (void) get_frame_type (info);
915 x = info->frame_decl;
916 info->static_chain_added |= 1;
917 }
918 else
919 {
920 x = get_chain_decl (info);
921 info->static_chain_added |= 2;
922
923 for (i = info->outer; i->context != target_context; i = i->outer)
924 {
925 tree field = get_chain_field (i);
926
927 x = build_simple_mem_ref_notrap (x);
928 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
929 x = init_tmp_var (info, x, gsi);
930 }
931
932 x = build_simple_mem_ref_notrap (x);
933 }
934
935 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
936 return x;
937 }
938
939 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
940
941 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
942 in the nested function with DECL_VALUE_EXPR set to reference the true
943 variable in the parent function. This is used both for debug info
944 and in OMP lowering. */
945
946 static tree
947 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
948 {
949 tree target_context;
950 struct nesting_info *i;
951 tree x, field, new_decl;
952
953 tree *slot = &info->var_map->get_or_insert (decl);
954
955 if (*slot)
956 return *slot;
957
958 target_context = decl_function_context (decl);
959
960 /* A copy of the code in get_frame_field, but without the temporaries. */
961 if (info->context == target_context)
962 {
963 /* Make sure frame_decl gets created. */
964 (void) get_frame_type (info);
965 x = info->frame_decl;
966 i = info;
967 info->static_chain_added |= 1;
968 }
969 else
970 {
971 x = get_chain_decl (info);
972 info->static_chain_added |= 2;
973 for (i = info->outer; i->context != target_context; i = i->outer)
974 {
975 field = get_chain_field (i);
976 x = build_simple_mem_ref_notrap (x);
977 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
978 }
979 x = build_simple_mem_ref_notrap (x);
980 }
981
982 field = lookup_field_for_decl (i, decl, INSERT);
983 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
984 if (use_pointer_in_frame (decl))
985 x = build_simple_mem_ref_notrap (x);
986
987 /* ??? We should be remapping types as well, surely. */
988 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
989 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
990 DECL_CONTEXT (new_decl) = info->context;
991 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
992 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
993 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
994 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
995 TREE_READONLY (new_decl) = TREE_READONLY (decl);
996 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
997 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
998 if ((TREE_CODE (decl) == PARM_DECL
999 || TREE_CODE (decl) == RESULT_DECL
1000 || VAR_P (decl))
1001 && DECL_BY_REFERENCE (decl))
1002 DECL_BY_REFERENCE (new_decl) = 1;
1003
1004 SET_DECL_VALUE_EXPR (new_decl, x);
1005 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1006
1007 *slot = new_decl;
1008 DECL_CHAIN (new_decl) = info->debug_var_chain;
1009 info->debug_var_chain = new_decl;
1010
1011 if (!optimize
1012 && info->context != target_context
1013 && variably_modified_type_p (TREE_TYPE (decl), NULL))
1014 note_nonlocal_vla_type (info, TREE_TYPE (decl));
1015
1016 return new_decl;
1017 }
1018
1019
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default do not recurse; each case below re-enables recursion or
     walks sub-operands explicitly as needed.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* A decl belonging to this very function needs no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  /* OMP clause handling requested the debug decl for this DECL
	     instead of an explicit frame walk.  */
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		/* The frame stores only a pointer to the object; load the
		   pointer into a temporary, then dereference it.  */
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	if (wi->val_only)
	  {
	    /* The context needs a simple value, so spill through a
	       temporary: save for stores, load for reads.  */
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address context (!val_only), tracking
	   whether anything underneath was rewritten.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    /* Walk the optional operand 2 of the COMPONENT_REF.  */
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* The index and the optional lower-bound and element-size
		 operands may all mention nonlocal decls (VLA bounds).  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      /* Finally walk the innermost base object of the reference chain.  */
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Generic expression: recurse into operands as rvalues.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1179
1180 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1181 struct walk_stmt_info *);
1182
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions in the clause chain
   *PCLAUSES.  WI is as in convert_nonlocal_reference_op.  Returns true
   if any rewritten clause requires the static chain to be passed into
   the construct.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller restores the
     original after walking the construct body.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* A reduction with a placeholder carries GIMPLE sequences that
	     need a second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The linear step is an rvalue expression; rewrite it first.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* Nonlocal decl: replace it with its debug decl and suppress
		 the normal frame-field expansion inside the body.  */
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  /* The operand may be an arbitrary reference expression rather
	     than a bare decl; walk it as an rvalue.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* These clauses carry no decls or expressions to rewrite.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses, now that the suppression set
     reflects the rewritten decls.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily re-parent the placeholder decls so the walk
		 treats them as local to this function, then restore.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1445
1446 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1447
1448 static void
1449 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1450 {
1451 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1452 type = TREE_TYPE (type);
1453
1454 if (TYPE_NAME (type)
1455 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1456 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1457 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1458
1459 while (POINTER_TYPE_P (type)
1460 || TREE_CODE (type) == VECTOR_TYPE
1461 || TREE_CODE (type) == FUNCTION_TYPE
1462 || TREE_CODE (type) == METHOD_TYPE)
1463 type = TREE_TYPE (type);
1464
1465 if (TREE_CODE (type) == ARRAY_TYPE)
1466 {
1467 tree domain, t;
1468
1469 note_nonlocal_vla_type (info, TREE_TYPE (type));
1470 domain = TYPE_DOMAIN (type);
1471 if (domain)
1472 {
1473 t = TYPE_MIN_VALUE (domain);
1474 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1475 && decl_function_context (t) != info->context)
1476 get_nonlocal_debug_decl (info, t);
1477 t = TYPE_MAX_VALUE (domain);
1478 if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1479 && decl_function_context (t) != info->context)
1480 get_nonlocal_debug_decl (info, t);
1481 }
1482 }
1483 }
1484
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams get the simple clauses+body treatment; offloaded
	 teams fall through to the taskreg handling below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  /* The body needs the static chain; hand it to the outlined
	     region via a firstprivate clause.  */
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect temporaries created while walking the body so they get
	 declared inside the construct rather than outside it.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* Also walk the loop index, bounds and step expressions, which are
	 not part of the body sequence.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target region behaves like the simple constructs
	 above; an offloaded one must map the static chain explicitly.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  /* Copy the static chain to the offload device via a map(to:)
	     clause.  */
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* No clauses to rewrite; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (VAR_P (decl)
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	/* Let the walker visit the bind body normally.  */
	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  /* A clobber of a nonlocal automatic variable would otherwise be
	     rewritten into a frame access; drop it instead.  */
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1698
1699
1700 /* A subroutine of convert_local_reference. Create a local variable
1701 in the parent function with DECL_VALUE_EXPR set to reference the
1702 field in FRAME. This is used both for debug info and in OMP
1703 lowering. */
1704
1705 static tree
1706 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1707 {
1708 tree x, new_decl;
1709
1710 tree *slot = &info->var_map->get_or_insert (decl);
1711 if (*slot)
1712 return *slot;
1713
1714 /* Make sure frame_decl gets created. */
1715 (void) get_frame_type (info);
1716 x = info->frame_decl;
1717 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1718
1719 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1720 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1721 DECL_CONTEXT (new_decl) = info->context;
1722 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1723 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1724 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1725 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1726 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1727 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1728 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1729 if ((TREE_CODE (decl) == PARM_DECL
1730 || TREE_CODE (decl) == RESULT_DECL
1731 || VAR_P (decl))
1732 && DECL_BY_REFERENCE (decl))
1733 DECL_BY_REFERENCE (new_decl) = 1;
1734
1735 SET_DECL_VALUE_EXPR (new_decl, x);
1736 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1737 *slot = new_decl;
1738
1739 DECL_CHAIN (new_decl) = info->debug_var_chain;
1740 info->debug_var_chain = new_decl;
1741
1742 /* Do not emit debug info twice. */
1743 DECL_IGNORED_P (decl) = 1;
1744
1745 return new_decl;
1746 }
1747
1748
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default do not recurse; the cases below re-enable recursion or
     walk sub-operands explicitly.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Inside OMP constructs the clause handling asked for the
	     debug decl instead of a direct frame reference.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, t, field);
	  else
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  if (wi->val_only)
	    {
	      /* The context needs a simple value; spill through a
		 temporary (save for stores, load for reads).  */
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* Walk the operand in address context (!val_only), tracking
	 whether anything underneath was rewritten.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    /* Walk the optional operand 2 of the COMPONENT_REF.  */
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* The index and the optional lower-bound and element-size
		 operands may mention frame variables (VLA bounds).  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      /* Finally walk the innermost base object of the reference chain.  */
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Generic expression: recurse into operands as rvalues.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1900
1901 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1902 struct walk_stmt_info *);
1903
1904 /* Helper for convert_local_reference. Convert all the references in
1905 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1906
1907 static bool
1908 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1909 {
1910 struct nesting_info *const info = (struct nesting_info *) wi->info;
1911 bool need_frame = false, need_stmts = false;
1912 tree clause, decl;
1913 int dummy;
1914 bitmap new_suppress;
1915
1916 new_suppress = BITMAP_GGC_ALLOC ();
1917 bitmap_copy (new_suppress, info->suppress_expansion);
1918
1919 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1920 {
1921 switch (OMP_CLAUSE_CODE (clause))
1922 {
1923 case OMP_CLAUSE_REDUCTION:
1924 case OMP_CLAUSE_IN_REDUCTION:
1925 case OMP_CLAUSE_TASK_REDUCTION:
1926 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1927 need_stmts = true;
1928 goto do_decl_clause;
1929
1930 case OMP_CLAUSE_LASTPRIVATE:
1931 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1932 need_stmts = true;
1933 goto do_decl_clause;
1934
1935 case OMP_CLAUSE_LINEAR:
1936 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1937 need_stmts = true;
1938 wi->val_only = true;
1939 wi->is_lhs = false;
1940 convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1941 wi);
1942 goto do_decl_clause;
1943
1944 case OMP_CLAUSE_PRIVATE:
1945 case OMP_CLAUSE_FIRSTPRIVATE:
1946 case OMP_CLAUSE_COPYPRIVATE:
1947 case OMP_CLAUSE_SHARED:
1948 case OMP_CLAUSE_TO_DECLARE:
1949 case OMP_CLAUSE_LINK:
1950 case OMP_CLAUSE_USE_DEVICE_PTR:
1951 case OMP_CLAUSE_USE_DEVICE_ADDR:
1952 case OMP_CLAUSE_IS_DEVICE_PTR:
1953 do_decl_clause:
1954 decl = OMP_CLAUSE_DECL (clause);
1955 if (VAR_P (decl)
1956 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1957 break;
1958 if (decl_function_context (decl) == info->context
1959 && !use_pointer_in_frame (decl))
1960 {
1961 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1962 if (field)
1963 {
1964 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1965 OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1966 bitmap_set_bit (new_suppress, DECL_UID (decl));
1967 OMP_CLAUSE_DECL (clause)
1968 = get_local_debug_decl (info, decl, field);
1969 need_frame = true;
1970 }
1971 }
1972 break;
1973
1974 case OMP_CLAUSE_SCHEDULE:
1975 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1976 break;
1977 /* FALLTHRU */
1978 case OMP_CLAUSE_FINAL:
1979 case OMP_CLAUSE_IF:
1980 case OMP_CLAUSE_NUM_THREADS:
1981 case OMP_CLAUSE_DEPEND:
1982 case OMP_CLAUSE_DEVICE:
1983 case OMP_CLAUSE_NUM_TEAMS:
1984 case OMP_CLAUSE_THREAD_LIMIT:
1985 case OMP_CLAUSE_SAFELEN:
1986 case OMP_CLAUSE_SIMDLEN:
1987 case OMP_CLAUSE_PRIORITY:
1988 case OMP_CLAUSE_GRAINSIZE:
1989 case OMP_CLAUSE_NUM_TASKS:
1990 case OMP_CLAUSE_HINT:
1991 case OMP_CLAUSE_NUM_GANGS:
1992 case OMP_CLAUSE_NUM_WORKERS:
1993 case OMP_CLAUSE_VECTOR_LENGTH:
1994 case OMP_CLAUSE_GANG:
1995 case OMP_CLAUSE_WORKER:
1996 case OMP_CLAUSE_VECTOR:
1997 case OMP_CLAUSE_ASYNC:
1998 case OMP_CLAUSE_WAIT:
1999 /* Several OpenACC clauses have optional arguments. Check if they
2000 are present. */
2001 if (OMP_CLAUSE_OPERAND (clause, 0))
2002 {
2003 wi->val_only = true;
2004 wi->is_lhs = false;
2005 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2006 &dummy, wi);
2007 }
2008
2009 /* The gang clause accepts two arguments. */
2010 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
2011 && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
2012 {
2013 wi->val_only = true;
2014 wi->is_lhs = false;
2015 convert_nonlocal_reference_op
2016 (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2017 }
2018 break;
2019
2020 case OMP_CLAUSE_DIST_SCHEDULE:
2021 if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2022 {
2023 wi->val_only = true;
2024 wi->is_lhs = false;
2025 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2026 &dummy, wi);
2027 }
2028 break;
2029
2030 case OMP_CLAUSE_MAP:
2031 case OMP_CLAUSE_TO:
2032 case OMP_CLAUSE_FROM:
2033 if (OMP_CLAUSE_SIZE (clause))
2034 {
2035 wi->val_only = true;
2036 wi->is_lhs = false;
2037 convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2038 &dummy, wi);
2039 }
2040 if (DECL_P (OMP_CLAUSE_DECL (clause)))
2041 goto do_decl_clause;
2042 wi->val_only = true;
2043 wi->is_lhs = false;
2044 walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2045 wi, NULL);
2046 break;
2047
2048 case OMP_CLAUSE_ALIGNED:
2049 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2050 {
2051 wi->val_only = true;
2052 wi->is_lhs = false;
2053 convert_local_reference_op
2054 (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2055 }
2056 /* FALLTHRU */
2057 case OMP_CLAUSE_NONTEMPORAL:
2058 /* Like do_decl_clause, but don't add any suppression. */
2059 decl = OMP_CLAUSE_DECL (clause);
2060 if (VAR_P (decl)
2061 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2062 break;
2063 if (decl_function_context (decl) == info->context
2064 && !use_pointer_in_frame (decl))
2065 {
2066 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2067 if (field)
2068 {
2069 OMP_CLAUSE_DECL (clause)
2070 = get_local_debug_decl (info, decl, field);
2071 need_frame = true;
2072 }
2073 }
2074 break;
2075
2076 case OMP_CLAUSE_NOWAIT:
2077 case OMP_CLAUSE_ORDERED:
2078 case OMP_CLAUSE_DEFAULT:
2079 case OMP_CLAUSE_COPYIN:
2080 case OMP_CLAUSE_COLLAPSE:
2081 case OMP_CLAUSE_TILE:
2082 case OMP_CLAUSE_UNTIED:
2083 case OMP_CLAUSE_MERGEABLE:
2084 case OMP_CLAUSE_PROC_BIND:
2085 case OMP_CLAUSE_NOGROUP:
2086 case OMP_CLAUSE_THREADS:
2087 case OMP_CLAUSE_SIMD:
2088 case OMP_CLAUSE_DEFAULTMAP:
2089 case OMP_CLAUSE_ORDER:
2090 case OMP_CLAUSE_SEQ:
2091 case OMP_CLAUSE_INDEPENDENT:
2092 case OMP_CLAUSE_AUTO:
2093 case OMP_CLAUSE_IF_PRESENT:
2094 case OMP_CLAUSE_FINALIZE:
2095 case OMP_CLAUSE__CONDTEMP_:
2096 case OMP_CLAUSE__SCANTEMP_:
2097 break;
2098
2099 /* The following clause belongs to the OpenACC cache directive, which
2100 is discarded during gimplification. */
2101 case OMP_CLAUSE__CACHE_:
2102 /* The following clauses are only allowed in the OpenMP declare simd
2103 directive, so not seen here. */
2104 case OMP_CLAUSE_UNIFORM:
2105 case OMP_CLAUSE_INBRANCH:
2106 case OMP_CLAUSE_NOTINBRANCH:
2107 /* The following clauses are only allowed on OpenMP cancel and
2108 cancellation point directives, which at this point have already
2109 been lowered into a function call. */
2110 case OMP_CLAUSE_FOR:
2111 case OMP_CLAUSE_PARALLEL:
2112 case OMP_CLAUSE_SECTIONS:
2113 case OMP_CLAUSE_TASKGROUP:
2114 /* The following clauses are only added during OMP lowering; nested
2115 function decomposition happens before that. */
2116 case OMP_CLAUSE__LOOPTEMP_:
2117 case OMP_CLAUSE__REDUCTEMP_:
2118 case OMP_CLAUSE__SIMDUID_:
2119 case OMP_CLAUSE__GRIDDIM_:
2120 case OMP_CLAUSE__SIMT_:
2121 /* Anything else. */
2122 default:
2123 gcc_unreachable ();
2124 }
2125 }
2126
2127 info->suppress_expansion = new_suppress;
2128
2129 if (need_stmts)
2130 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2131 switch (OMP_CLAUSE_CODE (clause))
2132 {
2133 case OMP_CLAUSE_REDUCTION:
2134 case OMP_CLAUSE_IN_REDUCTION:
2135 case OMP_CLAUSE_TASK_REDUCTION:
2136 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2137 {
2138 tree old_context
2139 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2140 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2141 = info->context;
2142 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2143 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2144 = info->context;
2145 walk_body (convert_local_reference_stmt,
2146 convert_local_reference_op, info,
2147 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2148 walk_body (convert_local_reference_stmt,
2149 convert_local_reference_op, info,
2150 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2151 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2152 = old_context;
2153 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2154 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2155 = old_context;
2156 }
2157 break;
2158
2159 case OMP_CLAUSE_LASTPRIVATE:
2160 walk_body (convert_local_reference_stmt,
2161 convert_local_reference_op, info,
2162 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2163 break;
2164
2165 case OMP_CLAUSE_LINEAR:
2166 walk_body (convert_local_reference_stmt,
2167 convert_local_reference_op, info,
2168 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2169 break;
2170
2171 default:
2172 break;
2173 }
2174
2175 return need_frame;
2176 }
2177
2178
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* A non-host teams construct only needs its clauses and body
	 walked; a host teams construct is treated exactly like a
	 parallel/task region below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* convert_local_omp_clauses returns true when the rewrite made the
	 region reference the local FRAME object; in that case the frame
	 must be made visible inside the region via a shared clause.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
				     wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* Walking the body may itself have introduced a frame reference
	 (bit 4 of static_chain_added); add the shared clause now if we
	 did not already do so above.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while walking the body belong inside the
	 region, not in the enclosing function.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded targets only need their clauses and body walked;
	 offloaded ones additionally get the frame mapped in via an
	 OMP_CLAUSE_MAP, mirroring the parallel/task handling above.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Only rvalues appear in a condition; let the walker visit the
	 operands with that context set.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  /* Drop clobbers of locals that have been moved into the frame
	     object rather than rewriting them into frame references.  */
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2411
2412
2413 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2414 that reference labels from outer functions. The rewrite will be a
2415 call to __builtin_nonlocal_goto. */
2416
2417 static tree
2418 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2419 struct walk_stmt_info *wi)
2420 {
2421 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2422 tree label, new_label, target_context, x, field;
2423 gcall *call;
2424 gimple *stmt = gsi_stmt (*gsi);
2425
2426 if (gimple_code (stmt) != GIMPLE_GOTO)
2427 {
2428 *handled_ops_p = false;
2429 return NULL_TREE;
2430 }
2431
2432 label = gimple_goto_dest (stmt);
2433 if (TREE_CODE (label) != LABEL_DECL)
2434 {
2435 *handled_ops_p = false;
2436 return NULL_TREE;
2437 }
2438
2439 target_context = decl_function_context (label);
2440 if (target_context == info->context)
2441 {
2442 *handled_ops_p = false;
2443 return NULL_TREE;
2444 }
2445
2446 for (i = info->outer; target_context != i->context; i = i->outer)
2447 continue;
2448
2449 /* The original user label may also be use for a normal goto, therefore
2450 we must create a new label that will actually receive the abnormal
2451 control transfer. This new label will be marked LABEL_NONLOCAL; this
2452 mark will trigger proper behavior in the cfg, as well as cause the
2453 (hairy target-specific) non-local goto receiver code to be generated
2454 when we expand rtl. Enter this association into var_map so that we
2455 can insert the new label into the IL during a second pass. */
2456 tree *slot = &i->var_map->get_or_insert (label);
2457 if (*slot == NULL)
2458 {
2459 new_label = create_artificial_label (UNKNOWN_LOCATION);
2460 DECL_NONLOCAL (new_label) = 1;
2461 *slot = new_label;
2462 }
2463 else
2464 new_label = *slot;
2465
2466 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2467 field = get_nl_goto_field (i);
2468 x = get_frame_field (info, target_context, field, gsi);
2469 x = build_addr (x);
2470 x = gsi_gimplify_val (info, x, gsi);
2471 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2472 2, build_addr (new_label), x);
2473 gsi_replace (gsi, call, false);
2474
2475 /* We have handled all of STMT's operands, no need to keep going. */
2476 *handled_ops_p = true;
2477 return NULL_TREE;
2478 }
2479
2480
2481 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2482 are referenced via nonlocal goto from a nested function. The rewrite
2483 will involve installing a newly generated DECL_NONLOCAL label, and
2484 (potentially) a branch around the rtl gunk that is assumed to be
2485 attached to such a label. */
2486
2487 static tree
2488 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2489 struct walk_stmt_info *wi)
2490 {
2491 struct nesting_info *const info = (struct nesting_info *) wi->info;
2492 tree label, new_label;
2493 gimple_stmt_iterator tmp_gsi;
2494 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2495
2496 if (!stmt)
2497 {
2498 *handled_ops_p = false;
2499 return NULL_TREE;
2500 }
2501
2502 label = gimple_label_label (stmt);
2503
2504 tree *slot = info->var_map->get (label);
2505 if (!slot)
2506 {
2507 *handled_ops_p = false;
2508 return NULL_TREE;
2509 }
2510
2511 /* If there's any possibility that the previous statement falls through,
2512 then we must branch around the new non-local label. */
2513 tmp_gsi = wi->gsi;
2514 gsi_prev (&tmp_gsi);
2515 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2516 {
2517 gimple *stmt = gimple_build_goto (label);
2518 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2519 }
2520
2521 new_label = (tree) *slot;
2522 stmt = gimple_build_label (new_label);
2523 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2524
2525 *handled_ops_p = true;
2526 return NULL_TREE;
2527 }
2528
2529
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline.  */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original address-of expression with the adjusted
	 trampoline (or descriptor) pointer.  */
      *tp = x;
      break;

    default:
      /* Keep walking into subexpressions; types and decls cannot
	 contain further ADDR_EXPRs of interest.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2613
2614
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* A host teams construct is handled like a parallel/task region;
	 a non-host one is left to the generic operand walk.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      do_parallel:
      {
	/* Temporaries created while walking the region body must be
	   declared inside the region, and any CHAIN or FRAME object the
	   walk started using must be made available there via clauses.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 corresponds to the FRAME object, bit 1 to the CHAIN
	   object (see the decl selection below).  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions take a map clause instead of
		   a data-sharing clause.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2719
2720
2721
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Skip calls that already carry a static chain, and indirect
	 calls (no fndecl to inspect).  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Record what feeds the chain: bit 0 when the callee is our
	     direct child (chain is our own frame), bit 1 when the chain
	     comes from an outer function.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams constructs only need their body walked; host
	 ones are handled like parallel/task regions below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Any CHAIN or FRAME object a rewritten call inside the region
	 started using must be made available there via data-sharing
	 clauses; bit 0 = FRAME, bit 1 = CHAIN.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* As above, but offloaded targets take map clauses instead of
	 data-sharing clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2866
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks are handled separately below: they mirror their alias.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* A thunk inherits the static-chain property of the function it
     forwards to.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = cgraph_node::get (decl)->thunk.alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate until the number of functions needing a static chain
	 reaches a fixed point.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate to thunks after each round.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = cgraph_node::get (decl)->thunk.alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2956
/* Context for remapping decls and types via the tree-inline machinery.
   CB must be the first member so that a copy_body_data pointer handed to
   the callbacks can be cast back to nesting_copy_body_data (as done in
   nesting_copy_decl).  */
struct nesting_copy_body_data
{
  copy_body_data cb;
  /* The nesting_info whose var_map drives the remapping.  */
  struct nesting_info *root;
};
2962
2963 /* A helper subroutine for debug_var_chain type remapping. */
2964
2965 static tree
2966 nesting_copy_decl (tree decl, copy_body_data *id)
2967 {
2968 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2969 tree *slot = nid->root->var_map->get (decl);
2970
2971 if (slot)
2972 return (tree) *slot;
2973
2974 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2975 {
2976 tree new_decl = copy_decl_no_change (decl, id);
2977 DECL_ORIGINAL_TYPE (new_decl)
2978 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2979 return new_decl;
2980 }
2981
2982 if (VAR_P (decl)
2983 || TREE_CODE (decl) == PARM_DECL
2984 || TREE_CODE (decl) == RESULT_DECL)
2985 return decl;
2986
2987 return copy_decl_no_change (decl, id);
2988 }
2989
2990 /* A helper function for remap_vla_decls. See if *TP contains
2991 some remapped variables. */
2992
2993 static tree
2994 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2995 {
2996 struct nesting_info *root = (struct nesting_info *) data;
2997 tree t = *tp;
2998
2999 if (DECL_P (t))
3000 {
3001 *walk_subtrees = 0;
3002 tree *slot = root->var_map->get (t);
3003
3004 if (slot)
3005 return *slot;
3006 }
3007 return NULL;
3008 }
3009
3010 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
3011 involved. */
3012
3013 static void
3014 remap_vla_decls (tree block, struct nesting_info *root)
3015 {
3016 tree var, subblock, val, type;
3017 struct nesting_copy_body_data id;
3018
3019 for (subblock = BLOCK_SUBBLOCKS (block);
3020 subblock;
3021 subblock = BLOCK_CHAIN (subblock))
3022 remap_vla_decls (subblock, root);
3023
3024 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3025 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3026 {
3027 val = DECL_VALUE_EXPR (var);
3028 type = TREE_TYPE (var);
3029
3030 if (!(TREE_CODE (val) == INDIRECT_REF
3031 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3032 && variably_modified_type_p (type, NULL)))
3033 continue;
3034
3035 if (root->var_map->get (TREE_OPERAND (val, 0))
3036 || walk_tree (&type, contains_remapped_vars, root, NULL))
3037 break;
3038 }
3039
3040 if (var == NULL_TREE)
3041 return;
3042
3043 memset (&id, 0, sizeof (id));
3044 id.cb.copy_decl = nesting_copy_decl;
3045 id.cb.decl_map = new hash_map<tree, tree>;
3046 id.root = root;
3047
3048 for (; var; var = DECL_CHAIN (var))
3049 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3050 {
3051 struct nesting_info *i;
3052 tree newt, context;
3053
3054 val = DECL_VALUE_EXPR (var);
3055 type = TREE_TYPE (var);
3056
3057 if (!(TREE_CODE (val) == INDIRECT_REF
3058 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
3059 && variably_modified_type_p (type, NULL)))
3060 continue;
3061
3062 tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
3063 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
3064 continue;
3065
3066 context = decl_function_context (var);
3067 for (i = root; i; i = i->outer)
3068 if (i->context == context)
3069 break;
3070
3071 if (i == NULL)
3072 continue;
3073
3074 /* Fully expand value expressions. This avoids having debug variables
3075 only referenced from them and that can be swept during GC. */
3076 if (slot)
3077 {
3078 tree t = (tree) *slot;
3079 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
3080 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
3081 }
3082
3083 id.cb.src_fn = i->context;
3084 id.cb.dst_fn = i->context;
3085 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3086
3087 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
3088 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3089 {
3090 newt = TREE_TYPE (newt);
3091 type = TREE_TYPE (type);
3092 }
3093 if (TYPE_NAME (newt)
3094 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3095 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3096 && newt != type
3097 && TYPE_NAME (newt) == TYPE_NAME (type))
3098 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3099
3100 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
3101 if (val != DECL_VALUE_EXPR (var))
3102 SET_DECL_VALUE_EXPR (var, val);
3103 }
3104
3105 delete id.cb.decl_map;
3106 }
3107
3108 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3109 involved. */
3110
3111 static void
3112 fixup_vla_decls (tree block)
3113 {
3114 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3115 if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3116 {
3117 tree val = DECL_VALUE_EXPR (var);
3118
3119 if (!(TREE_CODE (val) == INDIRECT_REF
3120 && VAR_P (TREE_OPERAND (val, 0))
3121 && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3122 continue;
3123
3124 /* Fully expand value expressions. This avoids having debug variables
3125 only referenced from them and that can be swept during GC. */
3126 val = build1 (INDIRECT_REF, TREE_TYPE (val),
3127 DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3128 SET_DECL_VALUE_EXPR (var, val);
3129 }
3130
3131 for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3132 fixup_vla_decls (sub);
3133 }
3134
3135 /* Fold the MEM_REF *E. */
3136 bool
3137 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3138 {
3139 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3140 *ref_p = fold (*ref_p);
3141 return true;
3142 }
3143
3144 /* Given DECL, a nested function, build an initialization call for FIELD,
3145 the trampoline or descriptor for DECL, using FUNC as the function. */
3146
3147 static gcall *
3148 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3149 tree func)
3150 {
3151 tree arg1, arg2, arg3, x;
3152
3153 gcc_assert (DECL_STATIC_CHAIN (decl));
3154 arg3 = build_addr (info->frame_decl);
3155
3156 arg2 = build_addr (decl);
3157
3158 x = build3 (COMPONENT_REF, TREE_TYPE (field),
3159 info->frame_decl, field, NULL_TREE);
3160 arg1 = build_addr (x);
3161
3162 return gimple_build_call (func, 3, arg1, arg2, arg3);
3163 }
3164
/* Do "everything else" to clean up or complete state collected by the various
   walking passes -- create a field to hold the frame base address, lay out the
   types and decls, generate code to initialize the frame decl, store critical
   expressions in the struct function for rtl to find.

   ROOT is the nesting_info for one function; thunks are skipped entirely.
   All initialization statements built here are accumulated in STMT_LIST
   and prepended to the function body's outermost GIMPLE bind at the end.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks have no frame of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
        = builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Parameters kept by reference store their address in the
	     frame; others are copied by value.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  They go at
     the head of the outermost bind so they run before any user code.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Look for the first debug decl with a variably modified type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing function relative to
		   which the type varies.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Strip anonymous pointer layers in parallel to reach
		   the named types underneath.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* Remap the shared TYPE_DECL name when the type itself
		   changed under the remapping.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Declare the debug vars in the outermost bind if it has a block,
	 otherwise chain them onto the function-level BLOCK's vars.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3438
3439 static void
3440 finalize_nesting_tree (struct nesting_info *root)
3441 {
3442 struct nesting_info *n;
3443 FOR_EACH_NEST_INFO (n, root)
3444 finalize_nesting_tree_1 (n);
3445 }
3446
3447 /* Unnest the nodes and pass them to cgraph. */
3448
3449 static void
3450 unnest_nesting_tree_1 (struct nesting_info *root)
3451 {
3452 struct cgraph_node *node = cgraph_node::get (root->context);
3453
3454 /* For nested functions update the cgraph to reflect unnesting.
3455 We also delay finalizing of these functions up to this point. */
3456 if (node->origin)
3457 {
3458 node->unnest ();
3459 if (!root->thunk_p)
3460 cgraph_node::finalize_function (root->context, true);
3461 }
3462 }
3463
3464 static void
3465 unnest_nesting_tree (struct nesting_info *root)
3466 {
3467 struct nesting_info *n;
3468 FOR_EACH_NEST_INFO (n, root)
3469 unnest_nesting_tree_1 (n);
3470 }
3471
3472 /* Free the data structures allocated during this pass. */
3473
3474 static void
3475 free_nesting_tree (struct nesting_info *root)
3476 {
3477 struct nesting_info *node, *next;
3478
3479 node = iter_nestinfo_start (root);
3480 do
3481 {
3482 next = iter_nestinfo_next (node);
3483 delete node->var_map;
3484 delete node->field_map;
3485 delete node->mem_refs;
3486 free (node);
3487 node = next;
3488 }
3489 while (node);
3490 }
3491
3492 /* Gimplify a function and all its nested functions. */
3493 static void
3494 gimplify_all_functions (struct cgraph_node *root)
3495 {
3496 struct cgraph_node *iter;
3497 if (!gimple_body (root->decl))
3498 gimplify_function_tree (root->decl);
3499 for (iter = root->nested; iter; iter = iter->next_nested)
3500 if (!iter->thunk.thunk_p)
3501 gimplify_all_functions (iter);
3502 }
3503
3504 /* Main entry point for this pass. Process FNDECL and all of its nested
3505 subroutines and turn them into something less tightly bound. */
3506
3507 void
3508 lower_nested_functions (tree fndecl)
3509 {
3510 struct cgraph_node *cgn;
3511 struct nesting_info *root;
3512
3513 /* If there are no nested functions, there's nothing to do. */
3514 cgn = cgraph_node::get (fndecl);
3515 if (!cgn->nested)
3516 return;
3517
3518 gimplify_all_functions (cgn);
3519
3520 set_dump_file (dump_begin (TDI_nested, &dump_flags));
3521 if (dump_file)
3522 fprintf (dump_file, "\n;; Function %s\n\n",
3523 lang_hooks.decl_printable_name (fndecl, 2));
3524
3525 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3526 root = create_nesting_tree (cgn);
3527
3528 walk_all_functions (convert_nonlocal_reference_stmt,
3529 convert_nonlocal_reference_op,
3530 root);
3531 walk_all_functions (convert_local_reference_stmt,
3532 convert_local_reference_op,
3533 root);
3534 walk_all_functions (convert_nl_goto_reference, NULL, root);
3535 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3536
3537 convert_all_function_calls (root);
3538 finalize_nesting_tree (root);
3539 unnest_nesting_tree (root);
3540
3541 free_nesting_tree (root);
3542 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3543
3544 if (dump_file)
3545 {
3546 dump_end (TDI_nested, dump_file);
3547 set_dump_file (NULL);
3548 }
3549 }
3550
3551 #include "gt-tree-nested.h"