]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-nested.c
builtin-types.def (BT_FN_BOOL_UINT_LONGPTR_LONGPTR_LONGPTR, [...]): New.
[thirdparty/gcc.git] / gcc / tree-nested.c
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "rtl.h"
27 #include "alias.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "tm_p.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "internal-fn.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimple-walk.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "tree-cfg.h"
41 #include "flags.h"
42 #include "insn-config.h"
43 #include "expmed.h"
44 #include "dojump.h"
45 #include "explow.h"
46 #include "calls.h"
47 #include "emit-rtl.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
51 #include "langhooks.h"
52 #include "gimple-low.h"
53 #include "gomp-constants.h"
54
55
56 /* The object of this pass is to lower the representation of a set of nested
57 functions in order to expose all of the gory details of the various
58 nonlocal references. We want to do this sooner rather than later, in
59 order to give us more freedom in emitting all of the functions in question.
60
61 Back in olden times, when gcc was young, we developed an insanely
62 complicated scheme whereby variables which were referenced nonlocally
63 were forced to live in the stack of the declaring function, and then
64 the nested functions magically discovered where these variables were
65 placed. In order for this scheme to function properly, it required
66 that the outer function be partially expanded, then we switch to
67 compiling the inner function, and once done with those we switch back
68 to compiling the outer function. Such delicate ordering requirements
69 makes it difficult to do whole translation unit optimizations
70 involving such functions.
71
72 The implementation here is much more direct. Everything that can be
73 referenced by an inner function is a member of an explicitly created
74 structure herein called the "nonlocal frame struct". The incoming
75 static chain for a nested function is a pointer to this struct in
76 the parent. In this way, we settle on known offsets from a known
77 base, and so are decoupled from the logic that places objects in the
78 function's stack frame. More importantly, we don't have to wait for
79 that to happen -- since the compilation of the inner function is no
80 longer tied to a real stack frame, the nonlocal frame struct can be
81 allocated anywhere. Which means that the outer function is now
82 inlinable.
83
84 Theory of operation here is very simple. Iterate over all the
85 statements in all the functions (depth first) several times,
86 allocating structures and fields on demand. In general we want to
87 examine inner functions first, so that we can avoid making changes
88 to outer functions which are unnecessary.
89
90 The order of the passes matters a bit, in that later passes will be
91 skipped if it is discovered that the functions don't actually interact
92 at all. That is, they're nested in the lexical sense but could have
93 been written as independent functions without change. */
94
95
/* Per-function state collected while lowering nested functions.  One of
   these exists for every function in the nesting tree.  */
struct nesting_info
{
  /* Links forming the nesting tree: the enclosing function, the first
     function nested directly inside this one, and the next sibling at
     the same nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Map from a non-locally referenced DECL to its FIELD_DECL in this
     context's frame struct.  */
  hash_map<tree, tree> *field_map;
  /* Map from a DECL to its local replacement decl (debug decls,
     trampoline fields, ...) within this context.  */
  hash_map<tree, tree> *var_map;
  /* Set of MEM_REF operand slots already visited, to avoid
     reprocessing.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs whose expansion into frame accesses is suppressed.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created for this context, to be registered
     with the function later.  */
  tree new_local_var_chain;
  /* Chain of debug-only stand-in variables (see
     get_nonlocal_debug_decl).  */
  tree debug_var_chain;
  /* The RECORD_TYPE of the non-local frame struct and the VAR_DECL
     instantiating it in this function.  */
  tree frame_type;
  tree frame_decl;
  /* Frame field holding the parent's static chain, and the
     PARM_DECL-like decl holding our own incoming static chain.  */
  tree chain_field;
  tree chain_decl;
  /* Frame field used as the non-local goto save area.  */
  tree nl_goto_field;

  /* True if any PARM_DECL was remapped into the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline field was created for a nested function.  */
  bool any_tramp_created;
  /* Bitmask: 1 = own frame address used, 2 = incoming chain used.  */
  char static_chain_added;
};
120
121
122 /* Iterate over the nesting tree, starting with ROOT, depth first. */
123
124 static inline struct nesting_info *
125 iter_nestinfo_start (struct nesting_info *root)
126 {
127 while (root->inner)
128 root = root->inner;
129 return root;
130 }
131
132 static inline struct nesting_info *
133 iter_nestinfo_next (struct nesting_info *node)
134 {
135 if (node->next)
136 return iter_nestinfo_start (node->next);
137 return node->outer;
138 }
139
140 #define FOR_EACH_NEST_INFO(I, ROOT) \
141 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
142
143 /* Obstack used for the bitmaps in the struct above. */
144 static struct bitmap_obstack nesting_info_bitmap_obstack;
145
146
147 /* We're working in so many different function contexts simultaneously,
148 that create_tmp_var is dangerous. Prevent mishap. */
149 #define create_tmp_var cant_use_create_tmp_var_here_dummy
150
151 /* Like create_tmp_var, except record the variable for registration at
152 the given nesting level. */
153
154 static tree
155 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
156 {
157 tree tmp_var;
158
159 /* If the type is of variable size or a type which must be created by the
160 frontend, something is wrong. Note that we explicitly allow
161 incomplete types here, since we create them ourselves here. */
162 gcc_assert (!TREE_ADDRESSABLE (type));
163 gcc_assert (!TYPE_SIZE_UNIT (type)
164 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
165
166 tmp_var = create_tmp_var_raw (type, prefix);
167 DECL_CONTEXT (tmp_var) = info->context;
168 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
169 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
170 if (TREE_CODE (type) == COMPLEX_TYPE
171 || TREE_CODE (type) == VECTOR_TYPE)
172 DECL_GIMPLE_REG_P (tmp_var) = 1;
173
174 info->new_local_var_chain = tmp_var;
175
176 return tmp_var;
177 }
178
179 /* Take the address of EXP to be used within function CONTEXT.
180 Mark it for addressability as necessary. */
181
182 tree
183 build_addr (tree exp, tree context)
184 {
185 tree base = exp;
186 tree save_context;
187 tree retval;
188
189 while (handled_component_p (base))
190 base = TREE_OPERAND (base, 0);
191
192 if (DECL_P (base))
193 TREE_ADDRESSABLE (base) = 1;
194
195 /* Building the ADDR_EXPR will compute a set of properties for
196 that ADDR_EXPR. Those properties are unfortunately context
197 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
198
199 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
200 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
201 way the properties are for the ADDR_EXPR are computed properly. */
202 save_context = current_function_decl;
203 current_function_decl = context;
204 retval = build_fold_addr_expr (exp);
205 current_function_decl = save_context;
206 return retval;
207 }
208
209 /* Insert FIELD into TYPE, sorted by alignment requirements. */
210
211 void
212 insert_field_into_struct (tree type, tree field)
213 {
214 tree *p;
215
216 DECL_CONTEXT (field) = type;
217
218 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
219 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
220 break;
221
222 DECL_CHAIN (field) = *p;
223 *p = field;
224
225 /* Set correct alignment for frame struct type. */
226 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
227 TYPE_ALIGN (type) = DECL_ALIGN (field);
228 }
229
230 /* Build or return the RECORD_TYPE that describes the frame state that is
231 shared between INFO->CONTEXT and its nested functions. This record will
232 not be complete until finalize_nesting_tree; up until that point we'll
233 be adding fields as necessary.
234
235 We also build the DECL that represents this frame in the function. */
236
static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  /* Create the frame RECORD_TYPE and its VAR_DECL lazily, on first
     request; fields are added later as references are discovered.  */
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name it "FRAME.<function-name>" for dumps and debug info.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
267
268 /* Return true if DECL should be referenced by pointer in the non-local
269 frame structure. */
270
271 static bool
272 use_pointer_in_frame (tree decl)
273 {
274 if (TREE_CODE (decl) == PARM_DECL)
275 {
276 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
277 sized decls, and inefficient to copy large aggregates. Don't bother
278 moving anything but scalar variables. */
279 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
280 }
281 else
282 {
283 /* Variable sized types make things "interesting" in the frame. */
284 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
285 }
286 }
287
288 /* Given DECL, a non-locally accessed variable, find or create a field
289 in the non-local frame structure for the given nesting context. */
290
static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  /* With NO_INSERT, only report an existing mapping; never create.  */
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* Only a pointer to DECL's storage lives in the frame.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* DECL itself lives in the frame; mirror its attributes on
	     the field so accesses behave like accesses to DECL.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Remapping a parameter means its incoming value must later be
	 copied into the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
333
334 /* Build or return the variable that holds the static chain within
335 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
336
static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  /* Created lazily: a pointer to the outer function's frame struct.  */
  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Announce only the first time the flag transitions to set.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
379
380 /* Build or return the field within the non-local frame state that holds
381 the static chain for INFO->CONTEXT. This is the way to walk back up
382 multiple nesting levels. */
383
static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  /* Created lazily: a "__chain" field in this context's frame struct
     pointing at the outer function's frame struct.  */
  if (!field)
    {
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      /* Announce only the first time the flag transitions to set.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
413
414 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
415
416 static tree
417 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
418 gcall *call)
419 {
420 tree t;
421
422 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
423 gimple_call_set_lhs (call, t);
424 if (! gsi_end_p (*gsi))
425 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
426 gsi_insert_before (gsi, call, GSI_SAME_STMT);
427
428 return t;
429 }
430
431
432 /* Copy EXP into a temporary. Allocate the temporary in the context of
433 INFO and insert the initialization statement before GSI. */
434
435 static tree
436 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
437 {
438 tree t;
439 gimple *stmt;
440
441 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
442 stmt = gimple_build_assign (t, exp);
443 if (! gsi_end_p (*gsi))
444 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
445 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
446
447 return t;
448 }
449
450
451 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
452
453 static tree
454 gsi_gimplify_val (struct nesting_info *info, tree exp,
455 gimple_stmt_iterator *gsi)
456 {
457 if (is_gimple_val (exp))
458 return exp;
459 else
460 return init_tmp_var (info, exp, gsi);
461 }
462
463 /* Similarly, but copy from the temporary and insert the statement
464 after the iterator. */
465
466 static tree
467 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
468 {
469 tree t;
470 gimple *stmt;
471
472 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
473 stmt = gimple_build_assign (exp, t);
474 if (! gsi_end_p (*gsi))
475 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
476 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
477
478 return t;
479 }
480
481 /* Build or return the type used to represent a nested function trampoline. */
482
/* A single trampoline RECORD_TYPE is shared by all nested functions;
   cached here across calls (GC-rooted via GTY).  */
static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Represent the trampoline as a char array field "__data" wrapped in
     a RECORD_TYPE named "__builtin_trampoline".  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
520
521 /* Given DECL, a nested function, find or create a field in the non-local
522 frame structure for a trampoline for this function. */
523
static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  /* With NO_INSERT, only report an existing mapping; never create.  */
  if (insert == NO_INSERT)
    {
      tree *slot = info->var_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->var_map->get_or_insert (decl);
  if (!*slot)
    {
      /* Reserve space for DECL's trampoline in the parent's frame.  */
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type (info);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      info->any_tramp_created = true;
    }

  return *slot;
}
550
551 /* Build or return the field within the non-local frame state that holds
552 the non-local goto "jmp_buf". The buffer itself is maintained by the
553 rtl middle-end as dynamic stack space is allocated. */
554
static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Words in the save area, plus one for the frame pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
593
594 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
595
static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  /* Start each walk in the "value only" state; the conversion
     callbacks adjust val_only/is_lhs as they descend.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}
607
608
609 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
610
611 static inline void
612 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
613 struct nesting_info *info)
614 {
615 gimple_seq body = gimple_body (info->context);
616 walk_body (callback_stmt, callback_op, info, &body);
617 gimple_set_body (info->context, body);
618 }
619
620 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
621
static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Statements the callbacks emit while walking the loop controls are
     collected through WI.GSI into an initially empty sequence, then
     appended to the pre-body below.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is assigned to, so it may not be replaced by a
	 gimple value.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression; walk both operands, the
	 first in non-value context since it names the index.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      /* Any statements emitted above must execute before the loop;
	 append them to the pre-body.  */
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
672
673 /* Similarly for ROOT and all functions nested underneath, depth first. */
674
675 static void
676 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
677 struct nesting_info *root)
678 {
679 struct nesting_info *n;
680 FOR_EACH_NEST_INFO (n, root)
681 walk_function (callback_stmt, callback_op, n);
682 }
683
684
685 /* We have to check for a fairly pathological case. The operands of function
686 nested function are to be interpreted in the context of the enclosing
687 function. So if any are variably-sized, they will get remapped when the
688 enclosing function is inlined. But that remapping would also have to be
689 done in the types of the PARM_DECLs of the nested function, meaning the
690 argument types of that function will disagree with the arguments in the
691 calls to that function. So we'd either have to make a copy of the nested
692 function corresponding to each time the enclosing function was inlined or
693 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
694 function. The former is not practical. The latter would still require
695 detecting this case to know when to add the conversions. So, for now at
696 least, we don't inline such an enclosing function.
697
698 We have to do that check recursively, so here return indicating whether
699 FNDECL has such a nested function. ORIG_FN is the function we were
700 trying to inline to use for checking whether any argument is variably
701 modified by anything in it.
702
703 It would be better to do this in tree-inline.c so that we could give
704 the appropriate warning for why a function can't be inlined, but that's
705 too late since the nesting structure has already been flattened and
706 adding a flag just to record this fact seems a waste of a flag. */
707
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* For each function nested directly in FNDECL, test every argument
     type against ORIG_FNDECL, then recurse into its own nest.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
727
728 /* Construct our local datastructure describing the function nesting
729 tree rooted by CGN. */
730
static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Recurse over the functions nested in CGN, prepending each child as
     a sibling on INFO->inner.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
756
757 /* Return an expression computing the static chain for TARGET_CONTEXT
758 from INFO->CONTEXT. Insert any necessary computations before TSI. */
759
static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target frame is our own; its address is the chain.  */
      x = build_addr (info->frame_decl, target_context);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and follow the __chain
	 fields up until TARGET_CONTEXT's frame is reached, loading
	 each link into a temporary.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
789
790
791 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
792 frame as seen from INFO->CONTEXT. Insert any necessary computations
793 before GSI. */
794
795 static tree
796 get_frame_field (struct nesting_info *info, tree target_context,
797 tree field, gimple_stmt_iterator *gsi)
798 {
799 struct nesting_info *i;
800 tree x;
801
802 if (info->context == target_context)
803 {
804 /* Make sure frame_decl gets created. */
805 (void) get_frame_type (info);
806 x = info->frame_decl;
807 info->static_chain_added |= 1;
808 }
809 else
810 {
811 x = get_chain_decl (info);
812 info->static_chain_added |= 2;
813
814 for (i = info->outer; i->context != target_context; i = i->outer)
815 {
816 tree field = get_chain_field (i);
817
818 x = build_simple_mem_ref (x);
819 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
820 x = init_tmp_var (info, x, gsi);
821 }
822
823 x = build_simple_mem_ref (x);
824 }
825
826 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
827 return x;
828 }
829
830 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
831
832 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
833 in the nested function with DECL_VALUE_EXPR set to reference the true
834 variable in the parent function. This is used both for debug info
835 and in OMP lowering. */
836
static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  /* Return the previously created stand-in, if any.  */
  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  /* X is now the frame of TARGET_CONTEXT; select DECL's field in it,
     with an extra indirection when only a pointer is stored.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  /* Build a local stand-in mirroring DECL's attributes, whose
     DECL_VALUE_EXPR is the frame access X.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
909
910
911 /* Callback for walk_gimple_stmt, rewrite all references to VAR
912 and PARM_DECLs that belong to outer functions.
913
914 The rewrite will involve some number of structure accesses back up
915 the static chain. E.g. for a variable FOO up one nesting level it'll
916 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
917 indirections apply to decls for which use_pointer_in_frame is true. */
918
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default, don't recurse; the interesting cases re-enable it.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Default to the debug stand-in; replace it with a real
	     frame access unless expansion is suppressed for T.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* Where a bare value is required, spill to / load from a
	     temporary, depending on whether T is being written.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address context so a converted decl is
	   not spilled to a temporary before its address is taken.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant,
	       presumably), then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1059
static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
                                             struct walk_stmt_info *);

/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   Walk the OpenMP clause chain at *PCLAUSES for the nesting context in
   WI->info.  Decl operands that belong to an outer function are replaced
   by their nonlocal debug decls (get_nonlocal_debug_decl) and recorded
   in a fresh copy of info->suppress_expansion so the generic operand
   walk does not rewrite them a second time.  Expression operands
   (schedule chunk sizes, map sizes, linear steps, ...) are rewritten
   with convert_nonlocal_reference_op.  Returns true if some converted
   clause needs the static chain inside the region.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  /* Dummy *walk_subtrees slot for direct calls to
     convert_nonlocal_reference_op; its value is not used afterwards.  */
  int dummy;
  bitmap new_suppress;

  /* Collect the new suppressions in a copy of the current bitmap; the
     caller saves and restores info->suppress_expansion around the
     region, so the original must stay intact.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          /* A placeholder means the clause carries GIMPLE init/merge
             sequences that must be walked in the second pass below.  */
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LASTPRIVATE:
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LINEAR:
          if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
            need_stmts = true;
          /* The step is an expression operand; rewrite it as an rvalue
             before handling the decl operand itself.  */
          wi->val_only = true;
          wi->is_lhs = false;
          convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
                                         &dummy, wi);
          goto do_decl_clause;

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_TO_DECLARE:
        case OMP_CLAUSE_LINK:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
        do_decl_clause:
          decl = OMP_CLAUSE_DECL (clause);
          /* Non-automatic variables are never rewritten.  */
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) != info->context)
            {
              /* Replace a nonlocal decl by its debug decl, and suppress
                 further expansion of it inside the region body.  */
              bitmap_set_bit (new_suppress, DECL_UID (decl));
              OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
              /* OMP_CLAUSE_PRIVATE alone does not force the chain.  */
              if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
                need_chain = true;
            }
          break;

        case OMP_CLAUSE_SCHEDULE:
          if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
            break;
          /* FALLTHRU */
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_HINT:
        case OMP_CLAUSE__CILK_FOR_COUNT_:
          /* Clauses with a single expression operand: rewrite it as an
             rvalue.  */
          wi->val_only = true;
          wi->is_lhs = false;
          convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                         &dummy, wi);
          break;

        case OMP_CLAUSE_DIST_SCHEDULE:
          if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                             &dummy, wi);
            }
          break;

        case OMP_CLAUSE_MAP:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
          if (OMP_CLAUSE_SIZE (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
                                             &dummy, wi);
            }
          if (DECL_P (OMP_CLAUSE_DECL (clause)))
            goto do_decl_clause;
          /* A non-decl operand (e.g. an array section): walk it in
             full rather than treating it as a plain decl.  */
          wi->val_only = true;
          wi->is_lhs = false;
          walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
                     wi, NULL);
          break;

        case OMP_CLAUSE_ALIGNED:
          if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_nonlocal_reference_op
                (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
            }
          /* Like do_decl_clause, but don't add any suppression.  */
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) != info->context)
            {
              OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
              if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
                need_chain = true;
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_DEFAULTMAP:
          /* These clauses carry no decl or expression to rewrite.  */
          break;

        default:
          gcc_unreachable ();
        }
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to the reduction,
     lastprivate and linear clauses noted above, now that the new
     suppression bitmap is in effect.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            {
              /* Temporarily re-parent the placeholder decls into this
                 function so the walk treats their uses as local, then
                 restore the original context.  */
              tree old_context
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
              if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                  = info->context;
              walk_body (convert_nonlocal_reference_stmt,
                         convert_nonlocal_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
              walk_body (convert_nonlocal_reference_stmt,
                         convert_nonlocal_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
              if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                  = old_context;
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;

        case OMP_CLAUSE_LINEAR:
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
          break;

        default:
          break;
        }

  return need_chain;
}
1263
1264 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1265
1266 static void
1267 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1268 {
1269 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1270 type = TREE_TYPE (type);
1271
1272 if (TYPE_NAME (type)
1273 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1274 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1275 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1276
1277 while (POINTER_TYPE_P (type)
1278 || TREE_CODE (type) == VECTOR_TYPE
1279 || TREE_CODE (type) == FUNCTION_TYPE
1280 || TREE_CODE (type) == METHOD_TYPE)
1281 type = TREE_TYPE (type);
1282
1283 if (TREE_CODE (type) == ARRAY_TYPE)
1284 {
1285 tree domain, t;
1286
1287 note_nonlocal_vla_type (info, TREE_TYPE (type));
1288 domain = TYPE_DOMAIN (type);
1289 if (domain)
1290 {
1291 t = TYPE_MIN_VALUE (domain);
1292 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1293 && decl_function_context (t) != info->context)
1294 get_nonlocal_debug_decl (info, t);
1295 t = TYPE_MAX_VALUE (domain);
1296 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1297 && decl_function_context (t) != info->context)
1298 get_nonlocal_debug_decl (info, t);
1299 }
1300 }
1301 }
1302
1303 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1304 in BLOCK. */
1305
1306 static void
1307 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1308 {
1309 tree var;
1310
1311 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1312 if (TREE_CODE (var) == VAR_DECL
1313 && variably_modified_type_p (TREE_TYPE (var), NULL)
1314 && DECL_HAS_VALUE_EXPR_P (var)
1315 && decl_function_context (var) != info->context)
1316 note_nonlocal_vla_type (info, TREE_TYPE (var));
1317 }
1318
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                                 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
        {
          wi->val_only = true;
          wi->is_lhs = false;
          *handled_ops_p = true;
          return NULL_TREE;
        }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause needed the static chain, add the chain decl to
         the region as a firstprivate clause.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
                                        wi))
        {
          tree c, decl;
          decl = get_chain_decl (info);
          c = build_omp_clause (gimple_location (stmt),
                                OMP_CLAUSE_FIRSTPRIVATE);
          OMP_CLAUSE_DECL (c) = decl;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
          gimple_omp_taskreg_set_clauses (stmt, c);
        }

      /* Collect temporaries created while walking the region body so
         they get declared inside the region, not in the host.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
        declare_vars (info->new_local_var_chain,
                      gimple_seq_first_stmt (gimple_omp_body (stmt)),
                      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The index/bound/increment operands need a dedicated walk.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
                           convert_nonlocal_reference_stmt,
                           convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
                 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target region is handled like the simple OMP
         regions above.  */
      if (!is_gimple_omp_offloaded (stmt))
        {
          save_suppress = info->suppress_expansion;
          convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
                                        wi);
          info->suppress_expansion = save_suppress;
          walk_body (convert_nonlocal_reference_stmt,
                     convert_nonlocal_reference_op, info,
                     gimple_omp_body_ptr (stmt));
          break;
        }
      save_suppress = info->suppress_expansion;
      /* Offloaded region: the chain decl is mapped to the device
         (GOMP_MAP_TO) rather than made firstprivate.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
                                        wi))
        {
          tree c, decl;
          decl = get_chain_decl (info);
          c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
          OMP_CLAUSE_DECL (c) = decl;
          OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
          OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
          OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
          gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
        }

      /* As for parallel/task: keep new temporaries inside the region.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
        declare_vars (info->new_local_var_chain,
                      gimple_seq_first_stmt (gimple_omp_body (stmt)),
                      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* Clause-less regions: only the body needs walking.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
                 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
        gbind *bind_stmt = as_a <gbind *> (stmt);
        /* When not optimizing, note nonlocal VLA bound decls for the
           bind's block (see note_nonlocal_block_vlas).  */
        if (!optimize && gimple_bind_block (bind_stmt))
          note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

        for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
          if (TREE_CODE (var) == NAMELIST_DECL)
            {
              /* Adjust decls mentioned in NAMELIST_DECL.  */
              tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
              tree decl;
              unsigned int i;

              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
                {
                  /* Non-automatic variables are never rewritten.  */
                  if (TREE_CODE (decl) == VAR_DECL
                      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
                    continue;
                  if (decl_function_context (decl) != info->context)
                    CONSTRUCTOR_ELT (decls, i)->value
                      = get_nonlocal_debug_decl (info, decl);
                }
            }

        /* Let the walker traverse the bind body normally.  */
        *handled_ops_p = false;
        return NULL_TREE;
      }
    case GIMPLE_COND:
      /* Condition operands are always rvalues.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
         handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1505
1506
1507 /* A subroutine of convert_local_reference. Create a local variable
1508 in the parent function with DECL_VALUE_EXPR set to reference the
1509 field in FRAME. This is used both for debug info and in OMP
1510 lowering. */
1511
1512 static tree
1513 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1514 {
1515 tree x, new_decl;
1516
1517 tree *slot = &info->var_map->get_or_insert (decl);
1518 if (*slot)
1519 return *slot;
1520
1521 /* Make sure frame_decl gets created. */
1522 (void) get_frame_type (info);
1523 x = info->frame_decl;
1524 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1525
1526 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1527 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1528 DECL_CONTEXT (new_decl) = info->context;
1529 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1530 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1531 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1532 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1533 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1534 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1535 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1536 if ((TREE_CODE (decl) == PARM_DECL
1537 || TREE_CODE (decl) == RESULT_DECL
1538 || TREE_CODE (decl) == VAR_DECL)
1539 && DECL_BY_REFERENCE (decl))
1540 DECL_BY_REFERENCE (new_decl) = 1;
1541
1542 SET_DECL_VALUE_EXPR (new_decl, x);
1543 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1544 *slot = new_decl;
1545
1546 DECL_CHAIN (new_decl) = info->debug_var_chain;
1547 info->debug_var_chain = new_decl;
1548
1549 /* Do not emit debug info twice. */
1550 DECL_IGNORED_P (decl) = 1;
1551
1552 return new_decl;
1553 }
1554
1555
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

/* Operand callback for the local-reference walk.  TP points at the tree
   to examine; DATA is a struct walk_stmt_info whose val_only/is_lhs
   fields describe the context of *TP, following the same protocol as
   convert_nonlocal_reference_op.  Always returns NULL_TREE so the walk
   continues.  */

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default the cases below recurse explicitly themselves.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
        break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
        {
          /* If we copied a pointer to the frame, then the original decl
             is used unchanged in the parent function.  */
          if (use_pointer_in_frame (t))
            break;

          /* No need to transform anything if no child references the
             variable.  */
          field = lookup_field_for_decl (info, t, NO_INSERT);
          if (!field)
            break;
          wi->changed = true;

          /* Use the debug decl by default; when the decl is not in the
             suppression bitmap, reference the frame field directly.  */
          x = get_local_debug_decl (info, t, field);
          if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
            x = get_frame_field (info, info->context, field, &wi->gsi);

          /* A context that wants a simple value gets a temporary: a
             save-tmp when *TP is written, an init-tmp when it is read.  */
          if (wi->val_only)
            {
              if (wi->is_lhs)
                x = save_tmp_var (info, x, &wi->gsi);
              else
                x = init_tmp_var (info, x, &wi->gsi);
            }

          *tp = x;
        }
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* The operand of an ADDR_EXPR must stay an lvalue.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
        {
          tree save_context;

          /* Then the frame decl is now addressable.  */
          TREE_ADDRESSABLE (info->frame_decl) = 1;

          /* Recompute invariance with this function as context.  */
          save_context = current_function_decl;
          current_function_decl = info->context;
          recompute_tree_invariant_for_addr_expr (t);
          current_function_decl = save_context;

          /* If we are in a context where we only accept values, then
             compute the address into a temporary.  */
          if (save_val_only)
            *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
                                    t, &wi->gsi);
        }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
         anything that describes the references.  Otherwise, we lose track
         of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
            /* Walk the (optional) offset operand.  */
            walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
              /* Walk the index plus the optional lower-bound and
                 element-size operands.  */
              walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
                         NULL);
              walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                         NULL);
              walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
                         NULL);
            }
        }
      /* Finally walk the base object as an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
                 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
         part of a ADDR_EXPR address are not allowed.  But we cannot
         fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
          && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
        info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
         untouched.  This might actually be for !val_only + lhs, in which
         case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
        {
          /* Any other expression: recurse with rvalue context.  */
          *walk_subtrees = 1;
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;
    }

  return NULL_TREE;
}
1706
static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
                                          struct walk_stmt_info *);

/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.

   Decl operands that live in this function's frame are replaced by
   their local debug decls (get_local_debug_decl) and recorded in a
   fresh copy of info->suppress_expansion so the generic operand walk
   does not expand them again; expression operands are rewritten with
   convert_local_reference_op.  Returns true if some clause now refers
   to the local frame object.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  /* Dummy *walk_subtrees slot for direct calls to
     convert_local_reference_op; its value is not used afterwards.  */
  int dummy;
  bitmap new_suppress;

  /* Collect new suppressions in a copy of the current bitmap; the
     caller saves and restores info->suppress_expansion around the
     region.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          /* A placeholder means the clause carries GIMPLE init/merge
             sequences that must be walked in the second pass below.  */
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LASTPRIVATE:
          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;

        case OMP_CLAUSE_LINEAR:
          if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
            need_stmts = true;
          /* The step is an expression operand; rewrite it as an rvalue
             before handling the decl operand itself.  */
          wi->val_only = true;
          wi->is_lhs = false;
          convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
                                      wi);
          goto do_decl_clause;

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_TO_DECLARE:
        case OMP_CLAUSE_LINK:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_IS_DEVICE_PTR:
        do_decl_clause:
          decl = OMP_CLAUSE_DECL (clause);
          /* Non-automatic variables are never rewritten.  */
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) == info->context
              && !use_pointer_in_frame (decl))
            {
              /* Only decls that some child references have a frame
                 field; those get the debug decl and a suppression.  */
              tree field = lookup_field_for_decl (info, decl, NO_INSERT);
              if (field)
                {
                  bitmap_set_bit (new_suppress, DECL_UID (decl));
                  OMP_CLAUSE_DECL (clause)
                    = get_local_debug_decl (info, decl, field);
                  need_frame = true;
                }
            }
          break;

        case OMP_CLAUSE_SCHEDULE:
          if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
            break;
          /* FALLTHRU */
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_HINT:
        case OMP_CLAUSE__CILK_FOR_COUNT_:
          /* Clauses with a single expression operand: rewrite it as an
             rvalue.  */
          wi->val_only = true;
          wi->is_lhs = false;
          convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
                                      wi);
          break;

        case OMP_CLAUSE_DIST_SCHEDULE:
          if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
                                          &dummy, wi);
            }
          break;

        case OMP_CLAUSE_MAP:
        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
          if (OMP_CLAUSE_SIZE (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
                                          &dummy, wi);
            }
          if (DECL_P (OMP_CLAUSE_DECL (clause)))
            goto do_decl_clause;
          /* A non-decl operand (e.g. an array section): walk it in
             full.  */
          wi->val_only = true;
          wi->is_lhs = false;
          walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
                     wi, NULL);
          break;

        case OMP_CLAUSE_ALIGNED:
          if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
            {
              wi->val_only = true;
              wi->is_lhs = false;
              convert_local_reference_op
                (&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
            }
          /* Like do_decl_clause, but don't add any suppression.  */
          decl = OMP_CLAUSE_DECL (clause);
          if (TREE_CODE (decl) == VAR_DECL
              && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
            break;
          if (decl_function_context (decl) == info->context
              && !use_pointer_in_frame (decl))
            {
              tree field = lookup_field_for_decl (info, decl, NO_INSERT);
              if (field)
                {
                  OMP_CLAUSE_DECL (clause)
                    = get_local_debug_decl (info, decl, field);
                  need_frame = true;
                }
            }
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_DEFAULTMAP:
          /* These clauses carry no decl or expression to rewrite.  */
          break;

        default:
          gcc_unreachable ();
        }
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to the reduction,
     lastprivate and linear clauses noted above, now that the new
     suppression bitmap is in effect.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
        {
        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
            {
              /* Temporarily re-parent the placeholder decls into this
                 function so the walk treats their uses as local, then
                 restore the original context.  */
              tree old_context
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
              if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                  = info->context;
              walk_body (convert_local_reference_stmt,
                         convert_local_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
              walk_body (convert_local_reference_stmt,
                         convert_local_reference_op, info,
                         &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
              if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
                  = old_context;
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          walk_body (convert_local_reference_stmt,
                     convert_local_reference_op, info,
                     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;

        case OMP_CLAUSE_LINEAR:
          walk_body (convert_local_reference_stmt,
                     convert_local_reference_op, info,
                     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
          break;

        default:
          break;
        }

  return need_frame;
}
1920
1921
1922 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1923 and PARM_DECLs that were referenced by inner nested functions.
1924 The rewrite will be a structure reference to the local frame variable. */
1925
1926 static tree
1927 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1928 struct walk_stmt_info *wi)
1929 {
1930 struct nesting_info *info = (struct nesting_info *) wi->info;
1931 tree save_local_var_chain;
1932 bitmap save_suppress;
1933 gimple *stmt = gsi_stmt (*gsi);
1934
1935 switch (gimple_code (stmt))
1936 {
1937 case GIMPLE_OMP_PARALLEL:
1938 case GIMPLE_OMP_TASK:
1939 save_suppress = info->suppress_expansion;
1940 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1941 wi))
1942 {
1943 tree c;
1944 (void) get_frame_type (info);
1945 c = build_omp_clause (gimple_location (stmt),
1946 OMP_CLAUSE_SHARED);
1947 OMP_CLAUSE_DECL (c) = info->frame_decl;
1948 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1949 gimple_omp_taskreg_set_clauses (stmt, c);
1950 }
1951
1952 save_local_var_chain = info->new_local_var_chain;
1953 info->new_local_var_chain = NULL;
1954
1955 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1956 gimple_omp_body_ptr (stmt));
1957
1958 if (info->new_local_var_chain)
1959 declare_vars (info->new_local_var_chain,
1960 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1961 info->new_local_var_chain = save_local_var_chain;
1962 info->suppress_expansion = save_suppress;
1963 break;
1964
1965 case GIMPLE_OMP_FOR:
1966 save_suppress = info->suppress_expansion;
1967 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1968 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1969 convert_local_reference_stmt,
1970 convert_local_reference_op, info);
1971 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1972 info, gimple_omp_body_ptr (stmt));
1973 info->suppress_expansion = save_suppress;
1974 break;
1975
1976 case GIMPLE_OMP_SECTIONS:
1977 save_suppress = info->suppress_expansion;
1978 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1979 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1980 info, gimple_omp_body_ptr (stmt));
1981 info->suppress_expansion = save_suppress;
1982 break;
1983
1984 case GIMPLE_OMP_SINGLE:
1985 save_suppress = info->suppress_expansion;
1986 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1987 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1988 info, gimple_omp_body_ptr (stmt));
1989 info->suppress_expansion = save_suppress;
1990 break;
1991
1992 case GIMPLE_OMP_TARGET:
1993 if (!is_gimple_omp_offloaded (stmt))
1994 {
1995 save_suppress = info->suppress_expansion;
1996 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1997 info->suppress_expansion = save_suppress;
1998 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1999 info, gimple_omp_body_ptr (stmt));
2000 break;
2001 }
2002 save_suppress = info->suppress_expansion;
2003 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
2004 {
2005 tree c;
2006 (void) get_frame_type (info);
2007 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
2008 OMP_CLAUSE_DECL (c) = info->frame_decl;
2009 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2010 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
2011 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
2012 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
2013 }
2014
2015 save_local_var_chain = info->new_local_var_chain;
2016 info->new_local_var_chain = NULL;
2017
2018 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
2019 gimple_omp_body_ptr (stmt));
2020
2021 if (info->new_local_var_chain)
2022 declare_vars (info->new_local_var_chain,
2023 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
2024 info->new_local_var_chain = save_local_var_chain;
2025 info->suppress_expansion = save_suppress;
2026 break;
2027
2028 case GIMPLE_OMP_TEAMS:
2029 save_suppress = info->suppress_expansion;
2030 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
2031 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2032 info, gimple_omp_body_ptr (stmt));
2033 info->suppress_expansion = save_suppress;
2034 break;
2035
2036 case GIMPLE_OMP_SECTION:
2037 case GIMPLE_OMP_MASTER:
2038 case GIMPLE_OMP_TASKGROUP:
2039 case GIMPLE_OMP_ORDERED:
2040 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2041 info, gimple_omp_body_ptr (stmt));
2042 break;
2043
2044 case GIMPLE_COND:
2045 wi->val_only = true;
2046 wi->is_lhs = false;
2047 *handled_ops_p = false;
2048 return NULL_TREE;
2049
2050 case GIMPLE_ASSIGN:
2051 if (gimple_clobber_p (stmt))
2052 {
2053 tree lhs = gimple_assign_lhs (stmt);
2054 if (!use_pointer_in_frame (lhs)
2055 && lookup_field_for_decl (info, lhs, NO_INSERT))
2056 {
2057 gsi_replace (gsi, gimple_build_nop (), true);
2058 break;
2059 }
2060 }
2061 *handled_ops_p = false;
2062 return NULL_TREE;
2063
2064 case GIMPLE_BIND:
2065 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2066 var;
2067 var = DECL_CHAIN (var))
2068 if (TREE_CODE (var) == NAMELIST_DECL)
2069 {
2070 /* Adjust decls mentioned in NAMELIST_DECL. */
2071 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2072 tree decl;
2073 unsigned int i;
2074
2075 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2076 {
2077 if (TREE_CODE (decl) == VAR_DECL
2078 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2079 continue;
2080 if (decl_function_context (decl) == info->context
2081 && !use_pointer_in_frame (decl))
2082 {
2083 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2084 if (field)
2085 {
2086 CONSTRUCTOR_ELT (decls, i)->value
2087 = get_local_debug_decl (info, decl, field);
2088 }
2089 }
2090 }
2091 }
2092
2093 *handled_ops_p = false;
2094 return NULL_TREE;
2095
2096 default:
2097 /* For every other statement that we are not interested in
2098 handling here, let the walker traverse the operands. */
2099 *handled_ops_p = false;
2100 return NULL_TREE;
2101 }
2102
2103 /* Indicate that we have handled all the operands ourselves. */
2104 *handled_ops_p = true;
2105 return NULL_TREE;
2106 }
2107
2108
2109 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2110 that reference labels from outer functions. The rewrite will be a
2111 call to __builtin_nonlocal_goto. */
2112
2113 static tree
2114 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2115 struct walk_stmt_info *wi)
2116 {
2117 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2118 tree label, new_label, target_context, x, field;
2119 gcall *call;
2120 gimple *stmt = gsi_stmt (*gsi);
2121
2122 if (gimple_code (stmt) != GIMPLE_GOTO)
2123 {
2124 *handled_ops_p = false;
2125 return NULL_TREE;
2126 }
2127
2128 label = gimple_goto_dest (stmt);
2129 if (TREE_CODE (label) != LABEL_DECL)
2130 {
2131 *handled_ops_p = false;
2132 return NULL_TREE;
2133 }
2134
2135 target_context = decl_function_context (label);
2136 if (target_context == info->context)
2137 {
2138 *handled_ops_p = false;
2139 return NULL_TREE;
2140 }
2141
2142 for (i = info->outer; target_context != i->context; i = i->outer)
2143 continue;
2144
2145 /* The original user label may also be use for a normal goto, therefore
2146 we must create a new label that will actually receive the abnormal
2147 control transfer. This new label will be marked LABEL_NONLOCAL; this
2148 mark will trigger proper behavior in the cfg, as well as cause the
2149 (hairy target-specific) non-local goto receiver code to be generated
2150 when we expand rtl. Enter this association into var_map so that we
2151 can insert the new label into the IL during a second pass. */
2152 tree *slot = &i->var_map->get_or_insert (label);
2153 if (*slot == NULL)
2154 {
2155 new_label = create_artificial_label (UNKNOWN_LOCATION);
2156 DECL_NONLOCAL (new_label) = 1;
2157 *slot = new_label;
2158 }
2159 else
2160 new_label = *slot;
2161
2162 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
2163 field = get_nl_goto_field (i);
2164 x = get_frame_field (info, target_context, field, gsi);
2165 x = build_addr (x, target_context);
2166 x = gsi_gimplify_val (info, x, gsi);
2167 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2168 2, build_addr (new_label, target_context), x);
2169 gsi_replace (gsi, call, false);
2170
2171 /* We have handled all of STMT's operands, no need to keep going. */
2172 *handled_ops_p = true;
2173 return NULL_TREE;
2174 }
2175
2176
2177 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2178 are referenced via nonlocal goto from a nested function. The rewrite
2179 will involve installing a newly generated DECL_NONLOCAL label, and
2180 (potentially) a branch around the rtl gunk that is assumed to be
2181 attached to such a label. */
2182
2183 static tree
2184 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2185 struct walk_stmt_info *wi)
2186 {
2187 struct nesting_info *const info = (struct nesting_info *) wi->info;
2188 tree label, new_label;
2189 gimple_stmt_iterator tmp_gsi;
2190 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2191
2192 if (!stmt)
2193 {
2194 *handled_ops_p = false;
2195 return NULL_TREE;
2196 }
2197
2198 label = gimple_label_label (stmt);
2199
2200 tree *slot = info->var_map->get (label);
2201 if (!slot)
2202 {
2203 *handled_ops_p = false;
2204 return NULL_TREE;
2205 }
2206
2207 /* If there's any possibility that the previous statement falls through,
2208 then we must branch around the new non-local label. */
2209 tmp_gsi = wi->gsi;
2210 gsi_prev (&tmp_gsi);
2211 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2212 {
2213 gimple *stmt = gimple_build_goto (label);
2214 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2215 }
2216
2217 new_label = (tree) *slot;
2218 stmt = gimple_build_label (new_label);
2219 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2220
2221 *handled_ops_p = true;
2222 return NULL_TREE;
2223 }
2224
2225
2226 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2227 of nested functions that require the use of trampolines. The rewrite
2228 will involve a reference a trampoline generated for the occasion. */
2229
2230 static tree
2231 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2232 {
2233 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2234 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2235 tree t = *tp, decl, target_context, x, builtin;
2236 gcall *call;
2237
2238 *walk_subtrees = 0;
2239 switch (TREE_CODE (t))
2240 {
2241 case ADDR_EXPR:
2242 /* Build
2243 T.1 = &CHAIN->tramp;
2244 T.2 = __builtin_adjust_trampoline (T.1);
2245 T.3 = (func_type)T.2;
2246 */
2247
2248 decl = TREE_OPERAND (t, 0);
2249 if (TREE_CODE (decl) != FUNCTION_DECL)
2250 break;
2251
2252 /* Only need to process nested functions. */
2253 target_context = decl_function_context (decl);
2254 if (!target_context)
2255 break;
2256
2257 /* If the nested function doesn't use a static chain, then
2258 it doesn't need a trampoline. */
2259 if (!DECL_STATIC_CHAIN (decl))
2260 break;
2261
2262 /* If we don't want a trampoline, then don't build one. */
2263 if (TREE_NO_TRAMPOLINE (t))
2264 break;
2265
2266 /* Lookup the immediate parent of the callee, as that's where
2267 we need to insert the trampoline. */
2268 for (i = info; i->context != target_context; i = i->outer)
2269 continue;
2270 x = lookup_tramp_for_decl (i, decl, INSERT);
2271
2272 /* Compute the address of the field holding the trampoline. */
2273 x = get_frame_field (info, target_context, x, &wi->gsi);
2274 x = build_addr (x, target_context);
2275 x = gsi_gimplify_val (info, x, &wi->gsi);
2276
2277 /* Do machine-specific ugliness. Normally this will involve
2278 computing extra alignment, but it can really be anything. */
2279 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2280 call = gimple_build_call (builtin, 1, x);
2281 x = init_tmp_var_with_call (info, &wi->gsi, call);
2282
2283 /* Cast back to the proper function type. */
2284 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2285 x = init_tmp_var (info, x, &wi->gsi);
2286
2287 *tp = x;
2288 break;
2289
2290 default:
2291 if (!IS_TYPE_OR_DECL_P (t))
2292 *walk_subtrees = 1;
2293 break;
2294 }
2295
2296 return NULL_TREE;
2297 }
2298
2299
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no clause adjustment; let the
	 generic walker handle their operands and body.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* Temporaries created while rewriting the region body must be
	   declared inside the region, so save and reset the pending
	   local-var chain around the body walk.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	/* Track which of FRAME.* (bit 0) / CHAIN.* (bit 1) the body walk
	   introduced, separately from the enclosing context.  */
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* For each decl the body now needs (i == 0: FRAME.*, i == 1:
	   CHAIN.*), make sure the construct has a clause passing it in.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* parallel/task: pass CHAIN.* firstprivate, FRAME.* shared.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target: map the data instead (CHAIN.* to,
		   FRAME.* tofrom).  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2395
2396
2397
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A chain already supplied, or an indirect call, needs no work.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  /* Direct call to a nested function: supply its static chain,
	     and record whether that used FRAME.* (bit 0, callee nested
	     directly in this function) or CHAIN.* (bit 1).  */
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the region body with a fresh static_chain_added so we learn
	 which decls the body itself needs passed in.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Pass CHAIN.* firstprivate and FRAME.* shared into the
		 parallel/task region.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions just get their body walked.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Offloaded target: map CHAIN.* to, FRAME.* tofrom.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* Other OMP constructs only need their bodies walked.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2522
2523 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2524 call expressions. At the same time, determine if a nested function
2525 actually uses its static chain; if not, remember that. */
2526
2527 static void
2528 convert_all_function_calls (struct nesting_info *root)
2529 {
2530 unsigned int chain_count = 0, old_chain_count, iter_count;
2531 struct nesting_info *n;
2532
2533 /* First, optimistically clear static_chain for all decls that haven't
2534 used the static chain already for variable access. But always create
2535 it if not optimizing. This makes it possible to reconstruct the static
2536 nesting tree at run time and thus to resolve up-level references from
2537 within the debugger. */
2538 FOR_EACH_NEST_INFO (n, root)
2539 {
2540 tree decl = n->context;
2541 if (!optimize)
2542 {
2543 if (n->inner)
2544 (void) get_frame_type (n);
2545 if (n->outer)
2546 (void) get_chain_decl (n);
2547 }
2548 else if (!n->outer || (!n->chain_decl && !n->chain_field))
2549 {
2550 DECL_STATIC_CHAIN (decl) = 0;
2551 if (dump_file && (dump_flags & TDF_DETAILS))
2552 fprintf (dump_file, "Guessing no static-chain for %s\n",
2553 lang_hooks.decl_printable_name (decl, 2));
2554 }
2555 else
2556 DECL_STATIC_CHAIN (decl) = 1;
2557 chain_count += DECL_STATIC_CHAIN (decl);
2558 }
2559
2560 /* Walk the functions and perform transformations. Note that these
2561 transformations can induce new uses of the static chain, which in turn
2562 require re-examining all users of the decl. */
2563 /* ??? It would make sense to try to use the call graph to speed this up,
2564 but the call graph hasn't really been built yet. Even if it did, we
2565 would still need to iterate in this loop since address-of references
2566 wouldn't show up in the callgraph anyway. */
2567 iter_count = 0;
2568 do
2569 {
2570 old_chain_count = chain_count;
2571 chain_count = 0;
2572 iter_count++;
2573
2574 if (dump_file && (dump_flags & TDF_DETAILS))
2575 fputc ('\n', dump_file);
2576
2577 FOR_EACH_NEST_INFO (n, root)
2578 {
2579 tree decl = n->context;
2580 walk_function (convert_tramp_reference_stmt,
2581 convert_tramp_reference_op, n);
2582 walk_function (convert_gimple_call, NULL, n);
2583 chain_count += DECL_STATIC_CHAIN (decl);
2584 }
2585 }
2586 while (chain_count != old_chain_count);
2587
2588 if (dump_file && (dump_flags & TDF_DETAILS))
2589 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2590 iter_count);
2591 }
2592
/* Callback data used when remapping types and decls for debug variables:
   extends the tree-inline copy_body_data with the nesting tree root whose
   var_map guides the remapping (consumed by nesting_copy_decl).  */
struct nesting_copy_body_data
{
  copy_body_data cb;		/* Base tree-inline callback data.  */
  struct nesting_info *root;	/* Root of the nesting tree being finalized.  */
};
2598
2599 /* A helper subroutine for debug_var_chain type remapping. */
2600
2601 static tree
2602 nesting_copy_decl (tree decl, copy_body_data *id)
2603 {
2604 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2605 tree *slot = nid->root->var_map->get (decl);
2606
2607 if (slot)
2608 return (tree) *slot;
2609
2610 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2611 {
2612 tree new_decl = copy_decl_no_change (decl, id);
2613 DECL_ORIGINAL_TYPE (new_decl)
2614 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2615 return new_decl;
2616 }
2617
2618 if (TREE_CODE (decl) == VAR_DECL
2619 || TREE_CODE (decl) == PARM_DECL
2620 || TREE_CODE (decl) == RESULT_DECL)
2621 return decl;
2622
2623 return copy_decl_no_change (decl, id);
2624 }
2625
2626 /* A helper function for remap_vla_decls. See if *TP contains
2627 some remapped variables. */
2628
2629 static tree
2630 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2631 {
2632 struct nesting_info *root = (struct nesting_info *) data;
2633 tree t = *tp;
2634
2635 if (DECL_P (t))
2636 {
2637 *walk_subtrees = 0;
2638 tree *slot = root->var_map->get (t);
2639
2640 if (slot)
2641 return *slot;
2642 }
2643 return NULL;
2644 }
2645
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  Processes subblocks first, then scans BLOCK's vars for a
   variable whose DECL_VALUE_EXPR is an INDIRECT_REF of a VAR_DECL and
   whose (variably modified) type mentions a remapped variable; if any is
   found, rewrites the types and value expressions from that point on.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Recurse into subblocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* Cheap scan: find the first candidate that actually needs remapping;
     if there is none, we can skip the copy_body machinery entirely.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Continue from the first candidate found above, remapping each one.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level VAR belongs to; skip it if it is not
	   part of this nesting tree.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the type, then walk NEWT/TYPE in parallel through unnamed
	   pointer layers so the TYPE_NAME fixup below compares the right
	   levels.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2743
2744 /* Fold the MEM_REF *E. */
2745 bool
2746 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2747 {
2748 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2749 *ref_p = fold (*ref_p);
2750 return true;
2751 }
2752
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;	      /* Accumulates frame/chain/tramp init code.  */
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  /* Only parameters that were given a frame field need copying.  */
	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  /* FRAME.field = x;  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  /* The static chain to bake into the trampoline: our frame.  */
	  arg3 = build_addr (root->frame_decl, context);

	  /* The nested function the trampoline will jump to.  */
	  arg2 = build_addr (i->context, context);

	  /* The trampoline storage: &FRAME.tramp_field.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the initializations to the function's outermost bind.  */
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Look for any debug decl with a variably modified type.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the nesting level whose context the type varies
		   with; skip decls outside this tree.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		/* Remap the type; walk NEWT/T in parallel through unnamed
		   pointer layers for the TYPE_NAME fixup below.  */
		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Declare the debug vars in the outermost bind, or chain them onto
	 the function-level BLOCK if that bind has no block.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2977
2978 static void
2979 finalize_nesting_tree (struct nesting_info *root)
2980 {
2981 struct nesting_info *n;
2982 FOR_EACH_NEST_INFO (n, root)
2983 finalize_nesting_tree_1 (n);
2984 }
2985
2986 /* Unnest the nodes and pass them to cgraph. */
2987
2988 static void
2989 unnest_nesting_tree_1 (struct nesting_info *root)
2990 {
2991 struct cgraph_node *node = cgraph_node::get (root->context);
2992
2993 /* For nested functions update the cgraph to reflect unnesting.
2994 We also delay finalizing of these functions up to this point. */
2995 if (node->origin)
2996 {
2997 node->unnest ();
2998 cgraph_node::finalize_function (root->context, true);
2999 }
3000 }
3001
3002 static void
3003 unnest_nesting_tree (struct nesting_info *root)
3004 {
3005 struct nesting_info *n;
3006 FOR_EACH_NEST_INFO (n, root)
3007 unnest_nesting_tree_1 (n);
3008 }
3009
3010 /* Free the data structures allocated during this pass. */
3011
3012 static void
3013 free_nesting_tree (struct nesting_info *root)
3014 {
3015 struct nesting_info *node, *next;
3016
3017 node = iter_nestinfo_start (root);
3018 do
3019 {
3020 next = iter_nestinfo_next (node);
3021 delete node->var_map;
3022 delete node->field_map;
3023 delete node->mem_refs;
3024 free (node);
3025 node = next;
3026 }
3027 while (node);
3028 }
3029
3030 /* Gimplify a function and all its nested functions. */
3031 static void
3032 gimplify_all_functions (struct cgraph_node *root)
3033 {
3034 struct cgraph_node *iter;
3035 if (!gimple_body (root->decl))
3036 gimplify_function_tree (root->decl);
3037 for (iter = root->nested; iter; iter = iter->next_nested)
3038 gimplify_all_functions (iter);
3039 }
3040
3041 /* Main entry point for this pass. Process FNDECL and all of its nested
3042 subroutines and turn them into something less tightly bound. */
3043
3044 void
3045 lower_nested_functions (tree fndecl)
3046 {
3047 struct cgraph_node *cgn;
3048 struct nesting_info *root;
3049
3050 /* If there are no nested functions, there's nothing to do. */
3051 cgn = cgraph_node::get (fndecl);
3052 if (!cgn->nested)
3053 return;
3054
3055 gimplify_all_functions (cgn);
3056
3057 dump_file = dump_begin (TDI_nested, &dump_flags);
3058 if (dump_file)
3059 fprintf (dump_file, "\n;; Function %s\n\n",
3060 lang_hooks.decl_printable_name (fndecl, 2));
3061
3062 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3063 root = create_nesting_tree (cgn);
3064
3065 walk_all_functions (convert_nonlocal_reference_stmt,
3066 convert_nonlocal_reference_op,
3067 root);
3068 walk_all_functions (convert_local_reference_stmt,
3069 convert_local_reference_op,
3070 root);
3071 walk_all_functions (convert_nl_goto_reference, NULL, root);
3072 walk_all_functions (convert_nl_goto_receiver, NULL, root);
3073
3074 convert_all_function_calls (root);
3075 finalize_nesting_tree (root);
3076 unnest_nesting_tree (root);
3077
3078 free_nesting_tree (root);
3079 bitmap_obstack_release (&nesting_info_bitmap_obstack);
3080
3081 if (dump_file)
3082 {
3083 dump_end (TDI_nested, dump_file);
3084 dump_file = NULL;
3085 }
3086 }
3087
3088 #include "gt-tree-nested.h"