/* gcc/tree-nested.c — GCC source (thirdparty/gcc.git mirror snapshot).  */
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "rtl.h"
27 #include "alias.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "tm_p.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "internal-fn.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "gimple-walk.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "tree-cfg.h"
41 #include "flags.h"
42 #include "insn-config.h"
43 #include "expmed.h"
44 #include "dojump.h"
45 #include "explow.h"
46 #include "calls.h"
47 #include "emit-rtl.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
51 #include "langhooks.h"
52 #include "gimple-low.h"
53 #include "gomp-constants.h"
54
55
56 /* The object of this pass is to lower the representation of a set of nested
57 functions in order to expose all of the gory details of the various
58 nonlocal references. We want to do this sooner rather than later, in
59 order to give us more freedom in emitting all of the functions in question.
60
61 Back in olden times, when gcc was young, we developed an insanely
62 complicated scheme whereby variables which were referenced nonlocally
63 were forced to live in the stack of the declaring function, and then
64 the nested functions magically discovered where these variables were
65 placed. In order for this scheme to function properly, it required
66 that the outer function be partially expanded, then we switch to
67 compiling the inner function, and once done with those we switch back
68 to compiling the outer function. Such delicate ordering requirements
69 makes it difficult to do whole translation unit optimizations
70 involving such functions.
71
72 The implementation here is much more direct. Everything that can be
73 referenced by an inner function is a member of an explicitly created
74 structure herein called the "nonlocal frame struct". The incoming
75 static chain for a nested function is a pointer to this struct in
76 the parent. In this way, we settle on known offsets from a known
77 base, and so are decoupled from the logic that places objects in the
78 function's stack frame. More importantly, we don't have to wait for
79 that to happen -- since the compilation of the inner function is no
80 longer tied to a real stack frame, the nonlocal frame struct can be
81 allocated anywhere. Which means that the outer function is now
82 inlinable.
83
84 Theory of operation here is very simple. Iterate over all the
85 statements in all the functions (depth first) several times,
86 allocating structures and fields on demand. In general we want to
87 examine inner functions first, so that we can avoid making changes
88 to outer functions which are unnecessary.
89
90 The order of the passes matters a bit, in that later passes will be
91 skipped if it is discovered that the functions don't actually interact
92 at all. That is, they're nested in the lexical sense but could have
93 been written as independent functions without change. */
94
95
/* A node in the function-nesting tree.  OUTER/INNER/NEXT encode the
   lexical nesting: OUTER is the enclosing function's node, INNER the
   first directly nested function, NEXT the next sibling nested in the
   same enclosing function.  */

struct nesting_info
{
  struct nesting_info *outer;		/* Enclosing function, or NULL at root.  */
  struct nesting_info *inner;		/* First directly nested function.  */
  struct nesting_info *next;		/* Next sibling at the same depth.  */

  /* Maps a non-locally referenced decl to its FIELD_DECL in the frame
     struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps decls to local replacements: trampoline fields for nested
     FUNCTION_DECLs (lookup_tramp_for_decl) and debug decls for
     variables (get_nonlocal_debug_decl).  */
  hash_map<tree, tree> *var_map;
  /* NOTE(review): consumers of this set are outside this chunk;
     presumably MEM_REF operands recorded for later processing.  */
  hash_set<tree *> *mem_refs;
  /* DECL_UIDs whose frame expansion is suppressed (tested in
     convert_nonlocal_reference_op).  */
  bitmap suppress_expansion;

  tree context;				/* The FUNCTION_DECL this node describes.  */
  tree new_local_var_chain;		/* Temporaries created for CONTEXT.  */
  tree debug_var_chain;			/* Debug decls carrying DECL_VALUE_EXPR.  */
  tree frame_type;			/* The non-local frame RECORD_TYPE.  */
  tree frame_decl;			/* Local VAR_DECL instance of FRAME_TYPE.  */
  tree chain_field;			/* "__chain" field within FRAME_TYPE.  */
  tree chain_decl;			/* Incoming static chain, a PARM_DECL.  */
  tree nl_goto_field;			/* "__nl_goto_buf" field, if needed.  */

  bool any_parm_remapped;		/* Some PARM_DECL got a frame field.  */
  bool any_tramp_created;		/* Some trampoline field was created.  */
  /* NOTE(review): set/read outside this chunk; appears to record how
     the static chain was added to calls.  */
  char static_chain_added;
};
120
121
122 /* Iterate over the nesting tree, starting with ROOT, depth first. */
123
124 static inline struct nesting_info *
125 iter_nestinfo_start (struct nesting_info *root)
126 {
127 while (root->inner)
128 root = root->inner;
129 return root;
130 }
131
132 static inline struct nesting_info *
133 iter_nestinfo_next (struct nesting_info *node)
134 {
135 if (node->next)
136 return iter_nestinfo_start (node->next);
137 return node->outer;
138 }
139
/* Visit every nesting_info reachable from ROOT, depth first — inner
   functions are seen before the functions that contain them.  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;


/* We're working in so many different function contexts simultaneously,
   that create_tmp_var is dangerous.  Prevent mishap.  */
#define create_tmp_var cant_use_create_tmp_var_here_dummy
150
151 /* Like create_tmp_var, except record the variable for registration at
152 the given nesting level. */
153
/* Like create_tmp_var, except record the variable for registration at
   the given nesting level.  The temporary is created in INFO->CONTEXT
   regardless of which function is currently being processed, and is
   pushed onto INFO->new_local_var_chain for later declaration.  */

static tree
create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
{
  tree tmp_var;

  /* If the type is of variable size or a type which must be created by the
     frontend, something is wrong.  Note that we explicitly allow
     incomplete types here, since we create them ourselves here.  */
  gcc_assert (!TREE_ADDRESSABLE (type));
  gcc_assert (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);

  tmp_var = create_tmp_var_raw (type, prefix);
  /* The variable belongs to INFO's function, not the current one.  */
  DECL_CONTEXT (tmp_var) = info->context;
  DECL_CHAIN (tmp_var) = info->new_local_var_chain;
  DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
  /* Complex and vector temporaries can live in SSA form.  */
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp_var) = 1;

  info->new_local_var_chain = tmp_var;

  return tmp_var;
}
178
179 /* Take the address of EXP to be used within function CONTEXT.
180 Mark it for addressability as necessary. */
181
182 tree
183 build_addr (tree exp, tree context)
184 {
185 tree base = exp;
186 tree save_context;
187 tree retval;
188
189 while (handled_component_p (base))
190 base = TREE_OPERAND (base, 0);
191
192 if (DECL_P (base))
193 TREE_ADDRESSABLE (base) = 1;
194
195 /* Building the ADDR_EXPR will compute a set of properties for
196 that ADDR_EXPR. Those properties are unfortunately context
197 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
198
199 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
200 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
201 way the properties are for the ADDR_EXPR are computed properly. */
202 save_context = current_function_decl;
203 current_function_decl = context;
204 retval = build_fold_addr_expr (exp);
205 current_function_decl = save_context;
206 return retval;
207 }
208
209 /* Insert FIELD into TYPE, sorted by alignment requirements. */
210
211 void
212 insert_field_into_struct (tree type, tree field)
213 {
214 tree *p;
215
216 DECL_CONTEXT (field) = type;
217
218 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
219 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
220 break;
221
222 DECL_CHAIN (field) = *p;
223 *p = field;
224
225 /* Set correct alignment for frame struct type. */
226 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
227 TYPE_ALIGN (type) = DECL_ALIGN (field);
228 }
229
230 /* Build or return the RECORD_TYPE that describes the frame state that is
231 shared between INFO->CONTEXT and its nested functions. This record will
232 not be complete until finalize_nesting_tree; up until that point we'll
233 be adding fields as necessary.
234
235 We also build the DECL that represents this frame in the function. */
236
237 static tree
238 get_frame_type (struct nesting_info *info)
239 {
240 tree type = info->frame_type;
241 if (!type)
242 {
243 char *name;
244
245 type = make_node (RECORD_TYPE);
246
247 name = concat ("FRAME.",
248 IDENTIFIER_POINTER (DECL_NAME (info->context)),
249 NULL);
250 TYPE_NAME (type) = get_identifier (name);
251 free (name);
252
253 info->frame_type = type;
254 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
255 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
256
257 /* ??? Always make it addressable for now, since it is meant to
258 be pointed to by the static chain pointer. This pessimizes
259 when it turns out that no static chains are needed because
260 the nested functions referencing non-local variables are not
261 reachable, but the true pessimization is to create the non-
262 local frame structure in the first place. */
263 TREE_ADDRESSABLE (info->frame_decl) = 1;
264 }
265 return type;
266 }
267
268 /* Return true if DECL should be referenced by pointer in the non-local
269 frame structure. */
270
271 static bool
272 use_pointer_in_frame (tree decl)
273 {
274 if (TREE_CODE (decl) == PARM_DECL)
275 {
276 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
277 sized decls, and inefficient to copy large aggregates. Don't bother
278 moving anything but scalar variables. */
279 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
280 }
281 else
282 {
283 /* Variable sized types make things "interesting" in the frame. */
284 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
285 }
286 }
287
288 /* Given DECL, a non-locally accessed variable, find or create a field
289 in the non-local frame structure for the given nesting context. */
290
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return the existing field or NULL_TREE; with INSERT,
   create the field on first use.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* Reference the object indirectly: the field holds only a
	     pointer to it, so none of DECL's own flags carry over.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The object itself lives in the frame; mirror DECL's type,
	     alignment, addressability and volatility on the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  DECL_ALIGN (field) = DECL_ALIGN (decl);
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	}

      insert_field_into_struct (get_frame_type (info), field);
      *slot = field;

      /* Remapped parameters must be copied into the frame on entry;
	 remember that some exist.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
333
334 /* Build or return the variable that holds the static chain within
335 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
336
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.
   Its type is a pointer to the enclosing function's frame struct.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Log only the first time the function gains a static chain.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
379
380 /* Build or return the field within the non-local frame state that holds
381 the static chain for INFO->CONTEXT. This is the way to walk back up
382 multiple nesting levels. */
383
384 static tree
385 get_chain_field (struct nesting_info *info)
386 {
387 tree field = info->chain_field;
388
389 if (!field)
390 {
391 tree type = build_pointer_type (get_frame_type (info->outer));
392
393 field = make_node (FIELD_DECL);
394 DECL_NAME (field) = get_identifier ("__chain");
395 TREE_TYPE (field) = type;
396 DECL_ALIGN (field) = TYPE_ALIGN (type);
397 DECL_NONADDRESSABLE_P (field) = 1;
398
399 insert_field_into_struct (get_frame_type (info), field);
400
401 info->chain_field = field;
402
403 if (dump_file
404 && (dump_flags & TDF_DETAILS)
405 && !DECL_STATIC_CHAIN (info->context))
406 fprintf (dump_file, "Setting static-chain for %s\n",
407 lang_hooks.decl_printable_name (info->context, 2));
408
409 DECL_STATIC_CHAIN (info->context) = 1;
410 }
411 return field;
412 }
413
414 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
415
416 static tree
417 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
418 gcall *call)
419 {
420 tree t;
421
422 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
423 gimple_call_set_lhs (call, t);
424 if (! gsi_end_p (*gsi))
425 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
426 gsi_insert_before (gsi, call, GSI_SAME_STMT);
427
428 return t;
429 }
430
431
432 /* Copy EXP into a temporary. Allocate the temporary in the context of
433 INFO and insert the initialization statement before GSI. */
434
435 static tree
436 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
437 {
438 tree t;
439 gimple stmt;
440
441 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
442 stmt = gimple_build_assign (t, exp);
443 if (! gsi_end_p (*gsi))
444 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
445 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
446
447 return t;
448 }
449
450
451 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
452
453 static tree
454 gsi_gimplify_val (struct nesting_info *info, tree exp,
455 gimple_stmt_iterator *gsi)
456 {
457 if (is_gimple_val (exp))
458 return exp;
459 else
460 return init_tmp_var (info, exp, gsi);
461 }
462
463 /* Similarly, but copy from the temporary and insert the statement
464 after the iterator. */
465
466 static tree
467 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
468 {
469 tree t;
470 gimple stmt;
471
472 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
473 stmt = gimple_build_assign (exp, t);
474 if (! gsi_end_p (*gsi))
475 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
476 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
477
478 return t;
479 }
480
481 /* Build or return the type used to represent a nested function trampoline. */
482
/* Cached singleton trampoline RECORD_TYPE, shared across all nested
   functions; GTY root so the GC keeps it alive between uses.  */
static GTY(()) tree trampoline_type;

/* Build or return the type used to represent a nested function
   trampoline: a struct containing a single char-array field "__data"
   sized and aligned per the target's trampoline requirements.  */

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Model the trampoline as char __data[size] with the required
     alignment; the location of the first use names the field decl.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  DECL_ALIGN (t) = align;
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
520
521 /* Given DECL, a nested function, find or create a field in the non-local
522 frame structure for a trampoline for this function. */
523
524 static tree
525 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
526 enum insert_option insert)
527 {
528 if (insert == NO_INSERT)
529 {
530 tree *slot = info->var_map->get (decl);
531 return slot ? *slot : NULL_TREE;
532 }
533
534 tree *slot = &info->var_map->get_or_insert (decl);
535 if (!*slot)
536 {
537 tree field = make_node (FIELD_DECL);
538 DECL_NAME (field) = DECL_NAME (decl);
539 TREE_TYPE (field) = get_trampoline_type (info);
540 TREE_ADDRESSABLE (field) = 1;
541
542 insert_field_into_struct (get_frame_type (info), field);
543 *slot = field;
544
545 info->any_tramp_created = true;
546 }
547
548 return *slot;
549 }
550
551 /* Build or return the field within the non-local frame state that holds
552 the non-local goto "jmp_buf". The buffer itself is maintained by the
553 rtl middle-end as dynamic stack space is allocated. */
554
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Words needed for the save area, plus one for the frame
	 pointer.  */
      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      /* The buffer is filled in by the rtl expander via its address.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
593
594 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
595
/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence
   *PSEQ, carrying INFO in the walk_stmt_info so the callbacks can reach
   the nesting context.  The sequence may be modified in place.  */

static void
walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
	   struct nesting_info *info, gimple_seq *pseq)
{
  struct walk_stmt_info wi;

  /* Start from a clean walk state; val_only is the default mode the
     operand callbacks expect.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.val_only = true;
  walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
}
607
608
609 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
610
611 static inline void
612 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
613 struct nesting_info *info)
614 {
615 gimple_seq body = gimple_body (info->context);
616 walk_body (callback_stmt, callback_op, info, &body);
617 gimple_set_body (info->context, body);
618 }
619
620 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
621
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.
   Any statements the callbacks generate while rewriting the loop
   clauses are collected and appended to the loop's pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Point wi.gsi at the end of an (initially empty) sequence so the
     operand callbacks have somewhere to insert new statements.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is written, not read, so it must remain a bare
	 decl rather than be replaced by a temporary.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary op on the index; walk its two
	 operands with the appropriate val_only settings.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Statements inserted through wi.gsi accumulated in its sequence;
     splice them onto the end of the loop's pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
672
673 /* Similarly for ROOT and all functions nested underneath, depth first. */
674
675 static void
676 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
677 struct nesting_info *root)
678 {
679 struct nesting_info *n;
680 FOR_EACH_NEST_INFO (n, root)
681 walk_function (callback_stmt, callback_op, n);
682 }
683
684
685 /* We have to check for a fairly pathological case. The operands of function
686 nested function are to be interpreted in the context of the enclosing
687 function. So if any are variably-sized, they will get remapped when the
688 enclosing function is inlined. But that remapping would also have to be
689 done in the types of the PARM_DECLs of the nested function, meaning the
690 argument types of that function will disagree with the arguments in the
691 calls to that function. So we'd either have to make a copy of the nested
692 function corresponding to each time the enclosing function was inlined or
693 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
694 function. The former is not practical. The latter would still require
695 detecting this case to know when to add the conversions. So, for now at
696 least, we don't inline such an enclosing function.
697
698 We have to do that check recursively, so here return indicating whether
699 FNDECL has such a nested function. ORIG_FN is the function we were
700 trying to inline to use for checking whether any argument is variably
701 modified by anything in it.
702
703 It would be better to do this in tree-inline.c so that we could give
704 the appropriate warning for why a function can't be inlined, but that's
705 too late since the nesting structure has already been flattened and
706 adding a flag just to record this fact seems a waste of a flag. */
707
/* Return true if any function nested (directly or indirectly) inside
   FNDECL has a parameter whose type is variably modified relative to
   ORIG_FNDECL.  See the long comment above for why such an enclosing
   function must not be inlined.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Walk each directly nested function recorded in the callgraph.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      /* Recurse into deeper nesting levels.  */
      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
727
728 /* Construct our local datastructure describing the function nesting
729 tree rooted by CGN. */
730
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN, recursing over the callgraph's nested-function
   links.  Returns the freshly allocated nesting_info for CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;

  /* Build subtrees for each nested function, prepending each onto
     INFO->inner (so siblings end up in reverse declaration order).  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See discussion at check_for_nested_with_variably_modified for a
     discussion of why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
756
757 /* Return an expression computing the static chain for TARGET_CONTEXT
758 from INFO->CONTEXT. Insert any necessary computations before TSI. */
759
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.
   If INFO already is the target, the chain is simply the address of
   its own frame; otherwise follow the "__chain" links upward one
   nesting level at a time.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      x = get_chain_decl (info);

      /* Each iteration dereferences one "__chain" field, walking from
	 INFO's parent up to (but not including) TARGET_CONTEXT.  */
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
787
788
789 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
790 frame as seen from INFO->CONTEXT. Insert any necessary computations
791 before GSI. */
792
793 static tree
794 get_frame_field (struct nesting_info *info, tree target_context,
795 tree field, gimple_stmt_iterator *gsi)
796 {
797 struct nesting_info *i;
798 tree x;
799
800 if (info->context == target_context)
801 {
802 /* Make sure frame_decl gets created. */
803 (void) get_frame_type (info);
804 x = info->frame_decl;
805 }
806 else
807 {
808 x = get_chain_decl (info);
809
810 for (i = info->outer; i->context != target_context; i = i->outer)
811 {
812 tree field = get_chain_field (i);
813
814 x = build_simple_mem_ref (x);
815 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
816 x = init_tmp_var (info, x, gsi);
817 }
818
819 x = build_simple_mem_ref (x);
820 }
821
822 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
823 return x;
824 }
825
826 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
827
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  The result is cached in INFO->var_map.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
    }
  else
    {
      x = get_chain_decl (info);
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  /* At this point I is the nesting_info owning DECL's frame; look up
     DECL's field in it and add the extra indirection if the frame
     only holds a pointer to the object.  */
  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The debug decl stands for the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* At -O0, VLA types referenced non-locally also need their size
     expressions made visible here.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
903
904
905 /* Callback for walk_gimple_stmt, rewrite all references to VAR
906 and PARM_DECLs that belong to outer functions.
907
908 The rewrite will involve some number of structure accesses back up
909 the static chain. E.g. for a variable FOO up one nesting level it'll
910 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
911 indirections apply to decls for which use_pointer_in_frame is true. */
912
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* Default to not walking into subtrees; cases that need it opt in.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Always create the debug replacement decl; unless the decl
	     is in the suppression bitmap, also build the actual frame
	     access chain.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      /* Frames holding only a pointer need one more load.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* In val_only positions the reference must be a simple
	     value: copy through a temporary (direction depends on
	     whether this is an lhs).  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR must stay an lvalue, so walk it
	   with val_only off; wi->changed tells us whether the operand
	   was rewritten.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size operands are all
		 value positions.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* Finally rewrite the reference's base object as an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1053
1054 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1055 struct walk_stmt_info *);
1056
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   Walk the OMP clause chain at *PCLAUSES.  DECL operands that belong to
   an enclosing function are replaced by their nonlocal debug decls and
   marked in the suppression bitmap so later walks do not expand them
   again; expression operands (if/num_threads/schedule chunk/etc.) are
   rewritten through convert_nonlocal_reference_op.  Returns true if the
   rewritten clauses require access to the static chain.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  /* If not optimizing, we will force the creation of the CHAIN object in
     convert_all_function_calls, so we need to take it into account here.  */
  bool need_chain = info->outer && !optimize, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; it only becomes the
     authoritative one once the whole chain has been processed.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder indicates GIMPLE init/merge sequences that
	     need the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The linear step is an expression operand; rewrite it as a
	     plain rvalue before handling the decl itself.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      /* A PRIVATE copy never reads the outer object, so it does
		 not by itself require the chain.  */
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* Clauses with a single expression at operand 0.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* The size expression may itself reference nonlocal decls.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  /* The mapped object is either a bare decl, or an arbitrary
	     reference expression that must be walked.  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	/* Clauses with no decl or expression operands to rewrite.  */
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to clauses that
     declared a need above.  The reduction placeholder's DECL_CONTEXT is
     temporarily retargeted so the walk treats it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1240
1241 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1242
1243 static void
1244 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1245 {
1246 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1247 type = TREE_TYPE (type);
1248
1249 if (TYPE_NAME (type)
1250 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1251 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1252 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1253
1254 while (POINTER_TYPE_P (type)
1255 || TREE_CODE (type) == VECTOR_TYPE
1256 || TREE_CODE (type) == FUNCTION_TYPE
1257 || TREE_CODE (type) == METHOD_TYPE)
1258 type = TREE_TYPE (type);
1259
1260 if (TREE_CODE (type) == ARRAY_TYPE)
1261 {
1262 tree domain, t;
1263
1264 note_nonlocal_vla_type (info, TREE_TYPE (type));
1265 domain = TYPE_DOMAIN (type);
1266 if (domain)
1267 {
1268 t = TYPE_MIN_VALUE (domain);
1269 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1270 && decl_function_context (t) != info->context)
1271 get_nonlocal_debug_decl (info, t);
1272 t = TYPE_MAX_VALUE (domain);
1273 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1274 && decl_function_context (t) != info->context)
1275 get_nonlocal_debug_decl (info, t);
1276 }
1277 }
1278 }
1279
1280 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1281 in BLOCK. */
1282
1283 static void
1284 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1285 {
1286 tree var;
1287
1288 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1289 if (TREE_CODE (var) == VAR_DECL
1290 && variably_modified_type_p (TREE_TYPE (var), NULL)
1291 && DECL_HAS_VALUE_EXPR_P (var)
1292 && decl_function_context (var) != info->context)
1293 note_nonlocal_vla_type (info, TREE_TYPE (var));
1294 }
1295
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = true;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Rewrite the clauses; if any of them needs the static chain,
	 add it as a firstprivate so the outlined body receives it.  */
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Temporaries created while walking the OMP body must be declared
	 inside that body, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* Loop bounds/steps live outside the body sequence and need their
	 own walk.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
			   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
		 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* A non-offloaded target region behaves like the other OMP
	 constructs above.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded region: the chain cannot be firstprivatized, it has
	 to be mapped TO the device explicitly.  */
      save_suppress = info->suppress_expansion;
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* Clause-less constructs: just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
	gbind *bind_stmt = as_a <gbind *> (stmt);
	/* At -O0 note VLA bounds now, since the CHAIN will be forced
	   later anyway.  */
	if (!optimize && gimple_bind_block (bind_stmt))
	  note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

	for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	  if (TREE_CODE (var) == NAMELIST_DECL)
	    {
	      /* Adjust decls mentioned in NAMELIST_DECL.  */
	      tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	      tree decl;
	      unsigned int i;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
		{
		  if (TREE_CODE (decl) == VAR_DECL
		      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		    continue;
		  if (decl_function_context (decl) != info->context)
		    CONSTRUCTOR_ELT (decls, i)->value
		      = get_nonlocal_debug_decl (info, decl);
		}
	    }

	*handled_ops_p = false;
	return NULL_TREE;
      }
    case GIMPLE_COND:
      /* Condition operands are plain rvalues; let the walker visit
	 them with that context established.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1482
1483
1484 /* A subroutine of convert_local_reference. Create a local variable
1485 in the parent function with DECL_VALUE_EXPR set to reference the
1486 field in FRAME. This is used both for debug info and in OMP
1487 lowering. */
1488
1489 static tree
1490 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1491 {
1492 tree x, new_decl;
1493
1494 tree *slot = &info->var_map->get_or_insert (decl);
1495 if (*slot)
1496 return *slot;
1497
1498 /* Make sure frame_decl gets created. */
1499 (void) get_frame_type (info);
1500 x = info->frame_decl;
1501 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1502
1503 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1504 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1505 DECL_CONTEXT (new_decl) = info->context;
1506 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1507 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1508 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1509 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1510 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1511 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1512 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1513 if ((TREE_CODE (decl) == PARM_DECL
1514 || TREE_CODE (decl) == RESULT_DECL
1515 || TREE_CODE (decl) == VAR_DECL)
1516 && DECL_BY_REFERENCE (decl))
1517 DECL_BY_REFERENCE (new_decl) = 1;
1518
1519 SET_DECL_VALUE_EXPR (new_decl, x);
1520 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1521 *slot = new_decl;
1522
1523 DECL_CHAIN (new_decl) = info->debug_var_chain;
1524 info->debug_var_chain = new_decl;
1525
1526 /* Do not emit debug info twice. */
1527 DECL_IGNORED_P (decl) = 1;
1528
1529 return new_decl;
1530 }
1531
1532
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   WI->val_only and WI->is_lhs describe the context of *TP: whether a
   bare value is required and whether it is being assigned to; both
   determine whether a temporary must be introduced around the frame
   reference.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Use the debug stand-in unless expansion is suppressed for
	     this decl (e.g. by an OMP clause rewrite); in that case
	     access the frame field directly.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value-only context, load into or store through a
	     temporary as appropriate.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand in an address context, then fix up the
	 ADDR_EXPR's invariant flags if anything changed.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Any other expression: recurse in rvalue context.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1683
1684 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1685 struct walk_stmt_info *);
1686
/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.

   This is the local-frame counterpart of convert_nonlocal_omp_clauses:
   DECL operands that were moved into this function's frame are replaced
   with their local debug decls, and expression operands are walked via
   convert_local_reference_op.  Returns true if access to the frame
   object is needed by the rewritten clauses.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  /* If not optimizing, we will force the creation of the FRAME object in
     convert_all_function_calls, so we need to take it into account here.  */
  bool need_frame = info->inner && !optimize, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; decls rewritten here must
     not be expanded again when their uses are walked later.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* A placeholder indicates GIMPLE init/merge sequences that
	     need the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* Rewrite the linear step as a plain rvalue first.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  /* Non-automatic variables are never rewritten.  */
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  /* Only decls of this function that were moved into the frame
	     (and not kept as pointers) need the stand-in.  */
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* Clauses with a single expression at operand 0.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* The size expression may itself need rewriting.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  /* The mapped object is either a bare decl, or an arbitrary
	     reference expression that must be walked.  */
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	/* Clauses with no decl or expression operands to rewrite.  */
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to clauses that
     declared a need above, with the reduction placeholder temporarily
     retargeted to this context.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1880
1881
1882 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1883 and PARM_DECLs that were referenced by inner nested functions.
1884 The rewrite will be a structure reference to the local frame variable. */
1885
1886 static tree
1887 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1888 struct walk_stmt_info *wi)
1889 {
1890 struct nesting_info *info = (struct nesting_info *) wi->info;
1891 tree save_local_var_chain;
1892 bitmap save_suppress;
1893 gimple stmt = gsi_stmt (*gsi);
1894
1895 switch (gimple_code (stmt))
1896 {
1897 case GIMPLE_OMP_PARALLEL:
1898 case GIMPLE_OMP_TASK:
1899 save_suppress = info->suppress_expansion;
1900 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1901 wi))
1902 {
1903 tree c;
1904 (void) get_frame_type (info);
1905 c = build_omp_clause (gimple_location (stmt),
1906 OMP_CLAUSE_SHARED);
1907 OMP_CLAUSE_DECL (c) = info->frame_decl;
1908 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1909 gimple_omp_taskreg_set_clauses (stmt, c);
1910 }
1911
1912 save_local_var_chain = info->new_local_var_chain;
1913 info->new_local_var_chain = NULL;
1914
1915 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1916 gimple_omp_body_ptr (stmt));
1917
1918 if (info->new_local_var_chain)
1919 declare_vars (info->new_local_var_chain,
1920 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1921 info->new_local_var_chain = save_local_var_chain;
1922 info->suppress_expansion = save_suppress;
1923 break;
1924
1925 case GIMPLE_OMP_FOR:
1926 save_suppress = info->suppress_expansion;
1927 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1928 walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1929 convert_local_reference_stmt,
1930 convert_local_reference_op, info);
1931 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1932 info, gimple_omp_body_ptr (stmt));
1933 info->suppress_expansion = save_suppress;
1934 break;
1935
1936 case GIMPLE_OMP_SECTIONS:
1937 save_suppress = info->suppress_expansion;
1938 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1939 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1940 info, gimple_omp_body_ptr (stmt));
1941 info->suppress_expansion = save_suppress;
1942 break;
1943
1944 case GIMPLE_OMP_SINGLE:
1945 save_suppress = info->suppress_expansion;
1946 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1947 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1948 info, gimple_omp_body_ptr (stmt));
1949 info->suppress_expansion = save_suppress;
1950 break;
1951
1952 case GIMPLE_OMP_TARGET:
1953 if (!is_gimple_omp_offloaded (stmt))
1954 {
1955 save_suppress = info->suppress_expansion;
1956 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1957 info->suppress_expansion = save_suppress;
1958 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1959 info, gimple_omp_body_ptr (stmt));
1960 break;
1961 }
1962 save_suppress = info->suppress_expansion;
1963 if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
1964 {
1965 tree c;
1966 (void) get_frame_type (info);
1967 c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1968 OMP_CLAUSE_DECL (c) = info->frame_decl;
1969 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
1970 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
1971 OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1972 gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1973 }
1974
1975 save_local_var_chain = info->new_local_var_chain;
1976 info->new_local_var_chain = NULL;
1977
1978 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1979 gimple_omp_body_ptr (stmt));
1980
1981 if (info->new_local_var_chain)
1982 declare_vars (info->new_local_var_chain,
1983 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1984 info->new_local_var_chain = save_local_var_chain;
1985 info->suppress_expansion = save_suppress;
1986 break;
1987
1988 case GIMPLE_OMP_TEAMS:
1989 save_suppress = info->suppress_expansion;
1990 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1991 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1992 info, gimple_omp_body_ptr (stmt));
1993 info->suppress_expansion = save_suppress;
1994 break;
1995
1996 case GIMPLE_OMP_SECTION:
1997 case GIMPLE_OMP_MASTER:
1998 case GIMPLE_OMP_TASKGROUP:
1999 case GIMPLE_OMP_ORDERED:
2000 walk_body (convert_local_reference_stmt, convert_local_reference_op,
2001 info, gimple_omp_body_ptr (stmt));
2002 break;
2003
2004 case GIMPLE_COND:
2005 wi->val_only = true;
2006 wi->is_lhs = false;
2007 *handled_ops_p = false;
2008 return NULL_TREE;
2009
2010 case GIMPLE_ASSIGN:
2011 if (gimple_clobber_p (stmt))
2012 {
2013 tree lhs = gimple_assign_lhs (stmt);
2014 if (!use_pointer_in_frame (lhs)
2015 && lookup_field_for_decl (info, lhs, NO_INSERT))
2016 {
2017 gsi_replace (gsi, gimple_build_nop (), true);
2018 break;
2019 }
2020 }
2021 *handled_ops_p = false;
2022 return NULL_TREE;
2023
2024 case GIMPLE_BIND:
2025 for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
2026 var;
2027 var = DECL_CHAIN (var))
2028 if (TREE_CODE (var) == NAMELIST_DECL)
2029 {
2030 /* Adjust decls mentioned in NAMELIST_DECL. */
2031 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
2032 tree decl;
2033 unsigned int i;
2034
2035 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
2036 {
2037 if (TREE_CODE (decl) == VAR_DECL
2038 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2039 continue;
2040 if (decl_function_context (decl) == info->context
2041 && !use_pointer_in_frame (decl))
2042 {
2043 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2044 if (field)
2045 {
2046 CONSTRUCTOR_ELT (decls, i)->value
2047 = get_local_debug_decl (info, decl, field);
2048 }
2049 }
2050 }
2051 }
2052
2053 *handled_ops_p = false;
2054 return NULL_TREE;
2055
2056 default:
2057 /* For every other statement that we are not interested in
2058 handling here, let the walker traverse the operands. */
2059 *handled_ops_p = false;
2060 return NULL_TREE;
2061 }
2062
2063 /* Indicate that we have handled all the operands ourselves. */
2064 *handled_ops_p = true;
2065 return NULL_TREE;
2066 }
2067
2068
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple stmt = gsi_stmt (*gsi);

  /* Only plain gotos are of interest here.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A computed goto (destination not a LABEL_DECL) cannot name a label
     of another function; leave it to the generic walker.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label of the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the enclosing function that owns LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass
     (convert_nl_goto_receiver).  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label, target_context), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2135
2136
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels recorded in var_map by convert_nl_goto_reference need
     a nonlocal receiver.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      gimple stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the artificial DECL_NONLOCAL label immediately before the
     user's label; the nonlocal goto transfers control to it.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
2184
2185
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &nested_function with the trampoline
	 address.  */
      *tp = x;
      break;

    default:
      /* Keep walking expressions, but there is nothing of interest
	 below type or declaration nodes.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2258
2259
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no special handling; let the
	 walker process the statement's operands and body normally.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* The region's body ends up in a separate function, so any
	   temporaries created while walking it must be declared inside
	   that body rather than in the current function.  */
	tree save_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	save_local_var_chain = info->new_local_var_chain;
	info->new_local_var_chain = NULL;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	info->new_local_var_chain = save_local_var_chain;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2317
2318
2319
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Calls that already carry a static chain need no rewriting.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls have no known callee to chain to.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0 marks a chain that is our own FRAME (callee nested
	     directly in this function); bit 1 marks a chain obtained
	     from our own static chain (callee nested further out).  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the body with static_chain_added cleared to learn what the
	 region itself needs, then add matching data-sharing clauses so
	 the outlined child function can access CHAIN.* / FRAME.*.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions stay in this function; just walk
	 their bodies.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* Offloaded regions use map clauses rather than data-sharing
	 clauses to make CHAIN.* / FRAME.* available on the device.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
		 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* These constructs keep their bodies in the current function;
	 simply recurse into them.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2444
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Repeat until the number of static-chain users reaches a fixed
	 point.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2514
/* State for remapping variably modified types of debug decls: the
   generic tree-copying state plus the nesting_info whose var_map
   supplies the decl replacements.  */

struct nesting_copy_body_data
{
  copy_body_data cb;		/* Must be first: nesting_copy_decl casts
				   the copy_body_data* back to this.  */
  struct nesting_info *root;	/* Root of the nesting tree being fixed.  */
};
2520
2521 /* A helper subroutine for debug_var_chain type remapping. */
2522
2523 static tree
2524 nesting_copy_decl (tree decl, copy_body_data *id)
2525 {
2526 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2527 tree *slot = nid->root->var_map->get (decl);
2528
2529 if (slot)
2530 return (tree) *slot;
2531
2532 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2533 {
2534 tree new_decl = copy_decl_no_change (decl, id);
2535 DECL_ORIGINAL_TYPE (new_decl)
2536 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2537 return new_decl;
2538 }
2539
2540 if (TREE_CODE (decl) == VAR_DECL
2541 || TREE_CODE (decl) == PARM_DECL
2542 || TREE_CODE (decl) == RESULT_DECL)
2543 return decl;
2544
2545 return copy_decl_no_change (decl, id);
2546 }
2547
2548 /* A helper function for remap_vla_decls. See if *TP contains
2549 some remapped variables. */
2550
2551 static tree
2552 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2553 {
2554 struct nesting_info *root = (struct nesting_info *) data;
2555 tree t = *tp;
2556
2557 if (DECL_P (t))
2558 {
2559 *walk_subtrees = 0;
2560 tree *slot = root->var_map->get (t);
2561
2562 if (slot)
2563 return *slot;
2564 }
2565 return NULL;
2566 }
2567
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Handle the nested blocks first (depth-first over the block tree).  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: cheap scan for a value expression that mentions a
     remapped variable; if none is found we can return without setting
     up the copy-body machinery.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *ptr-style value expressions of variably modified type
	   are of interest here.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  /* VAR is NULL when the scan above found nothing to remap.  */
  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: perform the remapping, resuming from the variable that
     stopped the scan.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting_info for the function owning VAR; skip the
	   variable if it belongs to none of them.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Step through unnamed pointer layers in both the old and the
	   remapped type in lock step.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	/* If the remapped type kept the old TYPE_NAME even though the
	   type itself changed, remap the name decl as well.  */
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2665
2666 /* Fold the MEM_REF *E. */
2667 bool
2668 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2669 {
2670 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2671 *ref_p = fold (*ref_p);
2672 return true;
2673 }
2674
/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  /* Initialization statements accumulated below; prepended to the
     function body at the end.  */
  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's value or its address,
	     depending on how the frame field was created.  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  /* Emit __builtin_init_trampoline (&FRAME.tramp, &nested_fn,
	     &FRAME).  */
	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Check whether any debug decl has a variably modified type; the
	 remapping below is only needed if one does.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Pick the function context relative to which the type
		   is variably modified, if any.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Step through unnamed pointer layers in both the old
		   and the remapped type in lock step.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* If the remapped type kept the old TYPE_NAME even
		   though the type changed, remap the name decl too.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Attach the debug decls either to the outermost bind's block or,
	 failing that, directly to the function's outermost BLOCK.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2899
2900 static void
2901 finalize_nesting_tree (struct nesting_info *root)
2902 {
2903 struct nesting_info *n;
2904 FOR_EACH_NEST_INFO (n, root)
2905 finalize_nesting_tree_1 (n);
2906 }
2907
2908 /* Unnest the nodes and pass them to cgraph. */
2909
2910 static void
2911 unnest_nesting_tree_1 (struct nesting_info *root)
2912 {
2913 struct cgraph_node *node = cgraph_node::get (root->context);
2914
2915 /* For nested functions update the cgraph to reflect unnesting.
2916 We also delay finalizing of these functions up to this point. */
2917 if (node->origin)
2918 {
2919 node->unnest ();
2920 cgraph_node::finalize_function (root->context, true);
2921 }
2922 }
2923
2924 static void
2925 unnest_nesting_tree (struct nesting_info *root)
2926 {
2927 struct nesting_info *n;
2928 FOR_EACH_NEST_INFO (n, root)
2929 unnest_nesting_tree_1 (n);
2930 }
2931
2932 /* Free the data structures allocated during this pass. */
2933
2934 static void
2935 free_nesting_tree (struct nesting_info *root)
2936 {
2937 struct nesting_info *node, *next;
2938
2939 node = iter_nestinfo_start (root);
2940 do
2941 {
2942 next = iter_nestinfo_next (node);
2943 delete node->var_map;
2944 delete node->field_map;
2945 delete node->mem_refs;
2946 free (node);
2947 node = next;
2948 }
2949 while (node);
2950 }
2951
2952 /* Gimplify a function and all its nested functions. */
2953 static void
2954 gimplify_all_functions (struct cgraph_node *root)
2955 {
2956 struct cgraph_node *iter;
2957 if (!gimple_body (root->decl))
2958 gimplify_function_tree (root->decl);
2959 for (iter = root->nested; iter; iter = iter->next_nested)
2960 gimplify_all_functions (iter);
2961 }
2962
/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* Make sure FNDECL and every nested function have GIMPLE bodies.  */
  gimplify_all_functions (cgn);

  dump_file = dump_begin (TDI_nested, &dump_flags);
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Rewrite uses of nonlocal variables, then local variables that
     escaped into the frame, then nonlocal gotos and their receiver
     labels.  */
  walk_all_functions (convert_nonlocal_reference_stmt,
		      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
		      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  /* Rewrite trampoline uses and calls, finish laying out the frames,
     and hand the no-longer-nested functions to the cgraph.  */
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      dump_file = NULL;
    }
}
3009
3010 #include "gt-tree-nested.h"