]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Nested function decomposition for trees. |
35fd3193 | 2 | Copyright (C) 2004, 2005 Free Software Foundation, Inc. |
6de9cd9a DN |
3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GCC; see the file COPYING. If not, write to | |
366ccddb KC |
18 | the Free Software Foundation, 51 Franklin Street, Fifth Floor, |
19 | Boston, MA 02110-1301, USA. */ | |
6de9cd9a DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "rtl.h" | |
27 | #include "tm_p.h" | |
28 | #include "function.h" | |
29 | #include "tree-dump.h" | |
30 | #include "tree-inline.h" | |
eadf906f | 31 | #include "tree-gimple.h" |
6de9cd9a DN |
32 | #include "tree-iterator.h" |
33 | #include "tree-flow.h" | |
34 | #include "cgraph.h" | |
35 | #include "expr.h" | |
36 | #include "langhooks.h" | |
37 | #include "ggc.h" | |
38 | ||
39 | ||
40 | /* The object of this pass is to lower the representation of a set of nested | |
41 | functions in order to expose all of the gory details of the various | |
42 | nonlocal references. We want to do this sooner rather than later, in | |
43 | order to give us more freedom in emitting all of the functions in question. | |
44 | ||
45 | Back in olden times, when gcc was young, we developed an insanely | |
46 | complicated scheme whereby variables which were referenced nonlocally | |
47 | were forced to live in the stack of the declaring function, and then | |
48 | the nested functions magically discovered where these variables were | |
49 | placed. In order for this scheme to function properly, it required | |
50 | that the outer function be partially expanded, then we switch to | |
51 | compiling the inner function, and once done with those we switch back | |
52 | to compiling the outer function. Such delicate ordering requirements | |
53 | makes it difficult to do whole translation unit optimizations | |
54 | involving such functions. | |
55 | ||
56 | The implementation here is much more direct. Everything that can be | |
57 | referenced by an inner function is a member of an explicitly created | |
2e6ae27f | 58 | structure herein called the "nonlocal frame struct". The incoming |
6de9cd9a DN |
59 | static chain for a nested function is a pointer to this struct in |
60 | the parent. In this way, we settle on known offsets from a known | |
61 | base, and so are decoupled from the logic that places objects in the | |
62 | function's stack frame. More importantly, we don't have to wait for | |
63 | that to happen -- since the compilation of the inner function is no | |
64 | longer tied to a real stack frame, the nonlocal frame struct can be | |
65 | allocated anywhere. Which means that the outer function is now | |
66 | inlinable. | |
67 | ||
68 | Theory of operation here is very simple. Iterate over all the | |
69 | statements in all the functions (depth first) several times, | |
70 | allocating structures and fields on demand. In general we want to | |
71 | examine inner functions first, so that we can avoid making changes | |
72 | to outer functions which are unnecessary. | |
73 | ||
74 | The order of the passes matters a bit, in that later passes will be | |
75 | skipped if it is discovered that the functions don't actually interact | |
76 | at all. That is, they're nested in the lexical sense but could have | |
77 | been written as independent functions without change. */ | |
78 | ||
79 | ||
/* One entry in a nesting_info's var_map hash table: maps an "old" tree
   (a decl, or a nested FUNCTION_DECL) to the "new" tree that stands in
   for it -- typically a FIELD_DECL in the non-local frame struct.
   Hashed and compared on the OLD pointer only.  */

struct var_map_elt GTY(())
{
  tree old;   /* Hash key: the original decl.  */
  tree new;   /* Replacement: usually a FIELD_DECL in the frame struct.  */
};
85 | ||
/* Per-function bookkeeping for the nested-function lowering pass.
   One of these exists for every function in the nesting tree; OUTER,
   INNER and NEXT link the tree together (NEXT chains siblings, which
   is also what the GTY chain_next walks).  */

struct nesting_info GTY ((chain_next ("%h.next")))
{
  struct nesting_info *outer;   /* Lexically enclosing function, or NULL.  */
  struct nesting_info *inner;   /* First function nested directly inside.  */
  struct nesting_info *next;    /* Next sibling at the same nesting level.  */

  /* Maps decls to their frame-struct fields; see struct var_map_elt.  */
  htab_t GTY ((param_is (struct var_map_elt))) var_map;
  tree context;                 /* The FUNCTION_DECL this info describes.  */
  tree new_local_var_chain;     /* Temporaries created by create_tmp_var_for,
                                   registered into the function at finalization.  */
  tree frame_type;              /* Lazily-built "FRAME.<fn>" RECORD_TYPE.  */
  tree frame_decl;              /* The local VAR_DECL of that frame type.  */
  tree chain_field;             /* "__chain" field inside frame_type.  */
  tree chain_decl;              /* Incoming static chain, as a PARM_DECL.  */
  tree nl_goto_field;           /* "__nl_goto_buf" field for non-local gotos.  */

  bool any_parm_remapped;       /* Some PARM_DECL got a frame field.  */
  bool any_tramp_created;       /* Some nested fn needed a trampoline.  */
};
104 | ||
105 | ||
106 | /* Hashing and equality functions for nesting_info->var_map. */ | |
107 | ||
108 | static hashval_t | |
109 | var_map_hash (const void *x) | |
110 | { | |
111 | const struct var_map_elt *a = x; | |
112 | return htab_hash_pointer (a->old); | |
113 | } | |
114 | ||
115 | static int | |
116 | var_map_eq (const void *x, const void *y) | |
117 | { | |
118 | const struct var_map_elt *a = x; | |
119 | const struct var_map_elt *b = y; | |
120 | return a->old == b->old; | |
121 | } | |
122 | ||
123 | /* We're working in so many different function contexts simultaneously, | |
124 | that create_tmp_var is dangerous. Prevent mishap. */ | |
125 | #define create_tmp_var cant_use_create_tmp_var_here_dummy | |
126 | ||
127 | /* Like create_tmp_var, except record the variable for registration at | |
128 | the given nesting level. */ | |
129 | ||
130 | static tree | |
131 | create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix) | |
132 | { | |
133 | tree tmp_var; | |
134 | ||
70883f72 | 135 | /* If the type is of variable size or a type which must be created by the |
6de9cd9a DN |
136 | frontend, something is wrong. Note that we explicitly allow |
137 | incomplete types here, since we create them ourselves here. */ | |
d73d4593 | 138 | gcc_assert (!TREE_ADDRESSABLE (type)); |
1e128c5f GB |
139 | gcc_assert (!TYPE_SIZE_UNIT (type) |
140 | || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST); | |
6de9cd9a DN |
141 | |
142 | tmp_var = create_tmp_var_raw (type, prefix); | |
143 | DECL_CONTEXT (tmp_var) = info->context; | |
144 | TREE_CHAIN (tmp_var) = info->new_local_var_chain; | |
48eb4e53 | 145 | DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1; |
6de9cd9a DN |
146 | info->new_local_var_chain = tmp_var; |
147 | ||
148 | return tmp_var; | |
149 | } | |
150 | ||
bde6c65d JL |
151 | /* Take the address of EXP to be used within function CONTEXT. |
152 | Mark it for addressability as necessary. */ | |
6de9cd9a | 153 | |
8b11a64c | 154 | tree |
bde6c65d | 155 | build_addr (tree exp, tree context) |
6de9cd9a DN |
156 | { |
157 | tree base = exp; | |
bde6c65d JL |
158 | tree save_context; |
159 | tree retval; | |
44de5aeb | 160 | |
afe84921 | 161 | while (handled_component_p (base)) |
6de9cd9a | 162 | base = TREE_OPERAND (base, 0); |
44de5aeb | 163 | |
6de9cd9a DN |
164 | if (DECL_P (base)) |
165 | TREE_ADDRESSABLE (base) = 1; | |
166 | ||
bde6c65d JL |
167 | /* Building the ADDR_EXPR will compute a set of properties for |
168 | that ADDR_EXPR. Those properties are unfortunately context | |
169 | specific. ie, they are dependent on CURRENT_FUNCTION_DECL. | |
170 | ||
171 | Temporarily set CURRENT_FUNCTION_DECL to the desired context, | |
172 | build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That | |
173 | way the properties are for the ADDR_EXPR are computed properly. */ | |
174 | save_context = current_function_decl; | |
175 | current_function_decl = context; | |
176 | retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp); | |
177 | current_function_decl = save_context;; | |
178 | return retval; | |
6de9cd9a DN |
179 | } |
180 | ||
181 | /* Insert FIELD into TYPE, sorted by alignment requirements. */ | |
182 | ||
183 | static void | |
184 | insert_field_into_struct (tree type, tree field) | |
185 | { | |
186 | tree *p; | |
187 | ||
188 | DECL_CONTEXT (field) = type; | |
189 | ||
190 | for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p)) | |
191 | if (DECL_ALIGN (field) >= DECL_ALIGN (*p)) | |
192 | break; | |
193 | ||
194 | TREE_CHAIN (field) = *p; | |
195 | *p = field; | |
196 | } | |
197 | ||
198 | /* Build or return the RECORD_TYPE that describes the frame state that is | |
199 | shared between INFO->CONTEXT and its nested functions. This record will | |
200 | not be complete until finalize_nesting_tree; up until that point we'll | |
201 | be adding fields as necessary. | |
202 | ||
203 | We also build the DECL that represents this frame in the function. */ | |
204 | ||
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function-name>" for dumps/debugging.  */
      name = concat ("FRAME.",
                     IDENTIFIER_POINTER (DECL_NAME (info->context)),
                     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;
      info->frame_decl = create_tmp_var_for (info, type, "FRAME");

      /* ??? Always make it addressable for now, since it is meant to
         be pointed to by the static chain pointer.  This pessimizes
         when it turns out that no static chains are needed because
         the nested functions referencing non-local variables are not
         reachable, but the true pessimization is to create the non-
         local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }
  return type;
}
234 | ||
235 | /* Return true if DECL should be referenced by pointer in the non-local | |
236 | frame structure. */ | |
237 | ||
238 | static bool | |
239 | use_pointer_in_frame (tree decl) | |
240 | { | |
241 | if (TREE_CODE (decl) == PARM_DECL) | |
242 | { | |
243 | /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable | |
244 | sized decls, and inefficient to copy large aggregates. Don't bother | |
245 | moving anything but scalar variables. */ | |
246 | return AGGREGATE_TYPE_P (TREE_TYPE (decl)); | |
247 | } | |
248 | else | |
249 | { | |
250 | /* Variable sized types make things "interesting" in the frame. */ | |
251 | return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl)); | |
252 | } | |
253 | } | |
254 | ||
255 | /* Given DECL, a non-locally accessed variable, find or create a field | |
256 | in the non-local frame structure for the given nesting context. */ | |
257 | ||
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With INSERT == NO_INSERT, returns NULL if no mapping exists yet.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
{
  struct var_map_elt *elt, dummy;
  void **slot;
  tree field;

  /* Probe the map keyed on DECL; htab_find_slot returns NULL only
     when not inserting and no entry exists.  */
  dummy.old = decl;
  slot = htab_find_slot (info->var_map, &dummy, insert);
  if (!slot)
    {
      gcc_assert (insert != INSERT);
      return NULL;
    }
  elt = *slot;

  if (!elt && insert == INSERT)
    {
      field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
        {
          /* Store only a pointer to the decl in the frame.  */
          TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
          DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
          DECL_NONADDRESSABLE_P (field) = 1;
        }
      else
        {
          /* Store the object itself; mirror the decl's attributes.  */
          TREE_TYPE (field) = TREE_TYPE (decl);
          DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
          DECL_ALIGN (field) = DECL_ALIGN (decl);
          DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
          TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
          DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
          TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
        }

      insert_field_into_struct (get_frame_type (info), field);

      /* Record the mapping in the hash table slot we already hold.  */
      elt = ggc_alloc (sizeof (*elt));
      elt->old = decl;
      elt->new = field;
      *slot = elt;

      /* Remapped parameters must be copied into the frame on entry.  */
      if (TREE_CODE (decl) == PARM_DECL)
        info->any_parm_remapped = true;
    }
  else
    field = elt ? elt->new : NULL;

  return field;
}
312 | ||
313 | /* Build or return the variable that holds the static chain within | |
314 | INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */ | |
315 | ||
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;
  if (!decl)
    {
      tree type;

      /* The chain is a pointer to the parent's frame struct.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
         the construction of this variable is handled specially in
         expand_function_start and initialize_inlined_parameters.
         Note also that it's represented as a parameter.  This is more
         close to the truth, since the initial value does come from
         the caller.  */
      decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
         it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;
    }
  return decl;
}
348 | ||
349 | /* Build or return the field within the non-local frame state that holds | |
350 | the static chain for INFO->CONTEXT. This is the way to walk back up | |
351 | multiple nesting levels. */ | |
352 | ||
353 | static tree | |
354 | get_chain_field (struct nesting_info *info) | |
355 | { | |
356 | tree field = info->chain_field; | |
357 | if (!field) | |
358 | { | |
359 | tree type = build_pointer_type (get_frame_type (info->outer)); | |
360 | ||
361 | field = make_node (FIELD_DECL); | |
362 | DECL_NAME (field) = get_identifier ("__chain"); | |
363 | TREE_TYPE (field) = type; | |
364 | DECL_ALIGN (field) = TYPE_ALIGN (type); | |
365 | DECL_NONADDRESSABLE_P (field) = 1; | |
366 | ||
367 | insert_field_into_struct (get_frame_type (info), field); | |
368 | ||
369 | info->chain_field = field; | |
370 | } | |
371 | return field; | |
372 | } | |
373 | ||
374 | /* Copy EXP into a temporary. Allocate the temporary in the context of | |
375 | INFO and insert the initialization statement before TSI. */ | |
376 | ||
377 | static tree | |
378 | init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi) | |
379 | { | |
380 | tree t, stmt; | |
381 | ||
382 | t = create_tmp_var_for (info, TREE_TYPE (exp), NULL); | |
383 | stmt = build (MODIFY_EXPR, TREE_TYPE (t), t, exp); | |
384 | SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi))); | |
385 | tsi_link_before (tsi, stmt, TSI_SAME_STMT); | |
386 | ||
387 | return t; | |
388 | } | |
389 | ||
390 | /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */ | |
391 | ||
392 | static tree | |
26277d41 | 393 | tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi) |
6de9cd9a DN |
394 | { |
395 | if (is_gimple_val (exp)) | |
396 | return exp; | |
397 | else | |
398 | return init_tmp_var (info, exp, tsi); | |
399 | } | |
400 | ||
c529c27e RH |
401 | /* Similarly, but copy from the temporary and insert the statement |
402 | after the iterator. */ | |
403 | ||
404 | static tree | |
405 | save_tmp_var (struct nesting_info *info, tree exp, | |
406 | tree_stmt_iterator *tsi) | |
407 | { | |
408 | tree t, stmt; | |
409 | ||
410 | t = create_tmp_var_for (info, TREE_TYPE (exp), NULL); | |
411 | stmt = build (MODIFY_EXPR, TREE_TYPE (t), exp, t); | |
412 | SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi))); | |
413 | tsi_link_after (tsi, stmt, TSI_SAME_STMT); | |
414 | ||
415 | return t; | |
416 | } | |
417 | ||
6de9cd9a DN |
418 | /* Build or return the type used to represent a nested function trampoline. */ |
419 | ||
420 | static GTY(()) tree trampoline_type; | |
421 | ||
422 | static tree | |
423 | get_trampoline_type (void) | |
424 | { | |
425 | tree record, t; | |
426 | unsigned align, size; | |
427 | ||
428 | if (trampoline_type) | |
429 | return trampoline_type; | |
430 | ||
431 | align = TRAMPOLINE_ALIGNMENT; | |
432 | size = TRAMPOLINE_SIZE; | |
433 | ||
434 | /* If we won't be able to guarantee alignment simply via TYPE_ALIGN, | |
435 | then allocate extra space so that we can do dynamic alignment. */ | |
436 | if (align > STACK_BOUNDARY) | |
437 | { | |
438 | size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT); | |
439 | align = STACK_BOUNDARY; | |
440 | } | |
441 | ||
7d60be94 | 442 | t = build_index_type (build_int_cst (NULL_TREE, size - 1)); |
6de9cd9a DN |
443 | t = build_array_type (char_type_node, t); |
444 | t = build_decl (FIELD_DECL, get_identifier ("__data"), t); | |
445 | DECL_ALIGN (t) = align; | |
446 | DECL_USER_ALIGN (t) = 1; | |
447 | ||
448 | record = make_node (RECORD_TYPE); | |
449 | TYPE_NAME (record) = get_identifier ("__builtin_trampoline"); | |
450 | TYPE_FIELDS (record) = t; | |
451 | layout_type (record); | |
452 | ||
453 | return record; | |
454 | } | |
455 | ||
456 | /* Given DECL, a nested function, find or create a field in the non-local | |
457 | frame structure for a trampoline for this function. */ | |
458 | ||
/* Given DECL, a nested function, find or create a field in the non-local
   frame structure for a trampoline for this function.  Same hash-table
   protocol as lookup_field_for_decl; with NO_INSERT, may return NULL.  */

static tree
lookup_tramp_for_decl (struct nesting_info *info, tree decl,
                       enum insert_option insert)
{
  struct var_map_elt *elt, dummy;
  void **slot;
  tree field;

  dummy.old = decl;
  slot = htab_find_slot (info->var_map, &dummy, insert);
  if (!slot)
    {
      gcc_assert (insert != INSERT);
      return NULL;
    }
  elt = *slot;

  if (!elt && insert == INSERT)
    {
      /* The trampoline field carries the nested function's name and
         must be addressable so we can initialize it at runtime.  */
      field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);
      TREE_TYPE (field) = get_trampoline_type ();
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      elt = ggc_alloc (sizeof (*elt));
      elt->old = decl;
      elt->new = field;
      *slot = elt;

      info->any_tramp_created = true;
    }
  else
    field = elt ? elt->new : NULL;

  return field;
}
497 | ||
498 | /* Build or return the field within the non-local frame state that holds | |
499 | the non-local goto "jmp_buf". The buffer itself is maintained by the | |
500 | rtl middle-end as dynamic stack space is allocated. */ | |
501 | ||
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
         frame pointer, the rest is for the target's stack pointer save
         area.  The number of words is controlled by STACK_SAVEAREA_MODE;
         not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
        type = ptr_type_node;
      else
        type = lang_hooks.types.type_for_mode (Pmode, 1);

      size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      /* NOTE(review): the index type built from SIZE yields SIZE + 1
         elements (indices 0..SIZE), i.e. one word of apparent slack --
         presumably intentional padding; confirm against the rtl
         nonlocal-goto expansion before "fixing".  */
      type = build_array_type
        (type, build_index_type (build_int_cst (NULL_TREE, size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      DECL_ALIGN (field) = TYPE_ALIGN (type);
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
540 | \f | |
541 | /* Convenience routines to walk all statements of a gimple function. | |
542 | ||
543 | For each statement, we invoke CALLBACK via walk_tree. The passed | |
544 | data is a walk_stmt_info structure. Of note here is a TSI that | |
545 | points to the current statement being walked. The VAL_ONLY flag | |
546 | that indicates whether the *TP being examined may be replaced | |
547 | with something that matches is_gimple_val (if true) or something | |
548 | slightly more complicated (if false). "Something" technically | |
549 | means the common subset of is_gimple_lvalue and is_gimple_rhs, | |
550 | but we never try to form anything more complicated than that, so | |
551 | we don't bother checking. */ | |
552 | ||
/* State threaded through walk_stmts/walk_tree while walking one
   function's statements.  See the block comment above for the meaning
   of VAL_ONLY.  */

struct walk_stmt_info
{
  walk_tree_fn callback;        /* Invoked on each interesting tree.  */
  tree_stmt_iterator tsi;       /* Points at the statement being walked,
                                   so callbacks can insert around it.  */
  struct nesting_info *info;    /* Nesting info of the walked function.  */
  bool val_only;                /* Replacement must be is_gimple_val.  */
  bool is_lhs;                  /* Currently inside a store's lhs.  */
  bool changed;                 /* Set by callbacks when they rewrote
                                   something (checked e.g. for ADDR_EXPR).  */
};
562 | ||
563 | /* A subroutine of walk_function. Iterate over all sub-statements of *TP. */ | |
564 | ||
/* A subroutine of walk_function.  Iterate over all sub-statements of *TP,
   invoking WI->CALLBACK (via walk_tree) on each non-statement operand
   with WI->VAL_ONLY / WI->IS_LHS set to describe the position.  */

static void
walk_stmts (struct walk_stmt_info *wi, tree *tp)
{
  tree t = *tp;
  if (!t)
    return;

  switch (TREE_CODE (t))
    {
    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          {
            /* Keep WI->TSI current so callbacks can insert statements.  */
            wi->tsi = i;
            walk_stmts (wi, tsi_stmt_ptr (i));
          }
      }
      break;

    case COND_EXPR:
      walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
      walk_stmts (wi, &COND_EXPR_THEN (t));
      walk_stmts (wi, &COND_EXPR_ELSE (t));
      break;
    case CATCH_EXPR:
      walk_stmts (wi, &CATCH_BODY (t));
      break;
    case EH_FILTER_EXPR:
      walk_stmts (wi, &EH_FILTER_FAILURE (t));
      break;
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      walk_stmts (wi, &TREE_OPERAND (t, 1));
      break;
    case BIND_EXPR:
      walk_stmts (wi, &BIND_EXPR_BODY (t));
      break;

    case RETURN_EXPR:
      walk_stmts (wi, &TREE_OPERAND (t, 0));
      break;

    case MODIFY_EXPR:
      /* A formal temporary lhs may use a COMPONENT_REF rhs.  */
      wi->val_only = !is_gimple_formal_tmp_var (TREE_OPERAND (t, 0));
      walk_tree (&TREE_OPERAND (t, 1), wi->callback, wi, NULL);

      /* If the rhs is appropriate for a memory, we may use a
         COMPONENT_REF on the lhs.  */
      wi->val_only = !is_gimple_mem_rhs (TREE_OPERAND (t, 1));
      wi->is_lhs = true;
      walk_tree (&TREE_OPERAND (t, 0), wi->callback, wi, NULL);

      /* Restore the default flag state for subsequent statements.  */
      wi->val_only = true;
      wi->is_lhs = false;
      break;

    default:
      wi->val_only = true;
      walk_tree (tp, wi->callback, wi, NULL);
      break;
    }
}
630 | ||
631 | /* Invoke CALLBACK on all statements of INFO->CONTEXT. */ | |
632 | ||
633 | static void | |
634 | walk_function (walk_tree_fn callback, struct nesting_info *info) | |
635 | { | |
636 | struct walk_stmt_info wi; | |
637 | ||
638 | memset (&wi, 0, sizeof (wi)); | |
639 | wi.callback = callback; | |
640 | wi.info = info; | |
641 | wi.val_only = true; | |
642 | ||
643 | walk_stmts (&wi, &DECL_SAVED_TREE (info->context)); | |
644 | } | |
645 | ||
646 | /* Similarly for ROOT and all functions nested underneath, depth first. */ | |
647 | ||
648 | static void | |
649 | walk_all_functions (walk_tree_fn callback, struct nesting_info *root) | |
650 | { | |
651 | do | |
652 | { | |
653 | if (root->inner) | |
654 | walk_all_functions (callback, root->inner); | |
655 | walk_function (callback, root); | |
656 | root = root->next; | |
657 | } | |
658 | while (root); | |
659 | } | |
6de9cd9a | 660 | \f |
35fd3193 | 661 | /* We have to check for a fairly pathological case. The operands of function |
ab1a8620 RK |
662 | nested function are to be interpreted in the context of the enclosing |
663 | function. So if any are variably-sized, they will get remapped when the | |
664 | enclosing function is inlined. But that remapping would also have to be | |
665 | done in the types of the PARM_DECLs of the nested function, meaning the | |
666 | argument types of that function will disagree with the arguments in the | |
667 | calls to that function. So we'd either have to make a copy of the nested | |
668 | function corresponding to each time the enclosing function was inlined or | |
669 | add a VIEW_CONVERT_EXPR to each such operand for each call to the nested | |
670 | function. The former is not practical. The latter would still require | |
671 | detecting this case to know when to add the conversions. So, for now at | |
672 | least, we don't inline such an enclosing function. | |
673 | ||
674 | We have to do that check recursively, so here return indicating whether | |
675 | FNDECL has such a nested function. ORIG_FN is the function we were | |
676 | trying to inline to use for checking whether any argument is variably | |
677 | modified by anything in it. | |
678 | ||
679 | It would be better to do this in tree-inline.c so that we could give | |
680 | the appropriate warning for why a function can't be inlined, but that's | |
681 | too late since the nesting structure has already been flattened and | |
682 | adding a flag just to record this fact seems a waste of a flag. */ | |
683 | ||
684 | static bool | |
685 | check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl) | |
686 | { | |
687 | struct cgraph_node *cgn = cgraph_node (fndecl); | |
688 | tree arg; | |
689 | ||
690 | for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested) | |
691 | { | |
692 | for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg)) | |
693 | if (variably_modified_type_p (TREE_TYPE (arg), 0), orig_fndecl) | |
694 | return true; | |
695 | ||
696 | if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl)) | |
697 | return true; | |
698 | } | |
699 | ||
700 | return false; | |
701 | } | |
702 | ||
6de9cd9a DN |
703 | /* Construct our local datastructure describing the function nesting |
704 | tree rooted by CGN. */ | |
705 | ||
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  Returns a GC-allocated nesting_info whose INNER
   list holds one entry per directly nested function.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = ggc_calloc (1, sizeof (*info));
  info->var_map = htab_create_ggc (7, var_map_hash, var_map_eq, ggc_free);
  info->context = cgn->decl;

  /* Build children recursively, pushing each onto INFO->INNER.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of
     why this check has to be done here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
728 | ||
729 | /* Return an expression computing the static chain for TARGET_CONTEXT | |
730 | from INFO->CONTEXT. Insert any necessary computations before TSI. */ | |
731 | ||
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before TSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
                  tree_stmt_iterator *tsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target frame lives right here: its address is the chain.  */
      x = build_addr (info->frame_decl, target_context);
    }
  else
    {
      /* Start from our incoming chain and hop outward through each
         intermediate frame's __chain field until we reach the target.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          tree field = get_chain_field (i);

          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, tsi);
        }
    }

  return x;
}
759 | ||
760 | /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local | |
761 | frame as seen from INFO->CONTEXT. Insert any necessary computations | |
762 | before TSI. */ | |
763 | ||
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before TSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
                 tree field, tree_stmt_iterator *tsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
    }
  else
    {
      /* Follow the static chain outward to TARGET_CONTEXT's frame.  */
      x = get_chain_decl (info);

      for (i = info->outer; i->context != target_context; i = i->outer)
        {
          /* NB: this inner FIELD shadows the parameter on purpose; it is
             the intermediate frame's __chain field.  */
          tree field = get_chain_field (i);

          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, tsi);
        }

      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
    }

  /* Finally select the requested member of the target frame.  */
  x = build (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
796 | ||
797 | /* Called via walk_function+walk_tree, rewrite all references to VAR | |
798 | and PARM_DECLs that belong to outer functions. | |
799 | ||
800 | The rewrite will involve some number of structure accesses back up | |
801 | the static chain. E.g. for a variable FOO up one nesting level it'll | |
802 | be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further | |
803 | indirections apply to decls for which use_pointer_in_frame is true. */ | |
804 | ||
static tree
convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  struct nesting_info *info = wi->info;
  tree t = *tp;

  /* By default do not recurse; each case below re-enables walking of
     sub-trees explicitly where it is needed.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree target_context = decl_function_context (t);
	  struct nesting_info *i;
	  tree x;
	  wi->changed = true;

	  /* Walk up the nesting chain to the function that owns T, then
	     build the frame-field access for it from here.  */
	  for (i = info->outer; i->context != target_context; i = i->outer)
	    continue;
	  x = lookup_field_for_decl (i, t, INSERT);
	  x = get_frame_field (info, target_context, x, &wi->tsi);
	  /* Decls stored by pointer need one more indirection through a
	     temporary holding the loaded pointer.  */
	  if (use_pointer_in_frame (t))
	    {
	      x = init_tmp_var (info, x, &wi->tsi);
	      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
	    }

	  /* In a context that requires a simple value, load into (or for
	     an lhs, store from) a fresh temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case GOTO_EXPR:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR need not be a value, and is never
	   an lhs; walk it with those flags cleared and watch wi->changed
	   to see whether anything underneath was rewritten.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, then TREE_INVARIANT will be wrong,
	       since we're no longer directly referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invarant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Walk only the operands that describe the reference (field
	     offsets, array indices, bit positions), not the base.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
			 NULL);
	    }
	}
      /* Finally walk the innermost base object itself.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference, wi, NULL);
      break;

    default:
      /* For all other expressions, recurse with flags reset.  Types and
	 decls are terminal and never walked into.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
950 | ||
951 | /* Called via walk_function+walk_tree, rewrite all references to VAR | |
952 | and PARM_DECLs that were referenced by inner nested functions. | |
953 | The rewrite will be a structure reference to the local frame variable. */ | |
954 | ||
static tree
convert_local_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  struct nesting_info *info = wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default do not recurse; each case below re-enables walking of
     sub-trees explicitly where it is needed.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  x = get_frame_field (info, info->context, field, &wi->tsi);

	  /* In a context that requires a simple value, load into (or for
	     an lhs, store from) a fresh temporary.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->tsi);
	      else
		x = init_tmp_var (info, x, &wi->tsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* The operand of an ADDR_EXPR need not be a value, and is never
	 an lhs; walk it with those flags cleared and watch wi->changed
	 to see whether anything underneath was rewritten.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invarant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Walk only the operands that describe the reference (field
	     offsets, array indices, bit positions), not the base.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
			 NULL);
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
			 NULL);
	    }
	}
      /* Finally walk the innermost base object itself.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference, wi, NULL);
      wi->val_only = save_val_only;
      break;

    default:
      /* For all other expressions, recurse with flags reset.  Types and
	 decls are terminal and never walked into.  */
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1081 | ||
1082 | /* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that | |
1083 | reference labels from outer functions. The rewrite will be a | |
1084 | call to __builtin_nonlocal_goto. */ | |
1085 | ||
static tree
convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, label, new_label, target_context, x, arg, field;
  struct var_map_elt *elt, dummy;
  void **slot;

  /* Only direct GOTO_EXPRs to labels owned by an outer function are
     interesting; everything else is left alone.  */
  *walk_subtrees = 0;
  if (TREE_CODE (t) != GOTO_EXPR)
    return NULL_TREE;
  label = GOTO_DESTINATION (t);
  if (TREE_CODE (label) != LABEL_DECL)
    return NULL_TREE;
  target_context = decl_function_context (label);
  if (target_context == info->context)
    return NULL_TREE;

  /* Find the nesting_info for the function that owns the label.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be use for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  dummy.old = label;
  slot = htab_find_slot (i->var_map, &dummy, INSERT);
  elt = *slot;
  if (elt == NULL)
    {
      new_label = create_artificial_label ();
      DECL_NONLOCAL (new_label) = 1;

      elt = ggc_alloc (sizeof (*elt));
      elt->old = label;
      elt->new = new_label;
      *slot = elt;
    }
  else
    new_label = elt->new;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, &wi->tsi);
  x = build_addr (x, target_context);
  x = tsi_gimplify_val (info, x, &wi->tsi);
  arg = tree_cons (NULL, x, NULL);
  x = build_addr (new_label, target_context);
  arg = tree_cons (NULL, x, arg);
  x = implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO];
  x = build_function_call_expr (x, arg);

  /* Replace the goto statement itself with the builtin call, keeping the
     original source location.  */
  SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
  *tsi_stmt_ptr (wi->tsi) = x;

  return NULL_TREE;
}
1147 | ||
1148 | /* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that | |
1149 | are referenced via nonlocal goto from a nested function. The rewrite | |
1150 | will involve installing a newly generated DECL_NONLOCAL label, and | |
1151 | (potentially) a branch around the rtl gunk that is assumed to be | |
1152 | attached to such a label. */ | |
1153 | ||
static tree
convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  struct nesting_info *info = wi->info;
  tree t = *tp, label, new_label, x;
  struct var_map_elt *elt, dummy;
  tree_stmt_iterator tmp_tsi;

  /* Only LABEL_EXPRs whose label was recorded in var_map by
     convert_nl_goto_reference need a receiver inserted.  */
  *walk_subtrees = 0;
  if (TREE_CODE (t) != LABEL_EXPR)
    return NULL_TREE;
  label = LABEL_EXPR_LABEL (t);

  dummy.old = label;
  elt = htab_find (info->var_map, &dummy);
  if (!elt)
    return NULL_TREE;
  new_label = elt->new;

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_tsi = wi->tsi;
  tsi_prev (&tmp_tsi);
  if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
    {
      x = build1 (GOTO_EXPR, void_type_node, label);
      tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
    }
  /* Insert the DECL_NONLOCAL receiver label just before the original
     user label.  */
  x = build1 (LABEL_EXPR, void_type_node, new_label);
  tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);

  return NULL_TREE;
}
1188 | ||
1189 | /* Called via walk_function+walk_tree, rewrite all references to addresses | |
1190 | of nested functions that require the use of trampolines. The rewrite | |
1191 | will involve a reference a trampoline generated for the occasion. */ | |
1192 | ||
static tree
convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  struct nesting_info *info = wi->info, *i;
  tree t = *tp, decl, target_context, x, arg;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (DECL_NO_STATIC_CHAIN (decl))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->tsi);
      x = build_addr (x, target_context);
      x = tsi_gimplify_val (info, x, &wi->tsi);
      arg = tree_cons (NULL, x, NULL);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      x = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
      x = build_function_call_expr (x, arg);
      x = init_tmp_var (info, x, &wi->tsi);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->tsi);

      *tp = x;
      break;

    case CALL_EXPR:
      /* Only walk call arguments, lest we generate trampolines for
	 direct calls.  */
      walk_tree (&TREE_OPERAND (t, 1), convert_tramp_reference, wi, NULL);
      break;

    default:
      /* Recurse into other expressions; types and decls are terminal.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
1263 | ||
1264 | /* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that | |
1265 | reference nested functions to make sure that the static chain is | |
1266 | set up properly for the call. */ | |
1267 | ||
1268 | static tree | |
1269 | convert_call_expr (tree *tp, int *walk_subtrees, void *data) | |
1270 | { | |
1271 | struct walk_stmt_info *wi = data; | |
1272 | struct nesting_info *info = wi->info; | |
1273 | tree t = *tp, decl, target_context; | |
1274 | ||
1275 | *walk_subtrees = 0; | |
1276 | switch (TREE_CODE (t)) | |
1277 | { | |
1278 | case CALL_EXPR: | |
1279 | decl = get_callee_fndecl (t); | |
1280 | if (!decl) | |
1281 | break; | |
1282 | target_context = decl_function_context (decl); | |
1283 | if (target_context && !DECL_NO_STATIC_CHAIN (decl)) | |
1284 | TREE_OPERAND (t, 2) | |
1285 | = get_static_chain (info, target_context, &wi->tsi); | |
1286 | break; | |
1287 | ||
1288 | case RETURN_EXPR: | |
1289 | case MODIFY_EXPR: | |
d25cee4d RH |
1290 | case WITH_SIZE_EXPR: |
1291 | /* Only return modify and with_size_expr may contain calls. */ | |
6de9cd9a DN |
1292 | *walk_subtrees = 1; |
1293 | break; | |
1294 | ||
1295 | default: | |
1296 | break; | |
1297 | } | |
1298 | ||
1299 | return NULL_TREE; | |
1300 | } | |
1301 | ||
1302 | /* Walk the nesting tree starting with ROOT, depth first. Convert all | |
1303 | trampolines and call expressions. On the way back up, determine if | |
1304 | a nested function actually uses its static chain; if not, remember that. */ | |
1305 | ||
1306 | static void | |
1307 | convert_all_function_calls (struct nesting_info *root) | |
1308 | { | |
1309 | do | |
1310 | { | |
1311 | if (root->inner) | |
1312 | convert_all_function_calls (root->inner); | |
1313 | ||
1314 | walk_function (convert_tramp_reference, root); | |
1315 | walk_function (convert_call_expr, root); | |
1316 | ||
1317 | /* If the function does not use a static chain, then remember that. */ | |
1318 | if (root->outer && !root->chain_decl && !root->chain_field) | |
1319 | DECL_NO_STATIC_CHAIN (root->context) = 1; | |
1320 | else | |
1e128c5f | 1321 | gcc_assert (!DECL_NO_STATIC_CHAIN (root->context)); |
6de9cd9a DN |
1322 | |
1323 | root = root->next; | |
1324 | } | |
1325 | while (root); | |
1326 | } | |
1327 | ||
1328 | /* Do "everything else" to clean up or complete state collected by the | |
1329 | various walking passes -- lay out the types and decls, generate code | |
1330 | to initialize the frame decl, store critical expressions in the | |
1331 | struct function for rtl to find. */ | |
1332 | ||
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  tree stmt_list = NULL;
  tree context = root->context;
  struct function *sf;
  struct cgraph_node *node;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  /* Store either the parameter's address or its value into the
	     corresponding frame field: FRAME.field = p (or &p).  */
	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  y = build (COMPONENT_REF, TREE_TYPE (field),
		     root->frame_decl, field, NULL_TREE);
	  x = build (MODIFY_EXPR, TREE_TYPE (field), y, x);
	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build (COMPONENT_REF, TREE_TYPE (root->chain_field),
		      root->frame_decl, root->chain_field, NULL_TREE);
      x = build (MODIFY_EXPR, TREE_TYPE (x), x, get_chain_decl (root));
      append_to_statement_list (x, &stmt_list);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg, x, field;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  /* Build the argument list for __builtin_init_trampoline:
	     (&FRAME.tramp_field, &nested_function, static_chain).
	     Note tree_cons prepends, so arguments are built in reverse.  */
	  if (DECL_NO_STATIC_CHAIN (i->context))
	    x = null_pointer_node;
	  else
	    x = build_addr (root->frame_decl, context);
	  arg = tree_cons (NULL, x, NULL);

	  x = build_addr (i->context, context);
	  arg = tree_cons (NULL, x, arg);

	  x = build (COMPONENT_REF, TREE_TYPE (field),
		     root->frame_decl, field, NULL_TREE);
	  x = build_addr (x, context);
	  arg = tree_cons (NULL, x, arg);

	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
	  x = build_function_call_expr (x, arg);

	  append_to_statement_list (x, &stmt_list);
	}
    }

  /* If we created initialization statements, insert them before the
     existing function body.  */
  if (stmt_list)
    {
      annotate_all_with_locus (&stmt_list,
			       DECL_SOURCE_LOCATION (context));
      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
				&stmt_list);
      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_tmp_vars (root->new_local_var_chain,
		      DECL_SAVED_TREE (root->context));

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
  node = cgraph_node (root->context);

  /* For nested functions update the cgraph to reflect unnesting.
     We also delay finalizing of these functions up to this point.  */
  if (node->origin)
    {
      cgraph_unnest_node (cgraph_node (root->context));
      cgraph_finalize_function (root->context, true);
    }
}
1465 | ||
1466 | static void | |
1467 | finalize_nesting_tree (struct nesting_info *root) | |
1468 | { | |
1469 | do | |
1470 | { | |
1471 | if (root->inner) | |
1472 | finalize_nesting_tree (root->inner); | |
1473 | finalize_nesting_tree_1 (root); | |
1474 | root = root->next; | |
1475 | } | |
1476 | while (root); | |
1477 | } | |
1478 | ||
1479 | /* Free the data structures allocated during this pass. */ | |
1480 | ||
1481 | static void | |
1482 | free_nesting_tree (struct nesting_info *root) | |
1483 | { | |
1484 | struct nesting_info *next; | |
1485 | do | |
1486 | { | |
1487 | if (root->inner) | |
1488 | free_nesting_tree (root->inner); | |
1489 | htab_delete (root->var_map); | |
1490 | next = root->next; | |
9bf777ee | 1491 | ggc_free (root); |
6de9cd9a DN |
1492 | root = next; |
1493 | } | |
1494 | while (root); | |
1495 | } | |
1496 | ||
9bf777ee AP |
1497 | static GTY(()) struct nesting_info *root; |
1498 | ||
6de9cd9a DN |
1499 | /* Main entry point for this pass. Process FNDECL and all of its nested |
1500 | subroutines and turn them into something less tightly bound. */ | |
1501 | ||
1502 | void | |
1503 | lower_nested_functions (tree fndecl) | |
1504 | { | |
6de9cd9a DN |
1505 | struct cgraph_node *cgn; |
1506 | ||
1507 | /* If there are no nested functions, there's nothing to do. */ | |
1508 | cgn = cgraph_node (fndecl); | |
1509 | if (!cgn->nested) | |
1510 | return; | |
1511 | ||
1512 | root = create_nesting_tree (cgn); | |
1513 | walk_all_functions (convert_nonlocal_reference, root); | |
1514 | walk_all_functions (convert_local_reference, root); | |
1515 | walk_all_functions (convert_nl_goto_reference, root); | |
1516 | walk_all_functions (convert_nl_goto_receiver, root); | |
1517 | convert_all_function_calls (root); | |
1518 | finalize_nesting_tree (root); | |
1519 | free_nesting_tree (root); | |
9bf777ee | 1520 | root = NULL; |
6de9cd9a DN |
1521 | } |
1522 | ||
1523 | #include "gt-tree-nested.h" |