]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-low.c
Remove unnecessary VEC function overloads.
[thirdparty/gcc.git] / gcc / gimple-low.c
CommitLineData
726a989a 1/* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
6de9cd9a 2
d652f226 3 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
f9487002 4 Free Software Foundation, Inc.
6de9cd9a
DN
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
726a989a
RB
27#include "gimple.h"
28#include "tree-iterator.h"
6de9cd9a 29#include "tree-inline.h"
6de9cd9a 30#include "tree-flow.h"
6de9cd9a
DN
31#include "flags.h"
32#include "function.h"
718f9c0f 33#include "diagnostic-core.h"
6de9cd9a
DN
34#include "tree-pass.h"
35
726a989a
RB
36/* The differences between High GIMPLE and Low GIMPLE are the
37 following:
38
39 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
40
41 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
42 flow and exception regions are built as an on-the-side region
43 hierarchy (See tree-eh.c:lower_eh_constructs).
44
45 3- Multiple identical return statements are grouped into a single
46 return and gotos to the unique return site. */
47
48/* Match a return statement with a label. During lowering, we identify
49 identical return statements and replace duplicates with a jump to
50 the corresponding label. */
51struct return_statements_t
52{
53 tree label;
54 gimple stmt;
55};
56typedef struct return_statements_t return_statements_t;
57
58DEF_VEC_O(return_statements_t);
59DEF_VEC_ALLOC_O(return_statements_t,heap);
60
6de9cd9a
DN
61struct lower_data
62{
63 /* Block the current statement belongs to. */
64 tree block;
f5a76aea 65
726a989a 66 /* A vector of label and return statements to be moved to the end
71877985 67 of the function. */
726a989a 68 VEC(return_statements_t,heap) *return_statements;
4f6c2131 69
a141816c
EB
70 /* True if the current statement cannot fall through. */
71 bool cannot_fallthru;
72
4f6c2131
EB
73 /* True if the function calls __builtin_setjmp. */
74 bool calls_builtin_setjmp;
6de9cd9a
DN
75};
76
726a989a
RB
77static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
78static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
f778c049 79static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
726a989a
RB
80static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
81static void lower_builtin_setjmp (gimple_stmt_iterator *);
6de9cd9a 82
726a989a
RB
83
84/* Lower the body of current_function_decl from High GIMPLE into Low
85 GIMPLE. */
6de9cd9a 86
c2924966 87static unsigned int
6de9cd9a
DN
88lower_function_body (void)
89{
90 struct lower_data data;
726a989a
RB
91 gimple_seq body = gimple_body (current_function_decl);
92 gimple_seq lowered_body;
93 gimple_stmt_iterator i;
94 gimple bind;
95 tree t;
96 gimple x;
97
98 /* The gimplifier should've left a body of exactly one statement,
99 namely a GIMPLE_BIND. */
100 gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
101 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
6de9cd9a 102
953ff289 103 memset (&data, 0, sizeof (data));
6de9cd9a
DN
104 data.block = DECL_INITIAL (current_function_decl);
105 BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
106 BLOCK_CHAIN (data.block) = NULL_TREE;
107 TREE_ASM_WRITTEN (data.block) = 1;
726a989a
RB
108 data.return_statements = VEC_alloc (return_statements_t, heap, 8);
109
110 bind = gimple_seq_first_stmt (body);
111 lowered_body = NULL;
112 gimple_seq_add_stmt (&lowered_body, bind);
113 i = gsi_start (lowered_body);
114 lower_gimple_bind (&i, &data);
6de9cd9a 115
726a989a 116 i = gsi_last (lowered_body);
ff98621c
RH
117
118 /* If the function falls off the end, we need a null return statement.
726a989a 119 If we've already got one in the return_statements vector, we don't
ff98621c 120 need to do anything special. Otherwise build one by hand. */
726a989a
RB
121 if (gimple_seq_may_fallthru (lowered_body)
122 && (VEC_empty (return_statements_t, data.return_statements)
123 || gimple_return_retval (VEC_last (return_statements_t,
0823efed 124 data.return_statements).stmt) != NULL))
ff98621c 125 {
726a989a
RB
126 x = gimple_build_return (NULL);
127 gimple_set_location (x, cfun->function_end_locus);
cc2a64dd 128 gimple_set_block (x, DECL_INITIAL (current_function_decl));
726a989a 129 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
ff98621c
RH
130 }
131
132 /* If we lowered any return statements, emit the representative
133 at the end of the function. */
726a989a 134 while (!VEC_empty (return_statements_t, data.return_statements))
f5a76aea 135 {
726a989a
RB
136 return_statements_t t;
137
138 /* Unfortunately, we can't use VEC_pop because it returns void for
139 objects. */
0823efed 140 t = VEC_last (return_statements_t, data.return_statements);
726a989a
RB
141 VEC_truncate (return_statements_t,
142 data.return_statements,
143 VEC_length (return_statements_t,
144 data.return_statements) - 1);
145
146 x = gimple_build_label (t.label);
147 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
726a989a 148 gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
f5a76aea
RH
149 }
150
4f6c2131
EB
151 /* If the function calls __builtin_setjmp, we need to emit the computed
152 goto that will serve as the unique dispatcher for all the receivers. */
153 if (data.calls_builtin_setjmp)
154 {
155 tree disp_label, disp_var, arg;
156
157 /* Build 'DISP_LABEL:' and insert. */
c2255bc4 158 disp_label = create_artificial_label (cfun->function_end_locus);
4f6c2131
EB
159 /* This mark will create forward edges from every call site. */
160 DECL_NONLOCAL (disp_label) = 1;
e3b5732b 161 cfun->has_nonlocal_label = 1;
726a989a
RB
162 x = gimple_build_label (disp_label);
163 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
4f6c2131
EB
164
165 /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
166 and insert. */
167 disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
5039610b 168 arg = build_addr (disp_label, current_function_decl);
e79983f4 169 t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
726a989a
RB
170 x = gimple_build_call (t, 1, arg);
171 gimple_call_set_lhs (x, disp_var);
4f6c2131
EB
172
173 /* Build 'goto DISP_VAR;' and insert. */
726a989a
RB
174 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
175 x = gimple_build_goto (disp_var);
176 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
4f6c2131
EB
177 }
178
355a7673
MM
179 /* Once the old body has been lowered, replace it with the new
180 lowered sequence. */
181 gimple_set_body (current_function_decl, lowered_body);
182
282899df 183 gcc_assert (data.block == DECL_INITIAL (current_function_decl));
6de9cd9a
DN
184 BLOCK_SUBBLOCKS (data.block)
185 = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
186
187 clear_block_marks (data.block);
726a989a 188 VEC_free(return_statements_t, heap, data.return_statements);
c2924966 189 return 0;
6de9cd9a
DN
190}
191
b8698a0f 192struct gimple_opt_pass pass_lower_cf =
6de9cd9a 193{
8ddbbcae
JH
194 {
195 GIMPLE_PASS,
6de9cd9a
DN
196 "lower", /* name */
197 NULL, /* gate */
198 lower_function_body, /* execute */
199 NULL, /* sub */
200 NULL, /* next */
201 0, /* static_pass_number */
7072a650 202 TV_NONE, /* tv_id */
6de9cd9a
DN
203 PROP_gimple_any, /* properties_required */
204 PROP_gimple_lcf, /* properties_provided */
bbbe4e7b 205 0, /* properties_destroyed */
6de9cd9a 206 0, /* todo_flags_start */
22c5fa5f 207 0 /* todo_flags_finish */
8ddbbcae 208 }
6de9cd9a
DN
209};
210
211
26e0228f 212
f9487002
JJ
213/* Verify if the type of the argument matches that of the function
214 declaration. If we cannot verify this or there is a mismatch,
6eb29714 215 return false. */
f9487002 216
26e0228f
XDL
217static bool
218gimple_check_call_args (gimple stmt, tree fndecl)
f9487002 219{
26e0228f 220 tree parms, p;
f9487002
JJ
221 unsigned int i, nargs;
222
25583c4f
RS
223 /* Calls to internal functions always match their signature. */
224 if (gimple_call_internal_p (stmt))
225 return true;
226
f9487002
JJ
227 nargs = gimple_call_num_args (stmt);
228
229 /* Get argument types for verification. */
f9487002
JJ
230 if (fndecl)
231 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9bfc434b
RG
232 else
233 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
f9487002
JJ
234
235 /* Verify if the type of the argument matches that of the function
236 declaration. If we cannot verify this or there is a mismatch,
6eb29714 237 return false. */
f9487002
JJ
238 if (fndecl && DECL_ARGUMENTS (fndecl))
239 {
240 for (i = 0, p = DECL_ARGUMENTS (fndecl);
241 i < nargs;
910ad8de 242 i++, p = DECL_CHAIN (p))
f9487002 243 {
e30459d5 244 tree arg;
f9487002
JJ
245 /* We cannot distinguish a varargs function from the case
246 of excess parameters, still deferring the inlining decision
247 to the callee is possible. */
248 if (!p)
249 break;
e30459d5 250 arg = gimple_call_arg (stmt, i);
f9487002 251 if (p == error_mark_node
e30459d5
RG
252 || arg == error_mark_node
253 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
254 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
6eb29714 255 return false;
f9487002
JJ
256 }
257 }
258 else if (parms)
259 {
260 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
261 {
e30459d5 262 tree arg;
f9487002
JJ
263 /* If this is a varargs function defer inlining decision
264 to callee. */
265 if (!p)
266 break;
e30459d5 267 arg = gimple_call_arg (stmt, i);
f9487002 268 if (TREE_VALUE (p) == error_mark_node
e30459d5 269 || arg == error_mark_node
f9487002 270 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
e30459d5
RG
271 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
272 && !fold_convertible_p (TREE_VALUE (p), arg)))
6eb29714 273 return false;
f9487002
JJ
274 }
275 }
276 else
277 {
278 if (nargs != 0)
6eb29714 279 return false;
f9487002 280 }
6eb29714 281 return true;
f9487002
JJ
282}
283
26e0228f
XDL
284/* Verify if the type of the argument and lhs of CALL_STMT matches
285 that of the function declaration CALLEE.
286 If we cannot verify this or there is a mismatch, return false. */
287
288bool
289gimple_check_call_matching_types (gimple call_stmt, tree callee)
290{
291 tree lhs;
292
293 if ((DECL_RESULT (callee)
294 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
295 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
296 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
297 TREE_TYPE (lhs))
298 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
299 || !gimple_check_call_args (call_stmt, callee))
300 return false;
301 return true;
302}
f9487002 303
726a989a 304/* Lower sequence SEQ. Unlike gimplification the statements are not relowered
6de9cd9a
DN
305 when they are changed -- if this has to be done, the lowering routine must
306 do it explicitly. DATA is passed through the recursion. */
307
1ebf7687 308static void
355a7673 309lower_sequence (gimple_seq *seq, struct lower_data *data)
6de9cd9a 310{
726a989a 311 gimple_stmt_iterator gsi;
6de9cd9a 312
355a7673 313 for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
726a989a 314 lower_stmt (&gsi, data);
6de9cd9a
DN
315}
316
50674e96 317
726a989a 318/* Lower the OpenMP directive statement pointed by GSI. DATA is
50674e96
DN
319 passed through the recursion. */
320
321static void
726a989a 322lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
50674e96 323{
726a989a 324 gimple stmt;
b8698a0f 325
726a989a 326 stmt = gsi_stmt (*gsi);
50674e96 327
355a7673
MM
328 lower_sequence (gimple_omp_body_ptr (stmt), data);
329 gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
726a989a 330 gimple_omp_set_body (stmt, NULL);
355a7673 331 gsi_next (gsi);
50674e96
DN
332}
333
334
a141816c
EB
335/* Lower statement GSI. DATA is passed through the recursion. We try to
336 track the fallthruness of statements and get rid of unreachable return
337 statements in order to prevent the EH lowering pass from adding useless
338 edges that can cause bogus warnings to be issued later; this guess need
339 not be 100% accurate, simply be conservative and reset cannot_fallthru
340 to false if we don't know. */
6de9cd9a
DN
341
342static void
726a989a 343lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
6de9cd9a 344{
726a989a 345 gimple stmt = gsi_stmt (*gsi);
6de9cd9a 346
726a989a 347 gimple_set_block (stmt, data->block);
6de9cd9a 348
726a989a 349 switch (gimple_code (stmt))
6de9cd9a 350 {
726a989a
RB
351 case GIMPLE_BIND:
352 lower_gimple_bind (gsi, data);
a141816c 353 /* Propagate fallthruness. */
f5a76aea 354 return;
6de9cd9a 355
726a989a 356 case GIMPLE_COND:
a141816c
EB
357 case GIMPLE_GOTO:
358 case GIMPLE_SWITCH:
359 data->cannot_fallthru = true;
360 gsi_next (gsi);
361 return;
726a989a
RB
362
363 case GIMPLE_RETURN:
a141816c
EB
364 if (data->cannot_fallthru)
365 {
366 gsi_remove (gsi, false);
367 /* Propagate fallthruness. */
368 }
369 else
370 {
371 lower_gimple_return (gsi, data);
372 data->cannot_fallthru = true;
373 }
726a989a
RB
374 return;
375
376 case GIMPLE_TRY:
f778c049
EB
377 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
378 lower_try_catch (gsi, data);
379 else
380 {
381 /* It must be a GIMPLE_TRY_FINALLY. */
382 bool cannot_fallthru;
383 lower_sequence (gimple_try_eval_ptr (stmt), data);
384 cannot_fallthru = data->cannot_fallthru;
385
386 /* The finally clause is always executed after the try clause,
387 so if it does not fall through, then the try-finally will not
388 fall through. Otherwise, if the try clause does not fall
389 through, then when the finally clause falls through it will
390 resume execution wherever the try clause was going. So the
391 whole try-finally will only fall through if both the try
392 clause and the finally clause fall through. */
393 data->cannot_fallthru = false;
394 lower_sequence (gimple_try_cleanup_ptr (stmt), data);
395 data->cannot_fallthru |= cannot_fallthru;
396 gsi_next (gsi);
397 }
398 return;
777f7f9a 399
0a35513e 400 case GIMPLE_EH_ELSE:
355a7673
MM
401 lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
402 lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
0a35513e
AH
403 break;
404
726a989a
RB
405 case GIMPLE_NOP:
406 case GIMPLE_ASM:
407 case GIMPLE_ASSIGN:
726a989a
RB
408 case GIMPLE_PREDICT:
409 case GIMPLE_LABEL:
1d65f45c 410 case GIMPLE_EH_MUST_NOT_THROW:
726a989a
RB
411 case GIMPLE_OMP_FOR:
412 case GIMPLE_OMP_SECTIONS:
413 case GIMPLE_OMP_SECTIONS_SWITCH:
414 case GIMPLE_OMP_SECTION:
415 case GIMPLE_OMP_SINGLE:
416 case GIMPLE_OMP_MASTER:
417 case GIMPLE_OMP_ORDERED:
418 case GIMPLE_OMP_CRITICAL:
419 case GIMPLE_OMP_RETURN:
420 case GIMPLE_OMP_ATOMIC_LOAD:
421 case GIMPLE_OMP_ATOMIC_STORE:
422 case GIMPLE_OMP_CONTINUE:
423 break;
4f6c2131 424
726a989a 425 case GIMPLE_CALL:
4f6c2131 426 {
726a989a
RB
427 tree decl = gimple_call_fndecl (stmt);
428
4f6c2131
EB
429 if (decl
430 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
431 && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
432 {
726a989a 433 lower_builtin_setjmp (gsi);
a141816c
EB
434 data->cannot_fallthru = false;
435 data->calls_builtin_setjmp = true;
4f6c2131
EB
436 return;
437 }
79ddec02 438
79ddec02
EB
439 if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
440 {
a141816c 441 data->cannot_fallthru = true;
79ddec02 442 gsi_next (gsi);
79ddec02
EB
443 return;
444 }
4f6c2131
EB
445 }
446 break;
447
726a989a
RB
448 case GIMPLE_OMP_PARALLEL:
449 case GIMPLE_OMP_TASK:
a141816c 450 data->cannot_fallthru = false;
726a989a 451 lower_omp_directive (gsi, data);
a141816c 452 data->cannot_fallthru = false;
50674e96
DN
453 return;
454
0a35513e 455 case GIMPLE_TRANSACTION:
355a7673 456 lower_sequence (gimple_transaction_body_ptr (stmt), data);
0a35513e
AH
457 break;
458
6de9cd9a 459 default:
282899df 460 gcc_unreachable ();
6de9cd9a
DN
461 }
462
a141816c 463 data->cannot_fallthru = false;
726a989a 464 gsi_next (gsi);
6de9cd9a
DN
465}
466
4f6c2131 467/* Lower a bind_expr TSI. DATA is passed through the recursion. */
6de9cd9a
DN
468
469static void
726a989a 470lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
6de9cd9a
DN
471{
472 tree old_block = data->block;
726a989a
RB
473 gimple stmt = gsi_stmt (*gsi);
474 tree new_block = gimple_bind_block (stmt);
6de9cd9a
DN
475
476 if (new_block)
477 {
478 if (new_block == old_block)
479 {
480 /* The outermost block of the original function may not be the
481 outermost statement chain of the gimplified function. So we
482 may see the outermost block just inside the function. */
282899df 483 gcc_assert (new_block == DECL_INITIAL (current_function_decl));
6de9cd9a
DN
484 new_block = NULL;
485 }
486 else
487 {
488 /* We do not expect to handle duplicate blocks. */
282899df 489 gcc_assert (!TREE_ASM_WRITTEN (new_block));
6de9cd9a
DN
490 TREE_ASM_WRITTEN (new_block) = 1;
491
492 /* Block tree may get clobbered by inlining. Normally this would
493 be fixed in rest_of_decl_compilation using block notes, but
494 since we are not going to emit them, it is up to us. */
495 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
496 BLOCK_SUBBLOCKS (old_block) = new_block;
497 BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
498 BLOCK_SUPERCONTEXT (new_block) = old_block;
499
500 data->block = new_block;
501 }
502 }
503
726a989a 504 record_vars (gimple_bind_vars (stmt));
355a7673 505 lower_sequence (gimple_bind_body_ptr (stmt), data);
6de9cd9a
DN
506
507 if (new_block)
508 {
282899df 509 gcc_assert (data->block == new_block);
6de9cd9a
DN
510
511 BLOCK_SUBBLOCKS (new_block)
512 = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
513 data->block = old_block;
514 }
515
726a989a
RB
516 /* The GIMPLE_BIND no longer carries any useful information -- kill it. */
517 gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
518 gsi_remove (gsi, false);
6de9cd9a
DN
519}
520
f778c049
EB
521/* Same as above, but for a GIMPLE_TRY_CATCH. */
522
523static void
524lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
525{
526 bool cannot_fallthru;
527 gimple stmt = gsi_stmt (*gsi);
528 gimple_stmt_iterator i;
529
530 /* We don't handle GIMPLE_TRY_FINALLY. */
531 gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
532
533 lower_sequence (gimple_try_eval_ptr (stmt), data);
534 cannot_fallthru = data->cannot_fallthru;
535
536 i = gsi_start (*gimple_try_cleanup_ptr (stmt));
537 switch (gimple_code (gsi_stmt (i)))
538 {
539 case GIMPLE_CATCH:
540 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
541 catch expression and a body. The whole try/catch may fall
542 through iff any of the catch bodies falls through. */
543 for (; !gsi_end_p (i); gsi_next (&i))
544 {
545 data->cannot_fallthru = false;
546 lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
547 if (!data->cannot_fallthru)
548 cannot_fallthru = false;
549 }
550 break;
551
552 case GIMPLE_EH_FILTER:
553 /* The exception filter expression only matters if there is an
554 exception. If the exception does not match EH_FILTER_TYPES,
555 we will execute EH_FILTER_FAILURE, and we will fall through
556 if that falls through. If the exception does match
557 EH_FILTER_TYPES, the stack unwinder will continue up the
558 stack, so we will not fall through. We don't know whether we
559 will throw an exception which matches EH_FILTER_TYPES or not,
560 so we just ignore EH_FILTER_TYPES and assume that we might
561 throw an exception which doesn't match. */
562 data->cannot_fallthru = false;
563 lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
564 if (!data->cannot_fallthru)
565 cannot_fallthru = false;
566 break;
567
568 default:
569 /* This case represents statements to be executed when an
570 exception occurs. Those statements are implicitly followed
571 by a GIMPLE_RESX to resume execution after the exception. So
572 in this case the try/catch never falls through. */
573 data->cannot_fallthru = false;
574 lower_sequence (gimple_try_cleanup_ptr (stmt), data);
575 break;
576 }
577
578 data->cannot_fallthru = cannot_fallthru;
579 gsi_next (gsi);
580}
581
6737ba67
ILT
582/* Try to determine whether a TRY_CATCH expression can fall through.
583 This is a subroutine of block_may_fallthru. */
584
585static bool
22ea9ec0 586try_catch_may_fallthru (const_tree stmt)
6737ba67
ILT
587{
588 tree_stmt_iterator i;
589
590 /* If the TRY block can fall through, the whole TRY_CATCH can
591 fall through. */
592 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
593 return true;
594
595 i = tsi_start (TREE_OPERAND (stmt, 1));
596 switch (TREE_CODE (tsi_stmt (i)))
597 {
598 case CATCH_EXPR:
599 /* We expect to see a sequence of CATCH_EXPR trees, each with a
600 catch expression and a body. The whole TRY_CATCH may fall
601 through iff any of the catch bodies falls through. */
602 for (; !tsi_end_p (i); tsi_next (&i))
603 {
604 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
605 return true;
606 }
607 return false;
608
609 case EH_FILTER_EXPR:
5eefcf2f
ILT
610 /* The exception filter expression only matters if there is an
611 exception. If the exception does not match EH_FILTER_TYPES,
612 we will execute EH_FILTER_FAILURE, and we will fall through
613 if that falls through. If the exception does match
614 EH_FILTER_TYPES, the stack unwinder will continue up the
615 stack, so we will not fall through. We don't know whether we
616 will throw an exception which matches EH_FILTER_TYPES or not,
617 so we just ignore EH_FILTER_TYPES and assume that we might
618 throw an exception which doesn't match. */
619 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
6737ba67
ILT
620
621 default:
622 /* This case represents statements to be executed when an
623 exception occurs. Those statements are implicitly followed
1d65f45c
RH
624 by a RESX statement to resume execution after the exception.
625 So in this case the TRY_CATCH never falls through. */
6737ba67
ILT
626 return false;
627 }
628}
629
726a989a
RB
630
631/* Same as above, but for a GIMPLE_TRY_CATCH. */
632
633static bool
634gimple_try_catch_may_fallthru (gimple stmt)
635{
636 gimple_stmt_iterator i;
637
638 /* We don't handle GIMPLE_TRY_FINALLY. */
639 gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
640
641 /* If the TRY block can fall through, the whole TRY_CATCH can
642 fall through. */
643 if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
644 return true;
645
355a7673 646 i = gsi_start (*gimple_try_cleanup_ptr (stmt));
726a989a
RB
647 switch (gimple_code (gsi_stmt (i)))
648 {
649 case GIMPLE_CATCH:
650 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
651 catch expression and a body. The whole try/catch may fall
652 through iff any of the catch bodies falls through. */
653 for (; !gsi_end_p (i); gsi_next (&i))
654 {
655 if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
656 return true;
657 }
658 return false;
659
660 case GIMPLE_EH_FILTER:
661 /* The exception filter expression only matters if there is an
662 exception. If the exception does not match EH_FILTER_TYPES,
663 we will execute EH_FILTER_FAILURE, and we will fall through
664 if that falls through. If the exception does match
665 EH_FILTER_TYPES, the stack unwinder will continue up the
666 stack, so we will not fall through. We don't know whether we
667 will throw an exception which matches EH_FILTER_TYPES or not,
668 so we just ignore EH_FILTER_TYPES and assume that we might
669 throw an exception which doesn't match. */
670 return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));
671
672 default:
673 /* This case represents statements to be executed when an
674 exception occurs. Those statements are implicitly followed
675 by a GIMPLE_RESX to resume execution after the exception. So
676 in this case the try/catch never falls through. */
677 return false;
678 }
679}
680
681
6de9cd9a
DN
682/* Try to determine if we can fall out of the bottom of BLOCK. This guess
683 need not be 100% accurate; simply be conservative and return true if we
684 don't know. This is used only to avoid stupidly generating extra code.
685 If we're wrong, we'll just delete the extra code later. */
686
687bool
22ea9ec0 688block_may_fallthru (const_tree block)
6de9cd9a 689{
4e9b57fa 690 /* This CONST_CAST is okay because expr_last returns its argument
75547801 691 unmodified and we assign it to a const_tree. */
b1d5455a 692 const_tree stmt = expr_last (CONST_CAST_TREE(block));
6de9cd9a
DN
693
694 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
695 {
696 case GOTO_EXPR:
697 case RETURN_EXPR:
b8698a0f 698 /* Easy cases. If the last statement of the block implies
6de9cd9a
DN
699 control transfer, then we can't fall through. */
700 return false;
701
089efaa4
ILT
702 case SWITCH_EXPR:
703 /* If SWITCH_LABELS is set, this is lowered, and represents a
704 branch to a selected label and hence can not fall through.
705 Otherwise SWITCH_BODY is set, and the switch can fall
706 through. */
6a03afdf 707 return SWITCH_LABELS (stmt) == NULL_TREE;
089efaa4 708
6de9cd9a
DN
709 case COND_EXPR:
710 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
711 return true;
712 return block_may_fallthru (COND_EXPR_ELSE (stmt));
713
714 case BIND_EXPR:
715 return block_may_fallthru (BIND_EXPR_BODY (stmt));
716
6737ba67
ILT
717 case TRY_CATCH_EXPR:
718 return try_catch_may_fallthru (stmt);
719
6de9cd9a 720 case TRY_FINALLY_EXPR:
09f8cf2d
ILT
721 /* The finally clause is always executed after the try clause,
722 so if it does not fall through, then the try-finally will not
723 fall through. Otherwise, if the try clause does not fall
724 through, then when the finally clause falls through it will
725 resume execution wherever the try clause was going. So the
726 whole try-finally will only fall through if both the try
727 clause and the finally clause fall through. */
728 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
729 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
6de9cd9a 730
726a989a
RB
731 case MODIFY_EXPR:
732 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
733 stmt = TREE_OPERAND (stmt, 1);
6de9cd9a
DN
734 else
735 return true;
736 /* FALLTHRU */
737
738 case CALL_EXPR:
739 /* Functions that do not return do not fall through. */
740 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
b8698a0f 741
1581a9a1
AP
742 case CLEANUP_POINT_EXPR:
743 return block_may_fallthru (TREE_OPERAND (stmt, 0));
6de9cd9a
DN
744
745 default:
746 return true;
747 }
748}
749
6de9cd9a 750
726a989a
RB
751/* Try to determine if we can continue executing the statement
752 immediately following STMT. This guess need not be 100% accurate;
753 simply be conservative and return true if we don't know. This is
754 used only to avoid stupidly generating extra code. If we're wrong,
755 we'll just delete the extra code later. */
756
757bool
758gimple_stmt_may_fallthru (gimple stmt)
6de9cd9a 759{
726a989a
RB
760 if (!stmt)
761 return true;
6de9cd9a 762
726a989a
RB
763 switch (gimple_code (stmt))
764 {
765 case GIMPLE_GOTO:
766 case GIMPLE_RETURN:
767 case GIMPLE_RESX:
b8698a0f 768 /* Easy cases. If the last statement of the seq implies
726a989a
RB
769 control transfer, then we can't fall through. */
770 return false;
6de9cd9a 771
726a989a 772 case GIMPLE_SWITCH:
a141816c
EB
773 /* Switch has already been lowered and represents a branch
774 to a selected label and hence can't fall through. */
775 return false;
6de9cd9a 776
726a989a
RB
777 case GIMPLE_COND:
778 /* GIMPLE_COND's are already lowered into a two-way branch. They
779 can't fall through. */
780 return false;
6de9cd9a 781
726a989a
RB
782 case GIMPLE_BIND:
783 return gimple_seq_may_fallthru (gimple_bind_body (stmt));
6de9cd9a 784
726a989a
RB
785 case GIMPLE_TRY:
786 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
787 return gimple_try_catch_may_fallthru (stmt);
6de9cd9a 788
726a989a 789 /* It must be a GIMPLE_TRY_FINALLY. */
6de9cd9a 790
726a989a
RB
791 /* The finally clause is always executed after the try clause,
792 so if it does not fall through, then the try-finally will not
793 fall through. Otherwise, if the try clause does not fall
794 through, then when the finally clause falls through it will
795 resume execution wherever the try clause was going. So the
796 whole try-finally will only fall through if both the try
797 clause and the finally clause fall through. */
798 return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
799 && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
800
0a35513e
AH
801 case GIMPLE_EH_ELSE:
802 return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
803 || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));
804
726a989a
RB
805 case GIMPLE_CALL:
806 /* Functions that do not return do not fall through. */
807 return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;
a141816c 808
726a989a
RB
809 default:
810 return true;
6de9cd9a 811 }
726a989a
RB
812}
813
6de9cd9a 814
726a989a 815/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
6de9cd9a 816
726a989a
RB
817bool
818gimple_seq_may_fallthru (gimple_seq seq)
819{
820 return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
6de9cd9a 821}
f5a76aea 822
726a989a
RB
823
824/* Lower a GIMPLE_RETURN GSI. DATA is passed through the recursion. */
4f6c2131 825
f5a76aea 826static void
726a989a 827lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
f5a76aea 828{
726a989a
RB
829 gimple stmt = gsi_stmt (*gsi);
830 gimple t;
831 int i;
832 return_statements_t tmp_rs;
f5a76aea 833
71877985 834 /* Match this up with an existing return statement that's been created. */
726a989a
RB
835 for (i = VEC_length (return_statements_t, data->return_statements) - 1;
836 i >= 0; i--)
f5a76aea 837 {
0823efed 838 tmp_rs = VEC_index (return_statements_t, data->return_statements, i);
71877985 839
726a989a 840 if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
0efb9d64
AK
841 {
842 /* Remove the line number from the representative return statement.
843 It now fills in for many such returns. Failure to remove this
844 will result in incorrect results for coverage analysis. */
845 gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);
846
847 goto found;
848 }
f5a76aea
RH
849 }
850
71877985 851 /* Not found. Create a new label and record the return statement. */
c2255bc4 852 tmp_rs.label = create_artificial_label (cfun->function_end_locus);
726a989a 853 tmp_rs.stmt = stmt;
f32682ca 854 VEC_safe_push (return_statements_t, heap, data->return_statements, tmp_rs);
71877985
RH
855
856 /* Generate a goto statement and remove the return statement. */
857 found:
516426da
EB
858 /* When not optimizing, make sure user returns are preserved. */
859 if (!optimize && gimple_has_location (stmt))
860 DECL_ARTIFICIAL (tmp_rs.label) = 0;
726a989a
RB
861 t = gimple_build_goto (tmp_rs.label);
862 gimple_set_location (t, gimple_location (stmt));
cc2a64dd 863 gimple_set_block (t, gimple_block (stmt));
726a989a
RB
864 gsi_insert_before (gsi, t, GSI_SAME_STMT);
865 gsi_remove (gsi, false);
4f6c2131
EB
866}
867
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* CONT_LABEL is where normal execution resumes after the setjmp
     sequence; NEXT_LABEL is the longjmp receiver landing pad.  */
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.
     Force it to be preserved even though it only appears address-taken.  */
  FORCED_LABEL (next_label) = 1;

  /* DEST is the user's lhs for the setjmp call (0 on the direct path,
     1 when re-entered via longjmp); it may be absent.  */
  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp; all of its effects have been
     expanded into the sequence inserted above.  */
  gsi_remove (gsi, false);
}
6de9cd9a
DN
987\f
988
50674e96 989/* Record the variables in VARS into function FN. */
6de9cd9a
DN
990
991void
50674e96 992record_vars_into (tree vars, tree fn)
6de9cd9a 993{
50674e96 994 if (fn != current_function_decl)
db2960f4 995 push_cfun (DECL_STRUCT_FUNCTION (fn));
50674e96 996
910ad8de 997 for (; vars; vars = DECL_CHAIN (vars))
6de9cd9a
DN
998 {
999 tree var = vars;
1000
acb8f212
JH
1001 /* BIND_EXPRs contains also function/type/constant declarations
1002 we don't need to care about. */
1003 if (TREE_CODE (var) != VAR_DECL)
1004 continue;
50674e96 1005
6de9cd9a
DN
1006 /* Nothing to do in this case. */
1007 if (DECL_EXTERNAL (var))
1008 continue;
6de9cd9a
DN
1009
1010 /* Record the variable. */
c021f10b 1011 add_local_decl (cfun, var);
6de9cd9a 1012 }
50674e96
DN
1013
1014 if (fn != current_function_decl)
db2960f4 1015 pop_cfun ();
50674e96
DN
1016}
1017
1018
/* Record the variables in VARS into current_function_decl.

   Convenience wrapper around record_vars_into for the common case of
   recording into the function currently being compiled.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}