gcc/gimple-low.c
/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
          || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",                      /* name */
  OPTGROUP_NONE,                /* optinfo_flags */
  NULL,                         /* gate */
  lower_function_body,          /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  TV_NONE,                      /* tv_id */
  PROP_gimple_any,              /* properties_required */
  PROP_gimple_lcf,              /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  0                             /* todo_flags_finish */
 }
};
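
/* Note on the pass descriptor above, added for exposition: the NULL gate
   means the pass always runs, its execute hook is lower_function_body,
   and it requires PROP_gimple_any (a gimplified body) while providing
   PROP_gimple_lcf, which downstream passes may list among their own
   required properties.  */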


/* Verify if the types of the arguments of call statement STMT match
   those expected by the function declaration FNDECL, or by the call's
   function type if FNDECL is NULL.  If we cannot verify this or there
   is a mismatch, return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case
             of excess parameters; still, deferring the inlining
             decision to the callee is possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function, defer the inlining decision
             to the callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}

/* Verify if the types of the arguments and the lhs of CALL_STMT match
   those of the function declaration CALLEE.  If we cannot verify this
   or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}
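
/* Illustrative example (the names are invented): if CALLEE is really
   declared as

     void foo (struct S);

   but the call statement passes an int, say because the call site was
   compiled against a mismatched prototype, then the argument is neither
   type-compatible with struct S nor foldable to it, so
   gimple_check_call_matching_types returns false.  */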

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
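
/* Concrete example of the cannot_fallthru tracking above (the label name
   is invented): in a sequence such as

     goto <D.1234>;
     return;

   the return statement can never be reached, so the GIMPLE_RETURN case
   simply removes it instead of recording yet another entry in
   data->return_statements.  */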

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a GIMPLE_TRY_CATCH statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence can not fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */
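
/* Source-level illustration (the names handle_longjmp, do_work and buf
   are invented): a call such as

     if (__builtin_setjmp (buf))
       handle_longjmp ();
     else
       do_work ();

   with buf a five-word buffer is what triggers this lowering; the
   setup/receiver pair built below replaces the call itself, while the
   single dispatcher shown in the comment above is emitted once per
   function by lower_function_body.  */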

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* A BIND_EXPR may also contain function/type/constant declarations
         that we do not need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}