/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "flags.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "gimple-low.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement; duplicates are replaced with gotos to the
      unique return site.  */

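/* As an illustrative sketch of point 3 (hand-written, not actual GIMPLE
   dump output; the label names are made up), a High GIMPLE body along
   the lines of

	{
	  if (a) goto <D.1>; else goto <D.2>;
	  <D.1>: return x;
	  <D.2>: return x;
	}

   is lowered to

	if (a) goto <D.1>; else goto <D.2>;
	<D.1>: goto <retsite>;
	<D.2>: goto <retsite>;
	<retsite>: return x;

   with the enclosing GIMPLE_BIND removed and the two identical returns
   merged into one representative emitted at the end of the function.  */
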
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

6de9cd9a 89
c2924966 90static unsigned int
6de9cd9a
DN
91lower_function_body (void)
92{
93 struct lower_data data;
726a989a
RB
94 gimple_seq body = gimple_body (current_function_decl);
95 gimple_seq lowered_body;
96 gimple_stmt_iterator i;
355fe088
TS
97 gimple *bind;
98 gimple *x;
726a989a
RB
99
100 /* The gimplifier should've left a body of exactly one statement,
101 namely a GIMPLE_BIND. */
102 gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
103 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
6de9cd9a 104
953ff289 105 memset (&data, 0, sizeof (data));
6de9cd9a
DN
106 data.block = DECL_INITIAL (current_function_decl);
107 BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
108 BLOCK_CHAIN (data.block) = NULL_TREE;
109 TREE_ASM_WRITTEN (data.block) = 1;
9771b263 110 data.return_statements.create (8);
726a989a
RB
111
112 bind = gimple_seq_first_stmt (body);
113 lowered_body = NULL;
114 gimple_seq_add_stmt (&lowered_body, bind);
115 i = gsi_start (lowered_body);
116 lower_gimple_bind (&i, &data);
6de9cd9a 117
726a989a 118 i = gsi_last (lowered_body);
ff98621c
RH
119
120 /* If the function falls off the end, we need a null return statement.
726a989a 121 If we've already got one in the return_statements vector, we don't
ff98621c 122 need to do anything special. Otherwise build one by hand. */
67b69814
EB
123 bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
124 if (may_fallthru
9771b263 125 && (data.return_statements.is_empty ()
c3284718
RS
126 || (gimple_return_retval (data.return_statements.last().stmt)
127 != NULL)))
ff98621c 128 {
726a989a
RB
129 x = gimple_build_return (NULL);
130 gimple_set_location (x, cfun->function_end_locus);
cc2a64dd 131 gimple_set_block (x, DECL_INITIAL (current_function_decl));
726a989a 132 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
67b69814 133 may_fallthru = false;
ff98621c
RH
134 }
135
136 /* If we lowered any return statements, emit the representative
137 at the end of the function. */
9771b263 138 while (!data.return_statements.is_empty ())
f5a76aea 139 {
9771b263 140 return_statements_t t = data.return_statements.pop ();
726a989a
RB
141 x = gimple_build_label (t.label);
142 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
726a989a 143 gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
67b69814
EB
144 if (may_fallthru)
145 {
146 /* Remove the line number from the representative return statement.
147 It now fills in for the fallthru too. Failure to remove this
148 will result in incorrect results for coverage analysis. */
149 gimple_set_location (t.stmt, UNKNOWN_LOCATION);
150 may_fallthru = false;
151 }
f5a76aea
RH
152 }
153
355a7673
MM
154 /* Once the old body has been lowered, replace it with the new
155 lowered sequence. */
156 gimple_set_body (current_function_decl, lowered_body);
157
282899df 158 gcc_assert (data.block == DECL_INITIAL (current_function_decl));
6de9cd9a
DN
159 BLOCK_SUBBLOCKS (data.block)
160 = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
161
162 clear_block_marks (data.block);
9771b263 163 data.return_statements.release ();
c2924966 164 return 0;
6de9cd9a
DN
165}
166
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

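/* Note: the pass manager instantiates this pass via make_pass_lower_cf;
   "lower" is listed with the other early lowering passes in passes.def,
   so lower_function_body runs once per function, shortly after
   gimplification.  */
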
/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Lower statement pointed to by GSI.  DATA is passed through the
   recursion.  We try to track the fallthruness of statements and get rid
   of unreachable return statements in order to prevent the EH lowering
   pass from adding useless edges that can cause bogus warnings to be
   issued later; this guess need not be 100% accurate, we simply stay
   conservative and reset cannot_fallthru to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

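/* For instance, lower_function_body above calls gimple_seq_may_fallthru
   on the just-lowered body to decide whether an implicit "return;" has
   to be appended for control dropping off the end of the function.  */
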
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      /* The argument is &ptr: redirect the call to store into a fresh
	 addressable temporary, and load PTR from it afterwards.  */
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    /* Otherwise load PTR back through the pointer argument.  */
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}