]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-low.c
Move operand_less_p to vr-values.c.
[thirdparty/gcc.git] / gcc / gimple-low.c
CommitLineData
726a989a 1/* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
6de9cd9a 2
8d9254fc 3 Copyright (C) 2003-2020 Free Software Foundation, Inc.
6de9cd9a
DN
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9dcd6f09 9Software Foundation; either version 3, or (at your option) any later
6de9cd9a
DN
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
9dcd6f09
NC
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
6de9cd9a 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5 27#include "tree-pass.h"
40e23961 28#include "fold-const.h"
d8a2d370
DN
29#include "tree-nested.h"
30#include "calls.h"
5be5c238 31#include "gimple-iterator.h"
4484a35a 32#include "gimple-low.h"
e59a1c22
ML
33#include "predict.h"
34#include "gimple-predict.h"
665db3ae 35#include "gimple-fold.h"
6de9cd9a 36
726a989a
RB
37/* The differences between High GIMPLE and Low GIMPLE are the
38 following:
39
40 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
41
42 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
43 flow and exception regions are built as an on-the-side region
44 hierarchy (See tree-eh.c:lower_eh_constructs).
45
46 3- Multiple identical return statements are grouped into a single
47 return and gotos to the unique return site. */
48
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* Label that stands in for this return; gotos to it replace
     duplicate returns.  */
  tree label;

  /* The representative GIMPLE_RETURN statement itself.  */
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
726a989a 59
6de9cd9a
DN
/* State threaded through the lowering recursion.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
726a989a
RB
/* Forward declarations for the mutually-recursive lowering helpers
   defined below.  */
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
6de9cd9a 79
726a989a
RB
80
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  /* Wrap the single GIMPLE_BIND in a fresh sequence and lower it;
     lowering splices the bind's body into LOWERED_BODY in place.  */
  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter needs not be exact, but before lowering it will
	 most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  /* Subblocks were chained in reverse order during lowering; restore
     source order.  */
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
171
27a4cd48
DM
namespace {

/* Pass descriptor for the High-to-Low GIMPLE lowering pass.  */
const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

/* Factory used by the pass manager to instantiate the lowering pass.  */

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
206
726a989a 207/* Lower sequence SEQ. Unlike gimplification the statements are not relowered
6de9cd9a
DN
208 when they are changed -- if this has to be done, the lowering routine must
209 do it explicitly. DATA is passed through the recursion. */
210
1ebf7687 211static void
355a7673 212lower_sequence (gimple_seq *seq, struct lower_data *data)
6de9cd9a 213{
726a989a 214 gimple_stmt_iterator gsi;
6de9cd9a 215
355a7673 216 for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
726a989a 217 lower_stmt (&gsi, data);
6de9cd9a
DN
218}
219
50674e96 220
726a989a 221/* Lower the OpenMP directive statement pointed by GSI. DATA is
50674e96
DN
222 passed through the recursion. */
223
224static void
726a989a 225lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
50674e96 226{
355fe088 227 gimple *stmt;
b8698a0f 228
726a989a 229 stmt = gsi_stmt (*gsi);
50674e96 230
355a7673
MM
231 lower_sequence (gimple_omp_body_ptr (stmt), data);
232 gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
726a989a 233 gimple_omp_set_body (stmt, NULL);
355a7673 234 gsi_next (gsi);
50674e96
DN
235}
236
237
a141816c
EB
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      /* These all transfer control; the next statement is unreachable
	 by fallthru.  */
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  /* Unreachable return: drop it rather than lower it.  */
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    /* Statements with no nested sequences to lower; handled by the
       common epilogue below.  */
    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	/* Propagate the current block to argument expressions too.  */
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	/* We delay folding of built calls from gimplification to
	   here so the IL is in consistent state for the diagnostic
	   machineries job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  /* Common epilogue for the 'break' cases: such statements may fall
     through, so reset the flag conservatively and advance.  */
  data->cannot_fallthru = false;
  gsi_next (gsi);
}
415
/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  /* Cut each unmarked decl out of the chain; stop at the first decl
     that is also reachable from BLOCK_VARS.  */
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
489
f778c049
EB
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  /* Dispatch on the first statement of the cleanup sequence, which
     determines the shape of the handler.  */
  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
556
6737ba67 557
cf2d1b38
AM
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  /* Otherwise the answer depends on the shape of the handler; dispatch
     on the first statement of the cleanup sequence.  */
  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
609
610
726a989a
RB
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  /* An empty statement position trivially falls through.  */
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      /* Conservative default: assume fallthru.  */
      return true;
    }
}
678
6de9cd9a 679
726a989a 680/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
6de9cd9a 681
726a989a
RB
682bool
683gimple_seq_may_fallthru (gimple_seq seq)
684{
65f4b875 685 return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
6de9cd9a 686}
f5a76aea 687
726a989a
RB
688
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
732
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  /* If the call's LHS is an SSA name, stage the result in a temporary
     register; SSA names can't be assigned on both arms below.  */
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
831806cb
RB
860
861/* Lower calls to posix_memalign to
c4c8514e
RB
862 res = posix_memalign (ptr, align, size);
863 if (res == 0)
864 *ptr = __builtin_assume_aligned (*ptr, align);
831806cb
RB
865 or to
866 void *tem;
c4c8514e
RB
867 res = posix_memalign (&tem, align, size);
868 if (res == 0)
869 ptr = __builtin_assume_aligned (tem, align);
831806cb
RB
870 in case the first argument was &ptr. That way we can get at the
871 alignment of the heap pointer in CCP. */
872
873static void
874lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
875{
355fe088 876 gimple *stmt, *call = gsi_stmt (*gsi);
c4c8514e
RB
877 tree pptr = gimple_call_arg (call, 0);
878 tree align = gimple_call_arg (call, 1);
879 tree res = gimple_call_lhs (call);
b731b390 880 tree ptr = create_tmp_reg (ptr_type_node);
831806cb
RB
881 if (TREE_CODE (pptr) == ADDR_EXPR)
882 {
b731b390 883 tree tem = create_tmp_var (ptr_type_node);
831806cb 884 TREE_ADDRESSABLE (tem) = 1;
c4c8514e 885 gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
831806cb
RB
886 stmt = gimple_build_assign (ptr, tem);
887 }
888 else
889 stmt = gimple_build_assign (ptr,
890 fold_build2 (MEM_REF, ptr_type_node, pptr,
891 build_int_cst (ptr_type_node, 0)));
c4c8514e
RB
892 if (res == NULL_TREE)
893 {
b731b390 894 res = create_tmp_reg (integer_type_node);
c4c8514e
RB
895 gimple_call_set_lhs (call, res);
896 }
897 tree align_label = create_artificial_label (UNKNOWN_LOCATION);
898 tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
355fe088 899 gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
c4c8514e
RB
900 align_label, noalign_label);
901 gsi_insert_after (gsi, cond, GSI_NEW_STMT);
902 gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
831806cb
RB
903 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
904 stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
905 2, ptr, align);
906 gimple_call_set_lhs (stmt, ptr);
907 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
908 stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
909 build_int_cst (ptr_type_node, 0)),
910 ptr);
911 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
c4c8514e 912 gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
831806cb 913}
6de9cd9a
DN
914\f
915
50674e96 916/* Record the variables in VARS into function FN. */
6de9cd9a
DN
917
918void
50674e96 919record_vars_into (tree vars, tree fn)
6de9cd9a 920{
910ad8de 921 for (; vars; vars = DECL_CHAIN (vars))
6de9cd9a
DN
922 {
923 tree var = vars;
924
acb8f212
JH
925 /* BIND_EXPRs contains also function/type/constant declarations
926 we don't need to care about. */
8813a647 927 if (!VAR_P (var))
acb8f212 928 continue;
50674e96 929
6de9cd9a
DN
930 /* Nothing to do in this case. */
931 if (DECL_EXTERNAL (var))
932 continue;
6de9cd9a
DN
933
934 /* Record the variable. */
45b62594 935 add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
6de9cd9a 936 }
50674e96
DN
937}
938
939
/* Record the variables in VARS into current_function_decl.
   Convenience wrapper around record_vars_into.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}