]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-low.c
[arm] Perform early splitting of adddi3.
[thirdparty/gcc.git] / gcc / gimple-low.c
CommitLineData
75a70cf9 1/* GIMPLE lowering pass. Converts High GIMPLE into Low GIMPLE.
4ee9c684 2
fbd26352 3 Copyright (C) 2003-2019 Free Software Foundation, Inc.
4ee9c684 4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
8c4c00c1 9Software Foundation; either version 3, or (at your option) any later
4ee9c684 10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
9ef16211 24#include "backend.h"
4ee9c684 25#include "tree.h"
9ef16211 26#include "gimple.h"
7c29e30e 27#include "tree-pass.h"
b20a8bb4 28#include "fold-const.h"
9ed99284 29#include "tree-nested.h"
30#include "calls.h"
dcf1a1ec 31#include "gimple-iterator.h"
424a4a92 32#include "gimple-low.h"
c65f167e 33#include "predict.h"
34#include "gimple-predict.h"
85df98d7 35#include "gimple-fold.h"
4ee9c684 36
75a70cf9 37/* The differences between High GIMPLE and Low GIMPLE are the
38 following:
39
40 1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
41
42 2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
43 flow and exception regions are built as an on-the-side region
44 hierarchy (See tree-eh.c:lower_eh_constructs).
45
46 3- Multiple identical return statements are grouped into a single
47 return and gotos to the unique return site. */
48
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* Label of the unique return site; duplicate returns become gotos
     to this label.  */
  tree label;
  /* The representative return statement that the label precedes.  */
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
58
75a70cf9 59
/* State threaded through the lowering recursion
   (lower_sequence/lower_stmt and friends).  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
72
/* Forward declarations of the lowering workers; lower_stmt dispatches
   to the others and they recurse back through lower_sequence.  */
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
4ee9c684 79
75a70cf9 80
81/* Lower the body of current_function_decl from High GIMPLE into Low
82 GIMPLE. */
4ee9c684 83
2a1990e9 84static unsigned int
4ee9c684 85lower_function_body (void)
86{
87 struct lower_data data;
75a70cf9 88 gimple_seq body = gimple_body (current_function_decl);
89 gimple_seq lowered_body;
90 gimple_stmt_iterator i;
42acab1c 91 gimple *bind;
92 gimple *x;
75a70cf9 93
94 /* The gimplifier should've left a body of exactly one statement,
95 namely a GIMPLE_BIND. */
96 gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
97 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
4ee9c684 98
1e8e9920 99 memset (&data, 0, sizeof (data));
4ee9c684 100 data.block = DECL_INITIAL (current_function_decl);
101 BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
102 BLOCK_CHAIN (data.block) = NULL_TREE;
103 TREE_ASM_WRITTEN (data.block) = 1;
f1f41a6c 104 data.return_statements.create (8);
75a70cf9 105
106 bind = gimple_seq_first_stmt (body);
107 lowered_body = NULL;
108 gimple_seq_add_stmt (&lowered_body, bind);
109 i = gsi_start (lowered_body);
110 lower_gimple_bind (&i, &data);
4ee9c684 111
75a70cf9 112 i = gsi_last (lowered_body);
751ddc2b 113
90567983 114 /* If we had begin stmt markers from e.g. PCH, but this compilation
115 doesn't want them, lower_stmt will have cleaned them up; we can
116 now clear the flag that indicates we had them. */
117 if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
118 {
119 /* This counter needs not be exact, but before lowering it will
120 most certainly be. */
121 gcc_assert (cfun->debug_marker_count == 0);
122 cfun->debug_nonbind_markers = false;
123 }
124
751ddc2b 125 /* If the function falls off the end, we need a null return statement.
75a70cf9 126 If we've already got one in the return_statements vector, we don't
751ddc2b 127 need to do anything special. Otherwise build one by hand. */
c7bd9c39 128 bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
129 if (may_fallthru
f1f41a6c 130 && (data.return_statements.is_empty ()
9af5ce0c 131 || (gimple_return_retval (data.return_statements.last().stmt)
132 != NULL)))
751ddc2b 133 {
75a70cf9 134 x = gimple_build_return (NULL);
135 gimple_set_location (x, cfun->function_end_locus);
32dedf8f 136 gimple_set_block (x, DECL_INITIAL (current_function_decl));
75a70cf9 137 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
c7bd9c39 138 may_fallthru = false;
751ddc2b 139 }
140
141 /* If we lowered any return statements, emit the representative
142 at the end of the function. */
f1f41a6c 143 while (!data.return_statements.is_empty ())
22e30d4e 144 {
f1f41a6c 145 return_statements_t t = data.return_statements.pop ();
75a70cf9 146 x = gimple_build_label (t.label);
147 gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
75a70cf9 148 gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
c7bd9c39 149 if (may_fallthru)
150 {
151 /* Remove the line number from the representative return statement.
152 It now fills in for the fallthru too. Failure to remove this
153 will result in incorrect results for coverage analysis. */
154 gimple_set_location (t.stmt, UNKNOWN_LOCATION);
155 may_fallthru = false;
156 }
22e30d4e 157 }
158
e3a19533 159 /* Once the old body has been lowered, replace it with the new
160 lowered sequence. */
161 gimple_set_body (current_function_decl, lowered_body);
162
0d59b19d 163 gcc_assert (data.block == DECL_INITIAL (current_function_decl));
4ee9c684 164 BLOCK_SUBBLOCKS (data.block)
165 = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
166
167 clear_block_marks (data.block);
f1f41a6c 168 data.return_statements.release ();
2a1990e9 169 return 0;
4ee9c684 170}
171
namespace {

/* Pass descriptor for the GIMPLE lowering pass.  */

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

/* Factory used by the pass manager to instantiate the lowering pass.  */

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
206
75a70cf9 207/* Lower sequence SEQ. Unlike gimplification the statements are not relowered
4ee9c684 208 when they are changed -- if this has to be done, the lowering routine must
209 do it explicitly. DATA is passed through the recursion. */
210
c939e803 211static void
e3a19533 212lower_sequence (gimple_seq *seq, struct lower_data *data)
4ee9c684 213{
75a70cf9 214 gimple_stmt_iterator gsi;
4ee9c684 215
e3a19533 216 for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
75a70cf9 217 lower_stmt (&gsi, data);
4ee9c684 218}
219
773c5ba7 220
75a70cf9 221/* Lower the OpenMP directive statement pointed by GSI. DATA is
773c5ba7 222 passed through the recursion. */
223
224static void
75a70cf9 225lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
773c5ba7 226{
42acab1c 227 gimple *stmt;
48e1416a 228
75a70cf9 229 stmt = gsi_stmt (*gsi);
773c5ba7 230
e3a19533 231 lower_sequence (gimple_omp_body_ptr (stmt), data);
232 gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
75a70cf9 233 gimple_omp_set_body (stmt, NULL);
e3a19533 234 gsi_next (gsi);
773c5ba7 235}
236
237
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Tag every statement with the lexical block it belongs to; the
     GIMPLE_BINDs that carried that information are removed below.  */
  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      /* Control transfer statements: the next statement is not reached
	 by falling through.  */
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  /* The return is unreachable; drop it so EH lowering does not
	     create useless edges for it.  */
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      /* Nothing to lower; fall out to the common "may fall through"
	 epilogue below.  */
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	/* Propagate the current block onto argument expressions too.  */
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    /* A noreturn call ends the fallthru region.  */
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	/* We delay folding of built calls from gimplification to
	   here so the IL is in consistent state for the diagnostic
	   machineries job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      /* The directive's body is lowered and spliced out by
	 lower_omp_directive; be conservative about fallthru on both
	 sides of the call.  */
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  /* Common epilogue for the "break" cases: the statement may fall
     through; advance past it.  */
  data->cannot_fallthru = false;
  gsi_next (gsi);
}
415
/* Lower the GIMPLE_BIND at GSI: thread its block into the block tree,
   record its variables, lower its body and finally splice the body in
   place of the bind.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  /* Cut the chain links of the bind-only prefix (the decls not reached
     from BLOCK_VARS).  */
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
489
929384bb 490/* Same as above, but for a GIMPLE_TRY_CATCH. */
491
492static void
493lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
494{
495 bool cannot_fallthru;
42acab1c 496 gimple *stmt = gsi_stmt (*gsi);
929384bb 497 gimple_stmt_iterator i;
498
499 /* We don't handle GIMPLE_TRY_FINALLY. */
500 gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
501
502 lower_sequence (gimple_try_eval_ptr (stmt), data);
503 cannot_fallthru = data->cannot_fallthru;
504
505 i = gsi_start (*gimple_try_cleanup_ptr (stmt));
506 switch (gimple_code (gsi_stmt (i)))
507 {
508 case GIMPLE_CATCH:
509 /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
510 catch expression and a body. The whole try/catch may fall
511 through iff any of the catch bodies falls through. */
512 for (; !gsi_end_p (i); gsi_next (&i))
513 {
514 data->cannot_fallthru = false;
1a91d914 515 lower_sequence (gimple_catch_handler_ptr (
516 as_a <gcatch *> (gsi_stmt (i))),
517 data);
929384bb 518 if (!data->cannot_fallthru)
519 cannot_fallthru = false;
520 }
521 break;
522
523 case GIMPLE_EH_FILTER:
524 /* The exception filter expression only matters if there is an
525 exception. If the exception does not match EH_FILTER_TYPES,
526 we will execute EH_FILTER_FAILURE, and we will fall through
527 if that falls through. If the exception does match
528 EH_FILTER_TYPES, the stack unwinder will continue up the
529 stack, so we will not fall through. We don't know whether we
530 will throw an exception which matches EH_FILTER_TYPES or not,
531 so we just ignore EH_FILTER_TYPES and assume that we might
532 throw an exception which doesn't match. */
533 data->cannot_fallthru = false;
534 lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
535 if (!data->cannot_fallthru)
536 cannot_fallthru = false;
537 break;
538
90567983 539 case GIMPLE_DEBUG:
540 gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
541 break;
542
929384bb 543 default:
544 /* This case represents statements to be executed when an
545 exception occurs. Those statements are implicitly followed
546 by a GIMPLE_RESX to resume execution after the exception. So
547 in this case the try/catch never falls through. */
548 data->cannot_fallthru = false;
549 lower_sequence (gimple_try_cleanup_ptr (stmt), data);
550 break;
551 }
552
553 data->cannot_fallthru = cannot_fallthru;
554 gsi_next (gsi);
555}
556
93f29170 557
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  STMT must be a
   GIMPLE_TRY_CATCH; the result is a conservative guess.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  /* Dispatch on the first statement of the cleanup sequence.  */
  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
609
610
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  /* An empty position (no statement) trivially "falls through".  */
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      /* A bind falls through iff its body does.  */
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      /* Falls through if either the normal or the exceptional body may.  */
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      /* Conservative default: assume fallthru for anything else.  */
      return true;
    }
}
678
4ee9c684 679
75a70cf9 680/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ. */
4ee9c684 681
75a70cf9 682bool
683gimple_seq_may_fallthru (gimple_seq seq)
684{
bce107d7 685 return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
4ee9c684 686}
22e30d4e 687
75a70cf9 688
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.
   The return is replaced with a goto to a shared per-return-value
   label; the representative return statements are emitted at the end
   of the function by lower_function_body.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
732
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  /* If the call has an SSA name as lhs, assign into a temporary register
     instead and copy to the lhs at the end: the lhs would otherwise be
     assigned on two paths below.  */
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
18593a2c 860
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      /* First argument is &ptr: give posix_memalign an addressable
	 temporary TEM to fill and load it into PTR afterwards.  */
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    /* Otherwise load *PPTR into PTR.  */
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      /* The call's result was unused; create an lhs so it can be
	 compared against zero below.  */
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  /* Emit 'if (res == 0)' guarding the assume_aligned annotation.  */
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  /* PTR = __builtin_assume_aligned (PTR, ALIGN) on the success path.  */
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  /* Store the annotated pointer back through the original slot.  */
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
4ee9c684 914\f
915
773c5ba7 916/* Record the variables in VARS into function FN. */
4ee9c684 917
918void
773c5ba7 919record_vars_into (tree vars, tree fn)
4ee9c684 920{
1767a056 921 for (; vars; vars = DECL_CHAIN (vars))
4ee9c684 922 {
923 tree var = vars;
924
b3d24a23 925 /* BIND_EXPRs contains also function/type/constant declarations
926 we don't need to care about. */
53e9c5c4 927 if (!VAR_P (var))
b3d24a23 928 continue;
773c5ba7 929
4ee9c684 930 /* Nothing to do in this case. */
931 if (DECL_EXTERNAL (var))
932 continue;
4ee9c684 933
934 /* Record the variable. */
98107def 935 add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
4ee9c684 936 }
773c5ba7 937}
938
939
/* Record the variables in VARS into current_function_decl.
   Convenience wrapper around record_vars_into for the function
   currently being compiled.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}