/* Gimple IR support functions.

   Copyright (C) 2007-2016 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
#include "target.h"

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Static gimple tuple members.  */
const enum gimple_code gassign::code_;
const enum gimple_code gcall::code_;
const enum gimple_code gcond::code_;


/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple *g, enum gimple_code code)
{
  g->code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple *
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple *stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
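
/* A worked sketch of the size computation above (illustrative only):
   because every GSS structure that has operands already ends in a
   1-element tree array, NUM_OPS operands need NUM_OPS - 1 extra slots.
   For example,

     size_t sz = gimple_size (GIMPLE_ASSIGN) + sizeof (tree) * (3 - 1);

   would be the number of bytes allocated for a three-operand
   assignment tuple.  */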

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple *g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple *s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

greturn *
gimple_build_return (tree retval)
{
  greturn *s
    = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
                                               2));
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gcall *s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gcall *
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
                                             nargs + 3));
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gcall *
gimple_build_call_vec (tree fn, vec<tree> args)
{
  unsigned i;
  unsigned nargs = args.length ();
  gcall *call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gcall *
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gcall *call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
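
/* A minimal usage sketch (hypothetical caller, not part of this file):
   assuming FNDECL is a FUNCTION_DECL and ARG0/ARG1 are valid GIMPLE
   operands, a pass could emit a call before the statement at GSI with

     gcall *c = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (c, lhs);
     gsi_insert_before (&gsi, c, GSI_SAME_STMT);

   gimple_build_call_vec above is the equivalent entry point when the
   arguments are already collected in a vec<tree>.  */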

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gcall *
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gcall *call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gcall *
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gcall *s
    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
                                             nargs + 3));
  s->subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gcall *
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gcall *call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
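
/* Illustrative sketch (hedged, not used in this file): an internal
   call carries no fndecl, only an internal_fn code from
   internal-fn.def, so building one looks like, e.g.,

     gcall *c = gimple_build_call_internal (IFN_ADD_OVERFLOW, 2, op0, op1);

   with the argument count matching whatever the chosen internal
   function expects.  */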

/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gcall *
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
  unsigned i, nargs;
  gcall *call;

  nargs = args.length ();
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gcall *
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gcall *call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));
  gimple_call_set_with_bounds (call, CALL_WITH_BOUNDS_P (t));

  return call;
}


/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gassign *
gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  */

static inline gassign *
gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
                       tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gassign *p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = as_a <gassign *> (
        gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                    PASS_MEM_STAT));
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
                     tree op2, tree op3 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}

/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
                     tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
                                PASS_MEM_STAT);
}

/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
                                PASS_MEM_STAT);
}
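
/* A minimal usage sketch (hypothetical names): to materialize
   "x = a + b" with trees X, A and B already valid in GIMPLE,

     gassign *g = gimple_build_assign (x, PLUS_EXPR, a, b);
     gimple_seq_add_stmt (&seq, g);

   while the two-argument overload above instead extracts the subcode
   and operands from an existing RHS tree via extract_ops_from_tree.  */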

/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gcond *
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gcond *p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
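
/* Illustrative sketch (names are hypothetical): branching on "i < n"
   to label L_TRUE, otherwise to L_FALSE, would be built as

     gcond *c = gimple_build_cond (LT_EXPR, i, n, l_true, l_false);

   Once the CFG exists, the labels are normally left NULL_TREE and the
   targets are expressed by the block's outgoing edges instead.  */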

/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gcond *
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

glabel *
gimple_build_label (tree label)
{
  glabel *p
    = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

ggoto *
gimple_build_goto (tree dest)
{
  ggoto *p
    = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple *
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gbind *
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
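
/* A small sketch (hedged, not exercised in this file): wrapping an
   existing sequence in a scope with no local variables and no BLOCK is

     gbind *b = gimple_build_bind (NULL_TREE, body, NULL_TREE);

   which is handy when an artificial enclosing scope is needed.  */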

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.  */

static inline gasm *
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gasm *p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = as_a <gasm *> (
        gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                               ninputs + noutputs + nclobbers + nlabels));

  p->ni = ninputs;
  p->no = noutputs;
  p->nc = nclobbers;
  p->nl = nlabels;
  p->string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gasm *
gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
                      vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
                      vec<tree, va_gc> *labels)
{
  gasm *p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          vec_safe_length (inputs),
                          vec_safe_length (outputs),
                          vec_safe_length (clobbers),
                          vec_safe_length (labels));

  for (i = 0; i < vec_safe_length (inputs); i++)
    gimple_asm_set_input_op (p, i, (*inputs)[i]);

  for (i = 0; i < vec_safe_length (outputs); i++)
    gimple_asm_set_output_op (p, i, (*outputs)[i]);

  for (i = 0; i < vec_safe_length (clobbers); i++)
    gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);

  for (i = 0; i < vec_safe_length (labels); i++)
    gimple_asm_set_label_op (p, i, (*labels)[i]);

  return p;
}

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gcatch *
gimple_build_catch (tree types, gimple_seq handler)
{
  gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

geh_filter *
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

geh_mnt *
gimple_build_eh_must_not_throw (tree decl)
{
  geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

geh_else *
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gtry *
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gtry *p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple *
gimple_build_wce (gimple_seq cleanup)
{
  gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gresx *
gimple_build_resx (int region)
{
  gresx *p
    = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
  p->region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gswitch *
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
                                                        ERROR_MARK,
                                                        1 + 1 + nlabels));
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gswitch *
gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
  unsigned i, nlabels = args.length ();

  gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + 1, args[i]);

  return p;
}
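
/* Illustrative sketch (hypothetical labels): given CASE_LABEL_EXPRs
   collected in a local vector, a switch on INDEX could be built as

     auto_vec<tree> labels;
     labels.safe_push (case_1);
     labels.safe_push (case_2);
     gswitch *s = gimple_build_switch (index, default_label, labels);

   Slot 0 always holds the default label; the remaining slots are
   filled from the vector starting at slot 1, as above.  */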

/* Build a GIMPLE_EH_DISPATCH statement.  */

geh_dispatch *
gimple_build_eh_dispatch (int region)
{
  geh_dispatch *p
    = as_a <geh_dispatch *> (
        gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
  p->region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gdebug *
gimple_build_debug_bind_stat (tree var, tree value, gimple *stmt MEM_STAT_DECL)
{
  gdebug *p
    = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                                   (unsigned)GIMPLE_DEBUG_BIND, 2
                                                   PASS_MEM_STAT));
  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gdebug *
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple *stmt MEM_STAT_DECL)
{
  gdebug *p
    = as_a <gdebug *> (
        gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
                                    PASS_MEM_STAT));

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.
   CLAUSES are clauses for this critical block.  */

gomp_critical *
gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
{
  gomp_critical *p
    = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
  gimple_omp_critical_set_name (p, name);
  gimple_omp_critical_set_clauses (p, clauses);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES are any of the construct's clauses.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gomp_for *
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->collapse = collapse;
  p->iter = ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);

  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gomp_parallel *
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gomp_parallel *p
    = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gomp_task *
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple *
gimple_build_omp_section (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple *
gimple_build_omp_master (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_GRID_BODY statement.

   BODY is the sequence of statements to be executed by the kernel.  */

gimple *
gimple_build_omp_grid_body (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_GRID_BODY, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_TASKGROUP statement.

   BODY is the sequence of statements to be executed by the taskgroup
   construct.  */

gimple *
gimple_build_omp_taskgroup (gimple_seq body)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gomp_continue *
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gomp_continue *p
    = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.
   CLAUSES are clauses for this statement.  */

gomp_ordered *
gimple_build_omp_ordered (gimple_seq body, tree clauses)
{
  gomp_ordered *p
    = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
  gimple_omp_ordered_set_clauses (p, clauses);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple *
gimple_build_omp_return (bool wait_p)
{
  gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gomp_sections *
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gomp_sections *p
    = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple *
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gomp_single *
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gomp_single *p
    = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_TARGET statement.

   BODY is the sequence of statements that will be executed.
   KIND is the kind of the region.
   CLAUSES are any of the construct's clauses.  */

gomp_target *
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
  gomp_target *p
    = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_target_set_clauses (p, clauses);
  gimple_omp_target_set_kind (p, kind);

  return p;
}


/* Build a GIMPLE_OMP_TEAMS statement.

   BODY is the sequence of statements that will be executed.
   CLAUSES are any of the OMP teams construct's clauses.  */

gomp_teams *
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
  gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_teams_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gomp_atomic_load *
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gomp_atomic_load *p
    = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gomp_atomic_store *
gimple_build_omp_atomic_store (tree val)
{
  gomp_atomic_store *p
    = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gtransaction *
gimple_build_transaction (gimple_seq body)
{
  gtransaction *p
    = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label_norm (p, 0);
  gimple_transaction_set_label_uninst (p, 0);
  gimple_transaction_set_label_over (p, 0);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const gimple *gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  get_tree_code_name (subcode),
                  gimple_code_name[gimple_code (gs)],
                  gs->subcode > 0
                    ? get_tree_code_name ((enum tree_code) gs->subcode)
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
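
/* A brief usage sketch (names are hypothetical): sequences start out
   NULL and grow as statements are appended, e.g.

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_assign (x, y));
     gimple_seq_add_stmt (&seq, gimple_build_return (x));

   The _without_update variant below is meant for use while the
   operand (def/use) vectors do not exist yet.  */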

/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_stmt, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

void
gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.  */

void
gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}

/* Determine whether to assign a location to the statement GS.  */

static bool
should_carry_location_p (gimple *gs)
{
  /* Don't emit a line note for a label.  We particularly don't want to
     emit one for the break label, since it doesn't actually correspond
     to the beginning of the loop/switch.  */
  if (gimple_code (gs) == GIMPLE_LABEL)
    return false;

  return true;
}

/* Set the location for gimple statement GS to LOCATION.  */

static void
annotate_one_with_location (gimple *gs, location_t location)
{
  if (!gimple_has_location (gs)
      && !gimple_do_not_emit_location_p (gs)
      && should_carry_location_p (gs))
    gimple_set_location (gs, location);
}

/* Set LOCATION for all the statements after iterator GSI in sequence
   SEQ.  If GSI is pointing to the end of the sequence, start with the
   first statement in SEQ.  */

void
annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
                                  location_t location)
{
  if (gsi_end_p (gsi))
    gsi = gsi_start (seq);
  else
    gsi_next (&gsi);

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    annotate_one_with_location (gsi_stmt (gsi), location);
}

/* Set the location for all the statements in a sequence STMT_P to LOCATION.  */

void
annotate_all_with_location (gimple_seq stmt_p, location_t location)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (stmt_p))
    return;

  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *gs = gsi_stmt (i);
      annotate_one_with_location (gs, location);
    }
}

/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
    return empty_body_p (gimple_bind_body (bind_stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple *stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
            && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
            && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
                || c1 == c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
            || (gimple_call_fndecl (c1)
                && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const gimple *stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" string for call STMT.  */

static const_tree
gimple_call_fnspec (const gcall *stmt)
{
  tree type, attr;

  if (gimple_call_internal_p (stmt))
    return internal_fn_fnspec (gimple_call_internal_fn (stmt));

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
  const_tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
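
/* An interpretation sketch grounded in the switch above and the one
   below: in a "fn spec" string the first character describes the
   return value and each following character describes one argument.
   A (hypothetical) fnspec of "1w" would therefore mean the call
   returns its first argument (ERF_RETURNS_ARG with index 0) and that
   argument does not escape (EAF_NOESCAPE).  */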

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const gcall *stmt)
{
  const_tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple *gs)
{
  return (gimple_assign_single_p (gs)
          && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple *gs)
{
  return (gimple_assign_single_p (gs)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple *gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple *stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len =
            vec_safe_length (label_to_block_map_for_fn (cfun));
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
                                     new_len);
            }
        }

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
                                tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple *stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple *new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
         on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
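
/* A short usage sketch (hedged, hypothetical names): a folder
   positioned at an assignment via GSI can rewrite "lhs = a * b" into
   "lhs = a << c" in place with

     gimple_assign_set_rhs_with_ops (&gsi, LSHIFT_EXPR, a, c, NULL_TREE);

   letting the routine above grow the tuple if the new code needs more
   operand slots than the old one had.  */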
1624
1625
1626/* Return the LHS of a statement that performs an assignment,
1627 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1628 for a call to a function that returns no value, or for a
1629 statement other than an assignment or a call. */
1630
1631tree
355fe088 1632gimple_get_lhs (const gimple *stmt)
726a989a 1633{
e0c68ce9 1634 enum gimple_code code = gimple_code (stmt);
726a989a
RB
1635
1636 if (code == GIMPLE_ASSIGN)
1637 return gimple_assign_lhs (stmt);
1638 else if (code == GIMPLE_CALL)
1639 return gimple_call_lhs (stmt);
1640 else
1641 return NULL_TREE;
1642}
1643
1644
1645/* Set the LHS of a statement that performs an assignment,
1646 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1647
1648void
355fe088 1649gimple_set_lhs (gimple *stmt, tree lhs)
726a989a 1650{
e0c68ce9 1651 enum gimple_code code = gimple_code (stmt);
726a989a
RB
1652
1653 if (code == GIMPLE_ASSIGN)
1654 gimple_assign_set_lhs (stmt, lhs);
1655 else if (code == GIMPLE_CALL)
1656 gimple_call_set_lhs (stmt, lhs);
1657 else
c3284718 1658 gcc_unreachable ();
726a989a
RB
1659}
1660
1661
1662/* Return a deep copy of statement STMT. All the operands from STMT
1663 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
355a7673
MM
1664 and VUSE operand arrays are set to empty in the new copy. The new
1665 copy isn't part of any sequence. */
726a989a 1666
355fe088
TS
1667gimple *
1668gimple_copy (gimple *stmt)
726a989a
RB
1669{
1670 enum gimple_code code = gimple_code (stmt);
1671 unsigned num_ops = gimple_num_ops (stmt);
355fe088 1672 gimple *copy = gimple_alloc (code, num_ops);
726a989a
RB
1673 unsigned i;
1674
1675 /* Shallow copy all the fields from STMT. */
1676 memcpy (copy, stmt, gimple_size (code));
355a7673 1677 gimple_init_singleton (copy);
726a989a
RB
1678
1679 /* If STMT has sub-statements, deep-copy them as well. */
1680 if (gimple_has_substatements (stmt))
1681 {
1682 gimple_seq new_seq;
1683 tree t;
1684
1685 switch (gimple_code (stmt))
1686 {
1687 case GIMPLE_BIND:
538dd0b7
DM
1688 {
1689 gbind *bind_stmt = as_a <gbind *> (stmt);
1690 gbind *bind_copy = as_a <gbind *> (copy);
1691 new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
1692 gimple_bind_set_body (bind_copy, new_seq);
1693 gimple_bind_set_vars (bind_copy,
1694 unshare_expr (gimple_bind_vars (bind_stmt)));
1695 gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
1696 }
726a989a
RB
1697 break;
1698
1699 case GIMPLE_CATCH:
538dd0b7
DM
1700 {
1701 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1702 gcatch *catch_copy = as_a <gcatch *> (copy);
1703 new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
1704 gimple_catch_set_handler (catch_copy, new_seq);
1705 t = unshare_expr (gimple_catch_types (catch_stmt));
1706 gimple_catch_set_types (catch_copy, t);
1707 }
726a989a
RB
1708 break;
1709
1710 case GIMPLE_EH_FILTER:
538dd0b7
DM
1711 {
1712 geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
1713 geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
1714 new_seq
1715 = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
1716 gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
1717 t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
1718 gimple_eh_filter_set_types (eh_filter_copy, t);
1719 }
726a989a
RB
1720 break;
1721
0a35513e 1722 case GIMPLE_EH_ELSE:
538dd0b7
DM
1723 {
1724 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
1725 geh_else *eh_else_copy = as_a <geh_else *> (copy);
1726 new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
1727 gimple_eh_else_set_n_body (eh_else_copy, new_seq);
1728 new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
1729 gimple_eh_else_set_e_body (eh_else_copy, new_seq);
1730 }
0a35513e
AH
1731 break;
1732
726a989a 1733 case GIMPLE_TRY:
538dd0b7
DM
1734 {
1735 gtry *try_stmt = as_a <gtry *> (stmt);
1736 gtry *try_copy = as_a <gtry *> (copy);
1737 new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
1738 gimple_try_set_eval (try_copy, new_seq);
1739 new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
1740 gimple_try_set_cleanup (try_copy, new_seq);
1741 }
726a989a
RB
1742 break;
1743
1744 case GIMPLE_OMP_FOR:
1745 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
1746 gimple_omp_for_set_pre_body (copy, new_seq);
1747 t = unshare_expr (gimple_omp_for_clauses (stmt));
1748 gimple_omp_for_set_clauses (copy, t);
daa6e488 1749 {
538dd0b7 1750 gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
766090c2
TS
1751 omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
1752 ( gimple_omp_for_collapse (stmt));
daa6e488 1753 }
726a989a
RB
1754 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1755 {
1756 gimple_omp_for_set_cond (copy, i,
1757 gimple_omp_for_cond (stmt, i));
1758 gimple_omp_for_set_index (copy, i,
1759 gimple_omp_for_index (stmt, i));
1760 t = unshare_expr (gimple_omp_for_initial (stmt, i));
1761 gimple_omp_for_set_initial (copy, i, t);
1762 t = unshare_expr (gimple_omp_for_final (stmt, i));
1763 gimple_omp_for_set_final (copy, i, t);
1764 t = unshare_expr (gimple_omp_for_incr (stmt, i));
1765 gimple_omp_for_set_incr (copy, i, t);
1766 }
1767 goto copy_omp_body;
1768
1769 case GIMPLE_OMP_PARALLEL:
1770 {
1771 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1772 gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
1773 t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
1774 gimple_omp_parallel_set_clauses (omp_par_copy, t);
1775 t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
1776 gimple_omp_parallel_set_child_fn (omp_par_copy, t);
1777 t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
1778 gimple_omp_parallel_set_data_arg (omp_par_copy, t);
1779 }
1780 goto copy_omp_body;
1781
1782 case GIMPLE_OMP_TASK:
1783 t = unshare_expr (gimple_omp_task_clauses (stmt));
1784 gimple_omp_task_set_clauses (copy, t);
1785 t = unshare_expr (gimple_omp_task_child_fn (stmt));
1786 gimple_omp_task_set_child_fn (copy, t);
1787 t = unshare_expr (gimple_omp_task_data_arg (stmt));
1788 gimple_omp_task_set_data_arg (copy, t);
1789 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
1790 gimple_omp_task_set_copy_fn (copy, t);
1791 t = unshare_expr (gimple_omp_task_arg_size (stmt));
1792 gimple_omp_task_set_arg_size (copy, t);
1793 t = unshare_expr (gimple_omp_task_arg_align (stmt));
1794 gimple_omp_task_set_arg_align (copy, t);
1795 goto copy_omp_body;
1796
1797 case GIMPLE_OMP_CRITICAL:
1798 t = unshare_expr (gimple_omp_critical_name
1799 (as_a <gomp_critical *> (stmt)));
1800 gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
1801 t = unshare_expr (gimple_omp_critical_clauses
1802 (as_a <gomp_critical *> (stmt)));
1803 gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
1804 goto copy_omp_body;
1805
1806 case GIMPLE_OMP_ORDERED:
1807 t = unshare_expr (gimple_omp_ordered_clauses
1808 (as_a <gomp_ordered *> (stmt)));
1809 gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
1810 goto copy_omp_body;
1811
1812 case GIMPLE_OMP_SECTIONS:
1813 t = unshare_expr (gimple_omp_sections_clauses (stmt));
1814 gimple_omp_sections_set_clauses (copy, t);
1815 t = unshare_expr (gimple_omp_sections_control (stmt));
1816 gimple_omp_sections_set_control (copy, t);
1817 /* FALLTHRU */
1818
1819 case GIMPLE_OMP_SINGLE:
1820 case GIMPLE_OMP_TARGET:
1821 case GIMPLE_OMP_TEAMS:
1822 case GIMPLE_OMP_SECTION:
1823 case GIMPLE_OMP_MASTER:
1824 case GIMPLE_OMP_TASKGROUP:
1825 case GIMPLE_OMP_GRID_BODY:
1826 copy_omp_body:
1827 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
1828 gimple_omp_set_body (copy, new_seq);
1829 break;
1830
1831 case GIMPLE_TRANSACTION:
1832 new_seq = gimple_seq_copy (gimple_transaction_body (
1833 as_a <gtransaction *> (stmt)));
1834 gimple_transaction_set_body (as_a <gtransaction *> (copy),
1835 new_seq);
1836 break;
1837
1838 case GIMPLE_WITH_CLEANUP_EXPR:
1839 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
1840 gimple_wce_set_cleanup (copy, new_seq);
1841 break;
1842
1843 default:
1844 gcc_unreachable ();
1845 }
1846 }
1847
1848 /* Make copy of operands. */
1849 for (i = 0; i < num_ops; i++)
1850 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
1851
1852 if (gimple_has_mem_ops (stmt))
1853 {
1854 gimple_set_vdef (copy, gimple_vdef (stmt));
1855 gimple_set_vuse (copy, gimple_vuse (stmt));
1856 }
1857
1858 /* Clear out SSA operand vectors on COPY. */
1859 if (gimple_has_ops (stmt))
1860 {
1861 gimple_set_use_ops (copy, NULL);
1862
1863 /* SSA operands need to be updated. */
1864 gimple_set_modified (copy, true);
1865 }
1866
1867 return copy;
1868}
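A brief usage sketch (editor's illustration, not part of gimple.c): the deep-copy routine completed above (gimple_copy) is normally paired with an iterator insertion; the helper name below is hypothetical and assumes the usual GCC internal headers are in scope.

/* Sketch: duplicate STMT and insert the copy before GSI.  The copy has
   unshared operands and is marked modified, so its SSA operands are
   recomputed by the next update_stmt.  */

static void
duplicate_stmt_before (gimple_stmt_iterator *gsi, gimple *stmt)
{
  gimple *copy = gimple_copy (stmt);
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
  update_stmt (copy);
}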
1869
1870
1871/* Return true if statement S has side-effects. We consider a
1872 statement to have side effects if:
1873
1874 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1875 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1876
1877bool
1878gimple_has_side_effects (const gimple *s)
1879{
1880 if (is_gimple_debug (s))
1881 return false;
1882
1883 /* We don't have to scan the arguments to check for
1884 volatile arguments, though, at present, we still
1885 do a scan to check for TREE_SIDE_EFFECTS. */
1886 if (gimple_has_volatile_ops (s))
1887 return true;
1888
1889 if (gimple_code (s) == GIMPLE_ASM
1890 && gimple_asm_volatile_p (as_a <const gasm *> (s)))
1891 return true;
1892
1893 if (is_gimple_call (s))
1894 {
1895 int flags = gimple_call_flags (s);
1896
1897 /* An infinite loop is considered a side effect. */
1898 if (!(flags & (ECF_CONST | ECF_PURE))
1899 || (flags & ECF_LOOPING_CONST_OR_PURE))
1900 return true;
1901
1902 return false;
1903 }
1904
1905 return false;
1906}
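As an illustration of how this predicate is typically combined with the virtual operands (an editor's sketch; the helper name is hypothetical):

/* Sketch: a statement whose result is unused may be deleted only if it
   neither has side effects nor writes memory (no VDEF).  */

static bool
stmt_removable_if_lhs_dead_p (gimple *stmt)
{
  return !gimple_has_side_effects (stmt) && gimple_vdef (stmt) == NULL_TREE;
}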
1907
1908/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1909 Return true if S can trap. When INCLUDE_MEM is true, check whether
1910 the memory operations could trap. When INCLUDE_STORES is true and
1911 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
1912
1913bool
1914gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
1915{
1916 tree t, div = NULL_TREE;
1917 enum tree_code op;
1918
1919 if (include_mem)
1920 {
1921 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
1922
1923 for (i = start; i < gimple_num_ops (s); i++)
1924 if (tree_could_trap_p (gimple_op (s, i)))
1925 return true;
1926 }
1927
1928 switch (gimple_code (s))
1929 {
1930 case GIMPLE_ASM:
1931 return gimple_asm_volatile_p (as_a <gasm *> (s));
1932
1933 case GIMPLE_CALL:
1934 t = gimple_call_fndecl (s);
1935 /* Assume that calls to weak functions may trap. */
1936 if (!t || !DECL_P (t) || DECL_WEAK (t))
1937 return true;
1938 return false;
1939
1940 case GIMPLE_ASSIGN:
1941 t = gimple_expr_type (s);
1942 op = gimple_assign_rhs_code (s);
1943 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
1944 div = gimple_assign_rhs2 (s);
1945 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
1946 (INTEGRAL_TYPE_P (t)
1947 && TYPE_OVERFLOW_TRAPS (t)),
1948 div));
1949
1950 case GIMPLE_COND:
1951 t = TREE_TYPE (gimple_cond_lhs (s));
1952 return operation_could_trap_p (gimple_cond_code (s),
1953 FLOAT_TYPE_P (t), false, NULL_TREE);
1954
1955 default:
1956 break;
1957 }
1958
1959 return false;
1960}
1961
1962/* Return true if statement S can trap. */
1963
1964bool
1965gimple_could_trap_p (gimple *s)
1966{
1967 return gimple_could_trap_p_1 (s, true, true);
1968}
1969
1970/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1971
1972bool
1973gimple_assign_rhs_could_trap_p (gimple *s)
1974{
1975 gcc_assert (is_gimple_assign (s));
1976 return gimple_could_trap_p_1 (s, true, false);
1977}
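A hedged sketch of the usual client of these trap predicates; the helper below is illustrative only and assumes a pass compiled inside GCC.

/* Sketch: before speculating an assignment (executing it on a path where
   it did not originally run), make sure it has no volatile operands and
   its RHS cannot trap.  */

static bool
can_speculate_assignment_p (gimple *stmt)
{
  return is_gimple_assign (stmt)
         && !gimple_has_volatile_ops (stmt)
         && !gimple_assign_rhs_could_trap_p (stmt);
}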
1978
1979
1980/* Print debugging information for gimple stmts generated. */
1981
1982void
1983dump_gimple_statistics (void)
1984{
1985 int i, total_tuples = 0, total_bytes = 0;
1986
1987 if (! GATHER_STATISTICS)
1988 {
1989 fprintf (stderr, "No gimple statistics\n");
1990 return;
1991 }
1992
1993 fprintf (stderr, "\nGIMPLE statements\n");
1994 fprintf (stderr, "Kind Stmts Bytes\n");
1995 fprintf (stderr, "---------------------------------------\n");
1996 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
1997 {
1998 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
1999 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2000 total_tuples += gimple_alloc_counts[i];
2001 total_bytes += gimple_alloc_sizes[i];
2002 }
2003 fprintf (stderr, "---------------------------------------\n");
2004 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2005 fprintf (stderr, "---------------------------------------\n");
2006}
2007
2008
2009/* Return the number of operands needed on the RHS of a GIMPLE
2010 assignment for an expression with tree code CODE. */
2011
2012unsigned
2013get_gimple_rhs_num_ops (enum tree_code code)
2014{
2015 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2016
2017 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2018 return 1;
2019 else if (rhs_class == GIMPLE_BINARY_RHS)
2020 return 2;
2021 else if (rhs_class == GIMPLE_TERNARY_RHS)
2022 return 3;
2023 else
2024 gcc_unreachable ();
2025}
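The mapping can be made concrete with a few self-checking assertions (an editor's sketch; the function is hypothetical).

/* Sketch: NEGATE_EXPR is a unary RHS, PLUS_EXPR a binary one, and
   COND_EXPR is one of the explicitly listed ternary codes.  */

static void
check_rhs_arity_examples (void)
{
  gcc_checking_assert (get_gimple_rhs_num_ops (NEGATE_EXPR) == 1);
  gcc_checking_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);
  gcc_checking_assert (get_gimple_rhs_num_ops (COND_EXPR) == 3);
}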
2026
2027#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2028 (unsigned char) \
2029 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2030 : ((TYPE) == tcc_binary \
2031 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2032 : ((TYPE) == tcc_constant \
2033 || (TYPE) == tcc_declaration \
2034 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2035 : ((SYM) == TRUTH_AND_EXPR \
2036 || (SYM) == TRUTH_OR_EXPR \
2037 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2038 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2039 : ((SYM) == COND_EXPR \
2040 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2041 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2042 || (SYM) == DOT_PROD_EXPR \
2043 || (SYM) == SAD_EXPR \
2044 || (SYM) == REALIGN_LOAD_EXPR \
2045 || (SYM) == VEC_COND_EXPR \
2046 || (SYM) == VEC_PERM_EXPR \
2047 || (SYM) == BIT_INSERT_EXPR \
2048 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2049 : ((SYM) == CONSTRUCTOR \
2050 || (SYM) == OBJ_TYPE_REF \
2051 || (SYM) == ASSERT_EXPR \
2052 || (SYM) == ADDR_EXPR \
2053 || (SYM) == WITH_SIZE_EXPR \
2054 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2055 : GIMPLE_INVALID_RHS),
2056#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2057
2058const unsigned char gimple_rhs_class_table[] = {
2059#include "all-tree.def"
2060};
2061
2062#undef DEFTREECODE
2063#undef END_OF_BASE_TREE_CODES
2064
2065/* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2066 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2067 we failed to create one. */
2068
2069tree
2070canonicalize_cond_expr_cond (tree t)
2071{
2072 /* Strip conversions around boolean operations. */
2073 if (CONVERT_EXPR_P (t)
2074 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2075 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2076 == BOOLEAN_TYPE))
2077 t = TREE_OPERAND (t, 0);
2078
2079 /* For !x use x == 0. */
2080 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2081 {
2082 tree top0 = TREE_OPERAND (t, 0);
2083 t = build2 (EQ_EXPR, TREE_TYPE (t),
2084 top0, build_int_cst (TREE_TYPE (top0), 0));
2085 }
2086 /* For cmp ? 1 : 0 use cmp. */
2087 else if (TREE_CODE (t) == COND_EXPR
2088 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2089 && integer_onep (TREE_OPERAND (t, 1))
2090 && integer_zerop (TREE_OPERAND (t, 2)))
2091 {
2092 tree top0 = TREE_OPERAND (t, 0);
2093 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2094 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2095 }
2096 /* For x ^ y use x != y. */
2097 else if (TREE_CODE (t) == BIT_XOR_EXPR)
2098 t = build2 (NE_EXPR, TREE_TYPE (t),
2099 TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2100
2101 if (is_gimple_condexpr (t))
2102 return t;
2103
2104 return NULL_TREE;
2105}
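A usage sketch (editor's addition): callers typically canonicalize before rewriting a GIMPLE_COND. The helper below is hypothetical; it assumes gimple_cond_set_condition_from_tree and update_stmt from the usual GIMPLE headers.

/* Sketch: replace the condition of COND_STMT with EXPR when EXPR can be
   canonicalized into a form valid for a GIMPLE_COND; otherwise leave the
   statement untouched.  */

static bool
try_set_cond_from_tree (gcond *cond_stmt, tree expr)
{
  tree canon = canonicalize_cond_expr_cond (expr);
  if (!canon)
    return false;
  gimple_cond_set_condition_from_tree (cond_stmt, canon);
  update_stmt (cond_stmt);
  return true;
}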
2106
2107/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2108 the positions marked by the set ARGS_TO_SKIP. */
2109
2110gcall *
2111gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2112{
2113 int i;
2114 int nargs = gimple_call_num_args (stmt);
2115 auto_vec<tree> vargs (nargs);
2116 gcall *new_stmt;
2117
2118 for (i = 0; i < nargs; i++)
2119 if (!bitmap_bit_p (args_to_skip, i))
2120 vargs.quick_push (gimple_call_arg (stmt, i));
2121
2122 if (gimple_call_internal_p (stmt))
2123 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2124 vargs);
2125 else
2126 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2127
2128 if (gimple_call_lhs (stmt))
2129 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2130
2131 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2132 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2133
2134 if (gimple_has_location (stmt))
2135 gimple_set_location (new_stmt, gimple_location (stmt));
2136 gimple_call_copy_flags (new_stmt, stmt);
2137 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2138
2139 gimple_set_modified (new_stmt, true);
2140
2141 return new_stmt;
2142}
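A short sketch of how a caller builds ARGS_TO_SKIP (editor's illustration; the wrapper name is invented).

/* Sketch: drop the first actual argument of CALL by building a one-bit
   ARGS_TO_SKIP bitmap.  The returned statement still has to be inserted
   in place of CALL by the caller.  */

static gcall *
copy_call_without_first_arg (gcall *call)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  bitmap_set_bit (args_to_skip, 0);
  gcall *new_call = gimple_call_copy_skip_args (call, args_to_skip);
  BITMAP_FREE (args_to_skip);
  return new_call;
}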
2143
2144
2145
2146/* Return true if the field decls F1 and F2 are at the same offset.
2147
2148 This is intended to be used on GIMPLE types only. */
2149
2150bool
2151gimple_compare_field_offset (tree f1, tree f2)
2152{
2153 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
2154 {
2155 tree offset1 = DECL_FIELD_OFFSET (f1);
2156 tree offset2 = DECL_FIELD_OFFSET (f2);
2157 return ((offset1 == offset2
2158 /* Once gimplification is done, self-referential offsets are
2159 instantiated as operand #2 of the COMPONENT_REF built for
2160 each access and reset. Therefore, they are not relevant
2161 anymore and fields are interchangeable provided that they
2162 represent the same access. */
2163 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
2164 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
2165 && (DECL_SIZE (f1) == DECL_SIZE (f2)
2166 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
2167 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
2168 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
2169 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
2170 || operand_equal_p (offset1, offset2, 0))
2171 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
2172 DECL_FIELD_BIT_OFFSET (f2)));
2173 }
2174
2175 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
2176 should be, so handle differing ones specially by decomposing
2177 the offset into a byte and bit offset manually. */
2178 if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
2179 && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
2180 {
2181 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
2182 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
2183 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
2184 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
2185 + bit_offset1 / BITS_PER_UNIT);
2186 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
2187 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
2188 + bit_offset2 / BITS_PER_UNIT);
2189 if (byte_offset1 != byte_offset2)
2190 return false;
2191 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
2192 }
2193
2194 return false;
2195}
2196
2197
2198/* Return a type the same as TYPE except unsigned or
2199 signed according to UNSIGNEDP. */
2200
2201static tree
2202gimple_signed_or_unsigned_type (bool unsignedp, tree type)
2203{
2204 tree type1;
2205 int i;
2206
2207 type1 = TYPE_MAIN_VARIANT (type);
2208 if (type1 == signed_char_type_node
2209 || type1 == char_type_node
2210 || type1 == unsigned_char_type_node)
2211 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2212 if (type1 == integer_type_node || type1 == unsigned_type_node)
2213 return unsignedp ? unsigned_type_node : integer_type_node;
2214 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
2215 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2216 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
2217 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2218 if (type1 == long_long_integer_type_node
2219 || type1 == long_long_unsigned_type_node)
2220 return unsignedp
2221 ? long_long_unsigned_type_node
2222 : long_long_integer_type_node;
2223
2224 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2225 if (int_n_enabled_p[i]
2226 && (type1 == int_n_trees[i].unsigned_type
2227 || type1 == int_n_trees[i].signed_type))
2228 return unsignedp
2229 ? int_n_trees[i].unsigned_type
2230 : int_n_trees[i].signed_type;
2231
2232#if HOST_BITS_PER_WIDE_INT >= 64
2233 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
2234 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2235#endif
2236 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
2237 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2238 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
2239 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2240 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
2241 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2242 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
2243 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2244
2245#define GIMPLE_FIXED_TYPES(NAME) \
2246 if (type1 == short_ ## NAME ## _type_node \
2247 || type1 == unsigned_short_ ## NAME ## _type_node) \
2248 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2249 : short_ ## NAME ## _type_node; \
2250 if (type1 == NAME ## _type_node \
2251 || type1 == unsigned_ ## NAME ## _type_node) \
2252 return unsignedp ? unsigned_ ## NAME ## _type_node \
2253 : NAME ## _type_node; \
2254 if (type1 == long_ ## NAME ## _type_node \
2255 || type1 == unsigned_long_ ## NAME ## _type_node) \
2256 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2257 : long_ ## NAME ## _type_node; \
2258 if (type1 == long_long_ ## NAME ## _type_node \
2259 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2260 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2261 : long_long_ ## NAME ## _type_node;
2262
2263#define GIMPLE_FIXED_MODE_TYPES(NAME) \
2264 if (type1 == NAME ## _type_node \
2265 || type1 == u ## NAME ## _type_node) \
2266 return unsignedp ? u ## NAME ## _type_node \
2267 : NAME ## _type_node;
2268
2269#define GIMPLE_FIXED_TYPES_SAT(NAME) \
2270 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2271 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2272 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2273 : sat_ ## short_ ## NAME ## _type_node; \
2274 if (type1 == sat_ ## NAME ## _type_node \
2275 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2276 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2277 : sat_ ## NAME ## _type_node; \
2278 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2279 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2280 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2281 : sat_ ## long_ ## NAME ## _type_node; \
2282 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2283 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2284 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2285 : sat_ ## long_long_ ## NAME ## _type_node;
2286
2287#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
2288 if (type1 == sat_ ## NAME ## _type_node \
2289 || type1 == sat_ ## u ## NAME ## _type_node) \
2290 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2291 : sat_ ## NAME ## _type_node;
2292
2293 GIMPLE_FIXED_TYPES (fract);
2294 GIMPLE_FIXED_TYPES_SAT (fract);
2295 GIMPLE_FIXED_TYPES (accum);
2296 GIMPLE_FIXED_TYPES_SAT (accum);
2297
2298 GIMPLE_FIXED_MODE_TYPES (qq);
2299 GIMPLE_FIXED_MODE_TYPES (hq);
2300 GIMPLE_FIXED_MODE_TYPES (sq);
2301 GIMPLE_FIXED_MODE_TYPES (dq);
2302 GIMPLE_FIXED_MODE_TYPES (tq);
2303 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
2304 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
2305 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
2306 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
2307 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
2308 GIMPLE_FIXED_MODE_TYPES (ha);
2309 GIMPLE_FIXED_MODE_TYPES (sa);
2310 GIMPLE_FIXED_MODE_TYPES (da);
2311 GIMPLE_FIXED_MODE_TYPES (ta);
2312 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
2313 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
2314 GIMPLE_FIXED_MODE_TYPES_SAT (da);
2315 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
2316
2317 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2318 the precision; they have precision set to match their range, but
2319 may use a wider mode to match an ABI. If we change modes, we may
2320 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2321 the precision as well, so as to yield correct results for
2322 bit-field types. C++ does not have these separate bit-field
2323 types, and producing a signed or unsigned variant of an
2324 ENUMERAL_TYPE may cause other problems as well. */
2325 if (!INTEGRAL_TYPE_P (type)
2326 || TYPE_UNSIGNED (type) == unsignedp)
2327 return type;
2328
2329#define TYPE_OK(node) \
2330 (TYPE_MODE (type) == TYPE_MODE (node) \
2331 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2332 if (TYPE_OK (signed_char_type_node))
2333 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2334 if (TYPE_OK (integer_type_node))
2335 return unsignedp ? unsigned_type_node : integer_type_node;
2336 if (TYPE_OK (short_integer_type_node))
2337 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2338 if (TYPE_OK (long_integer_type_node))
2339 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2340 if (TYPE_OK (long_long_integer_type_node))
2341 return (unsignedp
2342 ? long_long_unsigned_type_node
2343 : long_long_integer_type_node);
2344
2345 for (i = 0; i < NUM_INT_N_ENTS; i ++)
2346 if (int_n_enabled_p[i]
2347 && TYPE_MODE (type) == int_n_data[i].m
2348 && TYPE_PRECISION (type) == int_n_data[i].bitsize)
2349 return unsignedp
2350 ? int_n_trees[i].unsigned_type
2351 : int_n_trees[i].signed_type;
2352
2353#if HOST_BITS_PER_WIDE_INT >= 64
2354 if (TYPE_OK (intTI_type_node))
2355 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2356#endif
2357 if (TYPE_OK (intDI_type_node))
2358 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2359 if (TYPE_OK (intSI_type_node))
2360 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2361 if (TYPE_OK (intHI_type_node))
2362 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2363 if (TYPE_OK (intQI_type_node))
2364 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2365
2366#undef GIMPLE_FIXED_TYPES
2367#undef GIMPLE_FIXED_MODE_TYPES
2368#undef GIMPLE_FIXED_TYPES_SAT
2369#undef GIMPLE_FIXED_MODE_TYPES_SAT
2370#undef TYPE_OK
2371
2372 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
2373}
2374
2375
2376/* Return an unsigned type the same as TYPE in other respects. */
2377
2378tree
2379gimple_unsigned_type (tree type)
2380{
2381 return gimple_signed_or_unsigned_type (true, type);
2382}
2383
2384
2385/* Return a signed type the same as TYPE in other respects. */
2386
2387tree
2388gimple_signed_type (tree type)
2389{
2390 return gimple_signed_or_unsigned_type (false, type);
2391}
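A minimal usage sketch (editor's addition; the helper is hypothetical): these wrappers are usually used to force modular, wrapping arithmetic.

/* Sketch: convert VAL to the unsigned variant of its own type, e.g. so
   that later arithmetic wraps instead of invoking undefined overflow.  */

static tree
build_unsigned_copy_of (tree val)
{
  tree utype = gimple_unsigned_type (TREE_TYPE (val));
  return fold_convert (utype, val);
}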
2392
2393
2394/* Return the typed-based alias set for T, which may be an expression
2395 or a type. Return -1 if we don't do anything special. */
2396
2397alias_set_type
2398gimple_get_alias_set (tree t)
2399{
2400 tree u;
2401
2402 /* Permit type-punning when accessing a union, provided the access
2403 is directly through the union. For example, this code does not
2404 permit taking the address of a union member and then storing
2405 through it. Even the type-punning allowed here is a GCC
2406 extension, albeit a common and useful one; the C standard says
2407 that such accesses have implementation-defined behavior. */
2408 for (u = t;
2409 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2410 u = TREE_OPERAND (u, 0))
2411 if (TREE_CODE (u) == COMPONENT_REF
2412 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2413 return 0;
2414
2415 /* That's all the expressions we handle specially. */
2416 if (!TYPE_P (t))
2417 return -1;
2418
2419 /* For convenience, follow the C standard when dealing with
2420 character types. Any object may be accessed via an lvalue that
2421 has character type. */
2422 if (t == char_type_node
2423 || t == signed_char_type_node
2424 || t == unsigned_char_type_node)
2425 return 0;
2426
2427 /* Allow aliasing between signed and unsigned variants of the same
2428 type. We treat the signed variant as canonical. */
2429 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2430 {
2431 tree t1 = gimple_signed_type (t);
2432
2433 /* t1 == t can happen for boolean nodes which are always unsigned. */
2434 if (t1 != t)
2435 return get_alias_set (t1);
2436 }
2437
2438 return -1;
2439}
2440
2441
2442/* Helper for gimple_ior_addresses_taken_1. */
2443
2444static bool
2445 gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
2446{
2447 bitmap addresses_taken = (bitmap)data;
2448 addr = get_base_address (addr);
2449 if (addr
2450 && DECL_P (addr))
2451 {
2452 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2453 return true;
2454 }
2455 return false;
2456}
2457
2458/* Set the bit for the uid of all decls that have their address taken
2459 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2460 were any in this stmt. */
2461
2462bool
2463 gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
2464{
2465 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2466 gimple_ior_addresses_taken_1);
2467}
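A usage sketch (editor's illustration; the function name is invented).

/* Sketch: collect the UIDs of all decls whose address is taken anywhere
   in basic block BB into ADDRESSES_TAKEN.  */

static void
collect_addresses_taken_in_bb (basic_block bb, bitmap addresses_taken)
{
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
}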
2468
2469
2470/* Return true when STMT's arguments and return value match those of FNDECL,
2471 a decl of a builtin function. */
2472
2473 bool
2474 gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
2475 {
2476 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
2477
2478 tree ret = gimple_call_lhs (stmt);
2479 if (ret
2480 && !useless_type_conversion_p (TREE_TYPE (ret),
2481 TREE_TYPE (TREE_TYPE (fndecl))))
2482 return false;
2483
2484 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2485 unsigned nargs = gimple_call_num_args (stmt);
2486 for (unsigned i = 0; i < nargs; ++i)
2487 {
2488 /* Variadic args follow. */
2489 if (!targs)
2490 return true;
2491 tree arg = gimple_call_arg (stmt, i);
2492 tree type = TREE_VALUE (targs);
2493 if (!useless_type_conversion_p (type, TREE_TYPE (arg))
2494 /* char/short integral arguments are promoted to int
2495 by several frontends if targetm.calls.promote_prototypes
2496 is true. Allow such promotion too. */
2497 && !(INTEGRAL_TYPE_P (type)
2498 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
2499 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
2500 && useless_type_conversion_p (integer_type_node,
2501 TREE_TYPE (arg))))
2502 return false;
2503 targs = TREE_CHAIN (targs);
2504 }
2505 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2506 return false;
2507 return true;
2508}
2509
2510/* Return true when STMT is a builtin call. */
2511
2512bool
2513gimple_call_builtin_p (const gimple *stmt)
2514{
2515 tree fndecl;
2516 if (is_gimple_call (stmt)
2517 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2518 && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2519 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2520 return false;
2521}
2522
2523/* Return true when STMT is a builtin call of class KLASS. */
2524
2525bool
2526gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
2527{
2528 tree fndecl;
2529 if (is_gimple_call (stmt)
2530 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2531 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2532 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2533 return false;
2534}
2535
2536/* Return true when STMT is a call to the normal built-in function CODE. */
2537
2538bool
2539gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
2540{
2541 tree fndecl;
2542 if (is_gimple_call (stmt)
2543 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2544 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2545 && DECL_FUNCTION_CODE (fndecl) == code)
2546 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2547 return false;
2548}
2549
2550/* If CALL is a call to a combined_fn (i.e. an internal function or
2551 a normal built-in function), return its code, otherwise return
2552 CFN_LAST. */
2553
2554combined_fn
2555gimple_call_combined_fn (const gimple *stmt)
2556{
2557 if (const gcall *call = dyn_cast <const gcall *> (stmt))
2558 {
2559 if (gimple_call_internal_p (call))
2560 return as_combined_fn (gimple_call_internal_fn (call));
2561
2562 tree fndecl = gimple_call_fndecl (stmt);
2563 if (fndecl
2564 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2565 && gimple_builtin_call_types_compatible_p (stmt, fndecl))
2566 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
2567 }
2568 return CFN_LAST;
2569}
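Two hedged usage sketches (editor's additions; both helper names are invented): CFN_LAST doubles as the "not recognized" value, while the gimple_call_builtin_p overloads above handle the single-builtin case.

/* Sketch: is STMT any call GCC knows how to reason about, i.e. either a
   recognized built-in or an internal function?  */

static bool
call_is_recognized_fn_p (gimple *stmt)
{
  return is_gimple_call (stmt) && gimple_call_combined_fn (stmt) != CFN_LAST;
}

/* Sketch: is STMT a well-formed call to __builtin_memcpy?  */

static bool
call_is_memcpy_p (gimple *stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}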
2570
2571/* Return true if STMT clobbers memory. STMT is required to be a
2572 GIMPLE_ASM. */
2573
2574bool
2575gimple_asm_clobbers_memory_p (const gasm *stmt)
2576{
2577 unsigned i;
2578
2579 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2580 {
2581 tree op = gimple_asm_clobber_op (stmt, i);
2582 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2583 return true;
2584 }
2585
2586 /* Non-empty basic ASM implicitly clobbers memory. */
2587 if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
2588 return true;
2589
2590 return false;
2591}
2592
2593/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2594
2595void
2596dump_decl_set (FILE *file, bitmap set)
2597{
2598 if (set)
2599 {
2600 bitmap_iterator bi;
2601 unsigned i;
2602
2603 fprintf (file, "{ ");
2604
2605 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2606 {
2607 fprintf (file, "D.%u", i);
2608 fprintf (file, " ");
2609 }
2610
2611 fprintf (file, "}");
2612 }
2613 else
2614 fprintf (file, "NIL");
2615}
2616
2617/* Return true when CALL is a call stmt that definitely does not
2618 free any memory or make it otherwise unavailable. */
2619bool
2620nonfreeing_call_p (gimple *call)
2621{
2622 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2623 && gimple_call_flags (call) & ECF_LEAF)
2624 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2625 {
2626 /* Just in case these become ECF_LEAF in the future. */
2627 case BUILT_IN_FREE:
2628 case BUILT_IN_TM_FREE:
2629 case BUILT_IN_REALLOC:
2630 case BUILT_IN_STACK_RESTORE:
2631 return false;
2632 default:
2633 return true;
2634 }
2635 else if (gimple_call_internal_p (call))
2636 switch (gimple_call_internal_fn (call))
2637 {
2638 case IFN_ABNORMAL_DISPATCHER:
2639 return true;
2640 default:
2641 if (gimple_call_flags (call) & ECF_LEAF)
2642 return true;
2643 return false;
2644 }
2645
2646 tree fndecl = gimple_call_fndecl (call);
2647 if (!fndecl)
2648 return false;
2649 struct cgraph_node *n = cgraph_node::get (fndecl);
2650 if (!n)
2651 return false;
2652 enum availability availability;
2653 n = n->function_symbol (&availability);
2654 if (!n || availability <= AVAIL_INTERPOSABLE)
2655 return false;
2656 return n->nonfreeing_fn;
2657}
2658
2659/* Return true when CALL is a call stmt that definitely need not
2660 be considered to be a memory barrier. */
2661bool
2662nonbarrier_call_p (gimple *call)
2663{
2664 if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
2665 return true;
2666 /* Should extend this to have a nonbarrier_fn flag, just as above in
2667 the nonfreeing case. */
2668 return false;
2669}
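A usage sketch (editor's addition; the helper name is illustrative).

/* Sketch: a conservative test used when deciding whether memory loads may
   be moved across STMT.  */

static bool
stmt_may_act_as_memory_barrier_p (gimple *stmt)
{
  return is_gimple_call (stmt) && !nonbarrier_call_p (stmt);
}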
2670
2671/* Callback for walk_stmt_load_store_ops.
2672
2673 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2674 otherwise.
2675
2676 This routine only makes a superficial check for a dereference. Thus
2677 it must only be used if it is safe to return a false negative. */
2678static bool
2679check_loadstore (gimple *, tree op, tree, void *data)
2680{
2681 if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2682 {
2683 /* Some address spaces may legitimately dereference zero. */
2684 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
2685 if (targetm.addr_space.zero_address_valid (as))
2686 return false;
2687
2688 return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
2689 }
2690 return false;
2691}
2692
2693
2694/* Return true if OP can be inferred to be non-NULL after STMT executes,
2695 either by using a pointer dereference or attributes. */
2696bool
2697infer_nonnull_range (gimple *stmt, tree op)
2698{
2699 return infer_nonnull_range_by_dereference (stmt, op)
2700 || infer_nonnull_range_by_attribute (stmt, op);
2701}
2702
2703/* Return true if OP can be inferred to be non-NULL after STMT
2704 executes by using a pointer dereference. */
2705bool
2706infer_nonnull_range_by_dereference (gimple *stmt, tree op)
2707{
2708 /* We can only assume that a pointer dereference will yield
2709 non-NULL if -fdelete-null-pointer-checks is enabled. */
2710 if (!flag_delete_null_pointer_checks
2711 || !POINTER_TYPE_P (TREE_TYPE (op))
2712 || gimple_code (stmt) == GIMPLE_ASM)
2713 return false;
2714
2715 if (walk_stmt_load_store_ops (stmt, (void *)op,
2716 check_loadstore, check_loadstore))
2717 return true;
2718
2719 return false;
2720}
2721
2722/* Return true if OP can be inferred to be non-NULL after STMT
2723 executes by using attributes. */
2724bool
2725infer_nonnull_range_by_attribute (gimple *stmt, tree op)
2726{
2727 /* We can only assume that a pointer dereference will yield
2728 non-NULL if -fdelete-null-pointer-checks is enabled. */
2729 if (!flag_delete_null_pointer_checks
2730 || !POINTER_TYPE_P (TREE_TYPE (op))
2731 || gimple_code (stmt) == GIMPLE_ASM)
2732 return false;
2733
2734 if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2735 {
2736 tree fntype = gimple_call_fntype (stmt);
2737 tree attrs = TYPE_ATTRIBUTES (fntype);
2738 for (; attrs; attrs = TREE_CHAIN (attrs))
2739 {
2740 attrs = lookup_attribute ("nonnull", attrs);
2741
2742 /* If "nonnull" wasn't specified, we know nothing about
2743 the argument. */
2744 if (attrs == NULL_TREE)
2745 return false;
2746
2747 /* If "nonnull" applies to all the arguments, then ARG
2748 is non-null if it's in the argument list. */
2749 if (TREE_VALUE (attrs) == NULL_TREE)
2750 {
2751 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2752 {
2753 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
2754 && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
2755 return true;
2756 }
2757 return false;
2758 }
2759
2760 /* Now see if op appears in the nonnull list. */
2761 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2762 {
2763 unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2764 if (idx < gimple_call_num_args (stmt))
2765 {
2766 tree arg = gimple_call_arg (stmt, idx);
2767 if (operand_equal_p (op, arg, 0))
2768 return true;
2769 }
2770 }
2771 }
2772 }
2773
2774 /* If this function is marked as returning non-null, then we can
2775 infer OP is non-null if it is used in the return statement. */
2776 if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2777 if (gimple_return_retval (return_stmt)
2778 && operand_equal_p (gimple_return_retval (return_stmt), op, 0)
2779 && lookup_attribute ("returns_nonnull",
2780 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2781 return true;
2782
2783 return false;
2784}
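A sketch of a typical consumer of these inference routines (editor's addition; the helper is hypothetical).

/* Sketch: walk basic block BB and return the first statement after which
   pointer PTR may be assumed non-NULL, or NULL if there is none.  */

static gimple *
first_stmt_making_nonnull (basic_block bb, tree ptr)
{
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    if (infer_nonnull_range (gsi_stmt (gsi), ptr))
      return gsi_stmt (gsi);
  return NULL;
}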
2785
2786/* Compare two case labels. Because the front end should already have
2787 made sure that case ranges do not overlap, it is enough to only compare
2788 the CASE_LOW values of each case label. */
2789
2790static int
2791compare_case_labels (const void *p1, const void *p2)
2792{
2793 const_tree const case1 = *(const_tree const*)p1;
2794 const_tree const case2 = *(const_tree const*)p2;
2795
2796 /* The 'default' case label always goes first. */
2797 if (!CASE_LOW (case1))
2798 return -1;
2799 else if (!CASE_LOW (case2))
2800 return 1;
2801 else
2802 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2803}
2804
2805/* Sort the case labels in LABEL_VEC in place in ascending order. */
2806
2807void
2808sort_case_labels (vec<tree> label_vec)
2809{
2810 label_vec.qsort (compare_case_labels);
2811}
2812\f
2813/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2814
2815 LABELS is a vector that contains all case labels to look at.
2816
2817 INDEX_TYPE is the type of the switch index expression. Case labels
2818 in LABELS are discarded if their values are not in the value range
2819 covered by INDEX_TYPE. The remaining case label values are folded
2820 to INDEX_TYPE.
2821
2822 If a default case exists in LABELS, it is removed from LABELS and
2823 returned in DEFAULT_CASEP. If no default case exists, but the
2824 case labels already cover the whole range of INDEX_TYPE, a default
2825 case is returned pointing to one of the existing case labels.
2826 Otherwise DEFAULT_CASEP is set to NULL_TREE.
2827
2828 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2829 apply and no action is taken regardless of whether a default case is
2830 found or not. */
2831
2832void
2833preprocess_case_label_vec_for_gimple (vec<tree> labels,
2834 tree index_type,
2835 tree *default_casep)
2836{
2837 tree min_value, max_value;
2838 tree default_case = NULL_TREE;
2839 size_t i, len;
2840
2841 i = 0;
2842 min_value = TYPE_MIN_VALUE (index_type);
2843 max_value = TYPE_MAX_VALUE (index_type);
2844 while (i < labels.length ())
2845 {
2846 tree elt = labels[i];
2847 tree low = CASE_LOW (elt);
2848 tree high = CASE_HIGH (elt);
2849 bool remove_element = FALSE;
2850
2851 if (low)
2852 {
2853 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
2854 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
2855
2856 /* This is a non-default case label, i.e. it has a value.
2857
2858 See if the case label is reachable within the range of
2859 the index type. Remove out-of-range case values. Turn
2860 case ranges into a canonical form (high > low strictly)
2861 and convert the case label values to the index type.
2862
2863 NB: The type of gimple_switch_index() may be the promoted
2864 type, but the case labels retain the original type. */
2865
2866 if (high)
2867 {
2868 /* This is a case range. Discard empty ranges.
2869 If the bounds or the range are equal, turn this
2870 into a simple (one-value) case. */
2871 int cmp = tree_int_cst_compare (high, low);
2872 if (cmp < 0)
2873 remove_element = TRUE;
2874 else if (cmp == 0)
2875 high = NULL_TREE;
2876 }
2877
2878 if (! high)
2879 {
2880 /* If the simple case value is unreachable, ignore it. */
2881 if ((TREE_CODE (min_value) == INTEGER_CST
2882 && tree_int_cst_compare (low, min_value) < 0)
2883 || (TREE_CODE (max_value) == INTEGER_CST
2884 && tree_int_cst_compare (low, max_value) > 0))
2885 remove_element = TRUE;
2886 else
2887 low = fold_convert (index_type, low);
2888 }
2889 else
2890 {
2891 /* If the entire case range is unreachable, ignore it. */
2892 if ((TREE_CODE (min_value) == INTEGER_CST
2893 && tree_int_cst_compare (high, min_value) < 0)
2894 || (TREE_CODE (max_value) == INTEGER_CST
2895 && tree_int_cst_compare (low, max_value) > 0))
2896 remove_element = TRUE;
2897 else
2898 {
2899 /* If the lower bound is less than the index type's
2900 minimum value, truncate the range bounds. */
2901 if (TREE_CODE (min_value) == INTEGER_CST
2902 && tree_int_cst_compare (low, min_value) < 0)
2903 low = min_value;
2904 low = fold_convert (index_type, low);
2905
2906 /* If the upper bound is greater than the index type's
2907 maximum value, truncate the range bounds. */
2908 if (TREE_CODE (max_value) == INTEGER_CST
2909 && tree_int_cst_compare (high, max_value) > 0)
2910 high = max_value;
2911 high = fold_convert (index_type, high);
2912
2913 /* We may have folded a case range to a one-value case. */
2914 if (tree_int_cst_equal (low, high))
2915 high = NULL_TREE;
2916 }
2917 }
2918
2919 CASE_LOW (elt) = low;
2920 CASE_HIGH (elt) = high;
2921 }
2922 else
2923 {
2924 gcc_assert (!default_case);
2925 default_case = elt;
2926 /* The default case must be passed separately to the
2927 gimple_build_switch routine. But if DEFAULT_CASEP
2928 is NULL, we do not remove the default case (it would
2929 be completely lost). */
2930 if (default_casep)
2931 remove_element = TRUE;
2932 }
2933
2934 if (remove_element)
2935 labels.ordered_remove (i);
2936 else
2937 i++;
2938 }
2939 len = i;
2940
2941 if (!labels.is_empty ())
2942 sort_case_labels (labels);
2943
2944 if (default_casep && !default_case)
2945 {
2946 /* If the switch has no default label, add one, so that we jump
2947 around the switch body. If the labels already cover the whole
2948 range of the switch index_type, add the default label pointing
2949 to one of the existing labels. */
2950 if (len
2951 && TYPE_MIN_VALUE (index_type)
2952 && TYPE_MAX_VALUE (index_type)
2953 && tree_int_cst_equal (CASE_LOW (labels[0]),
2954 TYPE_MIN_VALUE (index_type)))
2955 {
2956 tree low, high = CASE_HIGH (labels[len - 1]);
2957 if (!high)
2958 high = CASE_LOW (labels[len - 1]);
2959 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
2960 {
2961 for (i = 1; i < len; i++)
2962 {
2963 high = CASE_LOW (labels[i]);
2964 low = CASE_HIGH (labels[i - 1]);
2965 if (!low)
2966 low = CASE_LOW (labels[i - 1]);
2967 if (wi::add (low, 1) != high)
2968 break;
2969 }
2970 if (i == len)
2971 {
2972 tree label = CASE_LABEL (labels[0]);
2973 default_case = build_case_label (NULL_TREE, NULL_TREE,
2974 label);
2975 }
2976 }
2977 }
2978 }
2979
2980 if (default_casep)
2981 *default_casep = default_case;
2982}
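A hedged sketch of the usual gimplification sequence around this helper (editor's addition; build_switch_for_labels and FALLBACK_LABEL are illustrative, not existing APIs).

/* Sketch: LABELS is cleaned up and sorted in place; if no default case was
   present and the labels do not already cover the index range, a new
   default jumping to FALLBACK_LABEL is created before building the switch.  */

static gswitch *
build_switch_for_labels (tree index, vec<tree> labels, tree fallback_label)
{
  tree default_case = NULL_TREE;
  preprocess_case_label_vec_for_gimple (labels, TREE_TYPE (index),
					&default_case);
  if (!default_case)
    default_case = build_case_label (NULL_TREE, NULL_TREE, fallback_label);
  return gimple_build_switch (index, default_case, labels);
}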
2983
2984/* Set the location of all statements in SEQ to LOC. */
2985
2986void
2987gimple_seq_set_location (gimple_seq seq, location_t loc)
2988{
2989 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2990 gimple_set_location (gsi_stmt (i), loc);
2991}
2992
2993/* Release SSA_NAMEs in SEQ as well as the GIMPLE statements. */
2994
2995void
2996gimple_seq_discard (gimple_seq seq)
2997{
2998 gimple_stmt_iterator gsi;
2999
3000 for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
3001 {
3002 gimple *stmt = gsi_stmt (gsi);
3003 gsi_remove (&gsi, true);
3004 release_defs (stmt);
3005 ggc_free (stmt);
3006 }
3007}
3008
3009/* See if STMT now calls function that takes no parameters and if so, drop
3010 call arguments. This is used when devirtualization machinery redirects
3011 to __builtin_unreachable or __cxa_pure_virtual. */
3012
3013void
3014maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
3015{
3016 tree decl = gimple_call_fndecl (stmt);
3017 if (TYPE_ARG_TYPES (TREE_TYPE (decl))
3018 && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
3019 && gimple_call_num_args (stmt))
3020 {
3021 gimple_set_num_ops (stmt, 3);
3022 update_stmt_fn (fn, stmt);
3023 }
3024}
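A final hedged sketch (editor's addition): the devirtualization-style redirection mentioned in the comment above, written as an illustrative wrapper rather than the actual cgraph machinery.

/* Sketch: redirect CALL to __builtin_unreachable and drop its now-unused
   arguments.  builtin_decl_implicit and gimple_call_set_fndecl are standard
   GCC internals; the wrapper itself is invented for illustration.  */

static void
redirect_call_to_unreachable (struct function *fn, gcall *call)
{
  gimple_call_set_fndecl (call, builtin_decl_implicit (BUILT_IN_UNREACHABLE));
  maybe_remove_unused_call_args (fn, call);
  update_stmt_fn (fn, call);
}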