/* gcc/gimple.c -- Gimple IR support functions (GCC source tree).  */
1 /* Gimple IR support functions.
2
3 Copyright (C) 2007-2014 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "calls.h"
29 #include "stmt.h"
30 #include "stor-layout.h"
31 #include "hard-reg-set.h"
32 #include "predict.h"
33 #include "vec.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "input.h"
38 #include "function.h"
39 #include "dominance.h"
40 #include "cfg.h"
41 #include "basic-block.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "tree-eh.h"
45 #include "gimple-expr.h"
46 #include "is-a.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "gimple.h"
51 #include "gimplify.h"
52 #include "diagnostic.h"
53 #include "value-prof.h"
54 #include "flags.h"
55 #include "alias.h"
56 #include "demangle.h"
57 #include "langhooks.h"
58 #include "bitmap.h"
59 #include "stringpool.h"
60 #include "tree-ssanames.h"
61
62
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
        (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Byte size of each tuple structure, indexed by
   gimple_statement_structure_enum.  Used by gimple_size.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Human-readable name of each GIMPLE code, indexed by gimple_code.  */
#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Map from a GIMPLE code to the structure enum that implements it.  */
#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  */

/* Number of tuples allocated and total bytes allocated, per allocation
   kind.  Only updated when GATHER_STATISTICS is enabled
   (see gimple_alloc_stat).  */
int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};
104
105 /* Gimple tuple constructors.
106 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
107 be passed a NULL to start with an empty sequence. */
108
/* Set the code for statement G to CODE.  This is a raw write of the
   code field; it does not adjust the operand vector or any other part
   of the tuple.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->code = code;
}
116
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  This is the size of the base structure only; any extra
   trailing operand slots are accounted for by the caller
   (see gimple_alloc_stat).  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}
125
126 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
127 operands. */
128
129 gimple
130 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
131 {
132 size_t size;
133 gimple stmt;
134
135 size = gimple_size (code);
136 if (num_ops > 0)
137 size += sizeof (tree) * (num_ops - 1);
138
139 if (GATHER_STATISTICS)
140 {
141 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
142 gimple_alloc_counts[(int) kind]++;
143 gimple_alloc_sizes[(int) kind] += size;
144 }
145
146 stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
147 gimple_set_code (stmt, code);
148 gimple_set_num_ops (stmt, num_ops);
149
150 /* Do not call gimple_set_modified here as it has other side
151 effects and this tuple is still not completely built. */
152 stmt->modified = 1;
153 gimple_init_singleton (stmt);
154
155 return stmt;
156 }
157
/* Set SUBCODE to be the code of the expression computed by statement G.
   The meaning of the subcode depends on the statement code (e.g. for
   GIMPLE_ASSIGN it carries the RHS tree_code; see
   gimple_build_assign_with_ops).  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->subcode = subcode;
}
168
169
170
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.

   The macro wrapper supplies the memory-statistics bookkeeping
   information for the call site.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
187
188
189 /* Build a GIMPLE_RETURN statement returning RETVAL. */
190
191 gimple
192 gimple_build_return (tree retval)
193 {
194 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
195 if (retval)
196 gimple_return_set_retval (s, retval);
197 return s;
198 }
199
200 /* Reset alias information on call S. */
201
202 void
203 gimple_call_reset_alias_info (gimple s)
204 {
205 if (gimple_call_flags (s) & ECF_CONST)
206 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
207 else
208 pt_solution_reset (gimple_call_use_set (s));
209 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
210 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
211 else
212 pt_solution_reset (gimple_call_clobber_set (s));
213 }
214
/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  /* NARGS + 3 operands: the arguments plus three fixed slots.  Slot 1
     holds the callee (set below); the others presumably hold the LHS
     and static chain -- confirm against the operand layout in
     gimple.h.  */
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  /* Calls always reference the function through its address.  */
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
231
232
233 /* Build a GIMPLE_CALL statement to function FN with the arguments
234 specified in vector ARGS. */
235
236 gimple
237 gimple_build_call_vec (tree fn, vec<tree> args)
238 {
239 unsigned i;
240 unsigned nargs = args.length ();
241 gimple call = gimple_build_call_1 (fn, nargs);
242
243 for (i = 0; i < nargs; i++)
244 gimple_call_set_arg (call, i, args[i]);
245
246 return call;
247 }
248
249
250 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
251 arguments. The ... are the arguments. */
252
253 gimple
254 gimple_build_call (tree fn, unsigned nargs, ...)
255 {
256 va_list ap;
257 gimple call;
258 unsigned i;
259
260 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
261
262 call = gimple_build_call_1 (fn, nargs);
263
264 va_start (ap, nargs);
265 for (i = 0; i < nargs; i++)
266 gimple_call_set_arg (call, i, va_arg (ap, tree));
267 va_end (ap);
268
269 return call;
270 }
271
272
273 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
274 arguments. AP contains the arguments. */
275
276 gimple
277 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
278 {
279 gimple call;
280 unsigned i;
281
282 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
283
284 call = gimple_build_call_1 (fn, nargs);
285
286 for (i = 0; i < nargs; i++)
287 gimple_call_set_arg (call, i, va_arg (ap, tree));
288
289 return call;
290 }
291
292
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  /* Same operand layout as ordinary calls (see gimple_build_call_1),
     but the internal-call bit is set directly in the subcode.  */
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
306
307
308 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
309 the number of arguments. The ... are the arguments. */
310
311 gimple
312 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
313 {
314 va_list ap;
315 gimple call;
316 unsigned i;
317
318 call = gimple_build_call_internal_1 (fn, nargs);
319 va_start (ap, nargs);
320 for (i = 0; i < nargs; i++)
321 gimple_call_set_arg (call, i, va_arg (ap, tree));
322 va_end (ap);
323
324 return call;
325 }
326
327
328 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
329 specified in vector ARGS. */
330
331 gimple
332 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
333 {
334 unsigned i, nargs;
335 gimple call;
336
337 nargs = args.length ();
338 call = gimple_build_call_internal_1 (fn, nargs);
339 for (i = 0; i < nargs; i++)
340 gimple_call_set_arg (call, i, args[i]);
341
342 return call;
343 }
344
345
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  /* Prefer the resolved FUNCTION_DECL when the callee is known;
     otherwise fall back to the (indirect) callee expression.  */
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* Only one of the alloca-for-var and from-thunk flags is copied,
     depending on whether the callee is a builtin alloca variant.
     NOTE(review): the mutually exclusive if/else suggests these two
     flags share a subcode bit -- confirm against GF_CALL_* in
     gimple.h.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));
  gimple_call_set_with_bounds (call, CALL_WITH_BOUNDS_P (t));

  return call;
}
385
386
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Decompose the RHS tree into an equivalent (subcode, op1, op2, op3)
     form and delegate construction to gimple_build_assign_with_ops.  */
  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
                                       PASS_MEM_STAT);
}
402
403
404 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
405 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
406 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
407
408 gimple
409 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
410 tree op2, tree op3 MEM_STAT_DECL)
411 {
412 unsigned num_ops;
413 gimple p;
414
415 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
416 code). */
417 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
418
419 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
420 PASS_MEM_STAT);
421 gimple_assign_set_lhs (p, lhs);
422 gimple_assign_set_rhs1 (p, op1);
423 if (op2)
424 {
425 gcc_assert (num_ops > 2);
426 gimple_assign_set_rhs2 (p, op2);
427 }
428
429 if (op3)
430 {
431 gcc_assert (num_ops > 3);
432 gimple_assign_set_rhs3 (p, op3);
433 }
434
435 return p;
436 }
437
/* Overload of the above for callers without a third RHS operand;
   simply forwards with OP3 == NULL_TREE.  */

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
                              tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
                                       PASS_MEM_STAT);
}
445
446
447 /* Build a GIMPLE_COND statement.
448
449 PRED is the condition used to compare LHS and the RHS.
450 T_LABEL is the label to jump to if the condition is true.
451 F_LABEL is the label to jump to otherwise. */
452
453 gimple
454 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
455 tree t_label, tree f_label)
456 {
457 gimple p;
458
459 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
460 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
461 gimple_cond_set_lhs (p, lhs);
462 gimple_cond_set_rhs (p, rhs);
463 gimple_cond_set_true_label (p, t_label);
464 gimple_cond_set_false_label (p, f_label);
465 return p;
466 }
467
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  /* Decompose COND into a comparison code and its two operands, then
     build the statement from those parts.  */
  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}
480
/* Set code, lhs, and rhs of a GIMPLE_COND STMT from a suitable
   boolean expression tree COND.  The statement's labels are left
   untouched.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}
493
/* Build a GIMPLE_LABEL statement for LABEL.  The label is the single
   operand of the tuple.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}
503
/* Build a GIMPLE_GOTO statement to label DEST.  The destination is the
   single operand of the tuple.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}
513
514
/* Build a GIMPLE_NOP statement.  A no-op tuple with zero operands.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
522
523
524 /* Build a GIMPLE_BIND statement.
525 VARS are the variables in BODY.
526 BLOCK is the containing block. */
527
528 gimple
529 gimple_build_bind (tree vars, gimple_seq body, tree block)
530 {
531 gimple p = gimple_alloc (GIMPLE_BIND, 0);
532 gimple_bind_set_vars (p, vars);
533 if (body)
534 gimple_bind_set_body (p, body);
535 if (block)
536 gimple_bind_set_block (p, block);
537 return p;
538 }
539
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels (asm goto).  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple_statement_asm *p;
  /* NOTE(review): strlen returns size_t but is stored in an int here;
     truncation is only a concern for pathologically large asm strings.  */
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  /* All inputs, outputs, clobbers and labels share one operand vector.  */
  p = as_a <gimple_statement_asm *> (
        gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                               ninputs + noutputs + nclobbers + nlabels));

  p->ni = ninputs;
  p->no = noutputs;
  p->nc = nclobbers;
  p->nl = nlabels;
  p->string = ggc_alloc_string (string, size);

  /* Account for the string copy in the ASM allocation statistics.  */
  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
574
575 /* Build a GIMPLE_ASM statement.
576
577 STRING is the assembly code.
578 NINPUT is the number of register inputs.
579 NOUTPUT is the number of register outputs.
580 NCLOBBERS is the number of clobbered registers.
581 INPUTS is a vector of the input register parameters.
582 OUTPUTS is a vector of the output register parameters.
583 CLOBBERS is a vector of the clobbered register parameters.
584 LABELS is a vector of destination labels. */
585
586 gimple
587 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
588 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
589 vec<tree, va_gc> *labels)
590 {
591 gimple p;
592 unsigned i;
593
594 p = gimple_build_asm_1 (string,
595 vec_safe_length (inputs),
596 vec_safe_length (outputs),
597 vec_safe_length (clobbers),
598 vec_safe_length (labels));
599
600 for (i = 0; i < vec_safe_length (inputs); i++)
601 gimple_asm_set_input_op (p, i, (*inputs)[i]);
602
603 for (i = 0; i < vec_safe_length (outputs); i++)
604 gimple_asm_set_output_op (p, i, (*outputs)[i]);
605
606 for (i = 0; i < vec_safe_length (clobbers); i++)
607 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
608
609 for (i = 0; i < vec_safe_length (labels); i++)
610 gimple_asm_set_label_op (p, i, (*labels)[i]);
611
612 return p;
613 }
614
/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler (may be NULL).  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}
630
/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action (may be NULL).  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}
646
647 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
648
649 gimple
650 gimple_build_eh_must_not_throw (tree decl)
651 {
652 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
653
654 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
655 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
656 gimple_eh_must_not_throw_set_fndecl (p, decl);
657
658 return p;
659 }
660
/* Build a GIMPLE_EH_ELSE statement.  N_BODY is executed on the normal
   path, E_BODY on the exceptional path.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}
671
/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate (may be NULL).
   CLEANUP is the cleanup expression (may be NULL).
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple_statement_try *
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple_statement_try *p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = as_a <gimple_statement_try *> (gimple_alloc (GIMPLE_TRY, 0));
  /* The try kind is stored in the subcode.  */
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}
695
/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression (may be NULL).  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}
709
710
/* Build a GIMPLE_RESX statement for EH region number REGION.  */

gimple
gimple_build_resx (int region)
{
  gimple_statement_resx *p =
    as_a <gimple_statement_resx *> (
      gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
  /* The region number lives in a dedicated field, not an operand.  */
  p->region = region;
  return p;
}
722
723
/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement; it must
   not be NULL.  The case labels themselves are filled in by the
   caller (see gimple_build_switch).  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + 1 + nlabels);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}
740
741 /* Build a GIMPLE_SWITCH statement.
742
743 INDEX is the switch's index.
744 DEFAULT_LABEL is the default label
745 ARGS is a vector of labels excluding the default. */
746
747 gimple
748 gimple_build_switch (tree index, tree default_label, vec<tree> args)
749 {
750 unsigned i, nlabels = args.length ();
751
752 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
753
754 /* Copy the labels from the vector to the switch statement. */
755 for (i = 0; i < nlabels; i++)
756 gimple_switch_set_label (p, i + 1, args[i]);
757
758 return p;
759 }
760
/* Build a GIMPLE_EH_DISPATCH statement for EH region number REGION.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple_statement_eh_dispatch *p =
    as_a <gimple_statement_eh_dispatch *> (
      gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
  /* The region number lives in a dedicated field, not an operand.  */
  p->region = region;
  return p;
}
772
/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; the location is taken from STMT when STMT is
   non-NULL.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  /* Two operands: the variable and the value it is bound to.  The
     debug kind is carried in the subcode.  */
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
791
792
/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; the location is taken from STMT when STMT is
   non-NULL.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple stmt MEM_STAT_DECL)
{
  /* Same layout as GIMPLE_DEBUG_BIND, with a different debug subcode.  */
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
812
813
/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can
   execute (may be NULL).
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
829
/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop (may be NULL).
   KIND is the `for' variant.
   CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant
   (may be NULL).  */

gimple
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple_statement_omp_for *p =
    as_a <gimple_statement_omp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->collapse = collapse;
  /* One iteration descriptor per collapsed loop level.  */
  p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);

  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
857
858
859 /* Build a GIMPLE_OMP_PARALLEL statement.
860
861 BODY is sequence of statements which are executed in parallel.
862 CLAUSES, are the OMP parallel construct's clauses.
863 CHILD_FN is the function created for the parallel threads to execute.
864 DATA_ARG are the shared data argument(s). */
865
866 gimple
867 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
868 tree data_arg)
869 {
870 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
871 if (body)
872 gimple_omp_set_body (p, body);
873 gimple_omp_parallel_set_clauses (p, clauses);
874 gimple_omp_parallel_set_child_fn (p, child_fn);
875 gimple_omp_parallel_set_data_arg (p, data_arg);
876
877 return p;
878 }
879
880
/* Build a GIMPLE_OMP_TASK statement.

   BODY is sequence of statements which are executed by the explicit task
   (may be NULL).
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}
907
908
/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section (may be NULL).  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
922
923
/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master
   (may be NULL).  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
937
938
/* Build a GIMPLE_OMP_TASKGROUP statement.

   BODY is the sequence of statements to be executed by the taskgroup
   construct (may be NULL).  */

gimple
gimple_build_omp_taskgroup (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
953
954
/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}
968
/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will executed in
   sequence (may be NULL).  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
983
984
/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return, i.e. when set the
   nowait flag is recorded on the statement via
   gimple_omp_return_set_nowait.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}
997
998
/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements (may be NULL).
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}
1015
1016
/* Build a GIMPLE_OMP_SECTIONS_SWITCH statement; it carries no operands
   or clauses.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
1024
1025
/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once
   (may be NULL).
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}
1042
1043
/* Build a GIMPLE_OMP_TARGET statement.

   BODY is the sequence of statements that will be executed (may be NULL).
   KIND is the kind of the target construct (stored via
   gimple_omp_target_set_kind).
   CLAUSES are any of the OMP target construct's clauses.  */

gimple
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_target_set_clauses (p, clauses);
  gimple_omp_target_set_kind (p, kind);

  return p;
}
1060
1061
/* Build a GIMPLE_OMP_TEAMS statement.

   BODY is the sequence of statements that will be executed (may be NULL).
   CLAUSES are any of the OMP teams construct's clauses.  */

gimple
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_teams_set_clauses (p, clauses);

  return p;
}
1077
1078
/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  LHS receives the value
   loaded from RHS.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}
1089
/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}
1101
/* Build a GIMPLE_TRANSACTION statement with BODY as the transaction
   body and LABEL as the over-label.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}
1112
/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  Both are packed into the
   statement's subcode.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}
1126
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  GS is the offending
   statement; CODE/SUBCODE are what the failed check expected; FILE,
   LINE and FUNCTION identify the check's call site.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  get_tree_code_name (subcode),
                  gimple_code_name[gimple_code (gs)],
                  /* Only print the actual subcode when one is set.  */
                  gs->subcode > 0
                  ? get_tree_code_name ((enum tree_code) gs->subcode)
                  : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1145
1146
1147 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1148 *SEQ_P is NULL, a new sequence is allocated. */
1149
1150 void
1151 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1152 {
1153 gimple_stmt_iterator si;
1154 if (gs == NULL)
1155 return;
1156
1157 si = gsi_last (*seq_p);
1158 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1159 }
1160
1161 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1162 *SEQ_P is NULL, a new sequence is allocated. This function is
1163 similar to gimple_seq_add_stmt, but does not scan the operands.
1164 During gimplification, we need to manipulate statement sequences
1165 before the def/use vectors have been constructed. */
1166
1167 void
1168 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1169 {
1170 gimple_stmt_iterator si;
1171
1172 if (gs == NULL)
1173 return;
1174
1175 si = gsi_last (*seq_p);
1176 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1177 }
1178
1179 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1180 NULL, a new sequence is allocated. */
1181
1182 void
1183 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1184 {
1185 gimple_stmt_iterator si;
1186 if (src == NULL)
1187 return;
1188
1189 si = gsi_last (*dst_p);
1190 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1191 }
1192
1193 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1194 NULL, a new sequence is allocated. This function is
1195 similar to gimple_seq_add_seq, but does not scan the operands. */
1196
1197 void
1198 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1199 {
1200 gimple_stmt_iterator si;
1201 if (src == NULL)
1202 return;
1203
1204 si = gsi_last (*dst_p);
1205 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1206 }
1207
1208 /* Determine whether to assign a location to the statement GS. */
1209
1210 static bool
1211 should_carry_location_p (gimple gs)
1212 {
1213 /* Don't emit a line note for a label. We particularly don't want to
1214 emit one for the break label, since it doesn't actually correspond
1215 to the beginning of the loop/switch. */
1216 if (gimple_code (gs) == GIMPLE_LABEL)
1217 return false;
1218
1219 return true;
1220 }
1221
1222 /* Set the location for gimple statement GS to LOCATION. */
1223
1224 static void
1225 annotate_one_with_location (gimple gs, location_t location)
1226 {
1227 if (!gimple_has_location (gs)
1228 && !gimple_do_not_emit_location_p (gs)
1229 && should_carry_location_p (gs))
1230 gimple_set_location (gs, location);
1231 }
1232
1233 /* Set LOCATION for all the statements after iterator GSI in sequence
1234 SEQ. If GSI is pointing to the end of the sequence, start with the
1235 first statement in SEQ. */
1236
1237 void
1238 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1239 location_t location)
1240 {
1241 if (gsi_end_p (gsi))
1242 gsi = gsi_start (seq);
1243 else
1244 gsi_next (&gsi);
1245
1246 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1247 annotate_one_with_location (gsi_stmt (gsi), location);
1248 }
1249
1250 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1251
1252 void
1253 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1254 {
1255 gimple_stmt_iterator i;
1256
1257 if (gimple_seq_empty_p (stmt_p))
1258 return;
1259
1260 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1261 {
1262 gimple gs = gsi_stmt (i);
1263 annotate_one_with_location (gs, location);
1264 }
1265 }
1266
1267 /* Helper function of empty_body_p. Return true if STMT is an empty
1268 statement. */
1269
1270 static bool
1271 empty_stmt_p (gimple stmt)
1272 {
1273 if (gimple_code (stmt) == GIMPLE_NOP)
1274 return true;
1275 if (gimple_code (stmt) == GIMPLE_BIND)
1276 return empty_body_p (gimple_bind_body (stmt));
1277 return false;
1278 }
1279
1280
1281 /* Return true if BODY contains nothing but empty statements. */
1282
1283 bool
1284 empty_body_p (gimple_seq body)
1285 {
1286 gimple_stmt_iterator i;
1287
1288 if (gimple_seq_empty_p (body))
1289 return true;
1290 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1291 if (!empty_stmt_p (gsi_stmt (i))
1292 && !is_gimple_debug (gsi_stmt (i)))
1293 return false;
1294
1295 return true;
1296 }
1297
1298
1299 /* Perform a deep copy of sequence SRC and return the result. */
1300
1301 gimple_seq
1302 gimple_seq_copy (gimple_seq src)
1303 {
1304 gimple_stmt_iterator gsi;
1305 gimple_seq new_seq = NULL;
1306 gimple stmt;
1307
1308 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1309 {
1310 stmt = gimple_copy (gsi_stmt (gsi));
1311 gimple_seq_add_stmt (&new_seq, stmt);
1312 }
1313
1314 return new_seq;
1315 }
1316
1317
1318
1319 /* Return true if calls C1 and C2 are known to go to the same function. */
1320
1321 bool
1322 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1323 {
1324 if (gimple_call_internal_p (c1))
1325 return (gimple_call_internal_p (c2)
1326 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1327 else
1328 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1329 || (gimple_call_fndecl (c1)
1330 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1331 }
1332
1333 /* Detect flags from a GIMPLE_CALL. This is just like
1334 call_expr_flags, but for gimple tuples. */
1335
1336 int
1337 gimple_call_flags (const_gimple stmt)
1338 {
1339 int flags;
1340 tree decl = gimple_call_fndecl (stmt);
1341
1342 if (decl)
1343 flags = flags_from_decl_or_type (decl);
1344 else if (gimple_call_internal_p (stmt))
1345 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1346 else
1347 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1348
1349 if (stmt->subcode & GF_CALL_NOTHROW)
1350 flags |= ECF_NOTHROW;
1351
1352 return flags;
1353 }
1354
1355 /* Return the "fn spec" string for call STMT. */
1356
1357 static const_tree
1358 gimple_call_fnspec (const_gimple stmt)
1359 {
1360 tree type, attr;
1361
1362 if (gimple_call_internal_p (stmt))
1363 return internal_fn_fnspec (gimple_call_internal_fn (stmt));
1364
1365 type = gimple_call_fntype (stmt);
1366 if (!type)
1367 return NULL_TREE;
1368
1369 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1370 if (!attr)
1371 return NULL_TREE;
1372
1373 return TREE_VALUE (TREE_VALUE (attr));
1374 }
1375
1376 /* Detects argument flags for argument number ARG on call STMT. */
1377
1378 int
1379 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1380 {
1381 const_tree attr = gimple_call_fnspec (stmt);
1382
1383 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1384 return 0;
1385
1386 switch (TREE_STRING_POINTER (attr)[1 + arg])
1387 {
1388 case 'x':
1389 case 'X':
1390 return EAF_UNUSED;
1391
1392 case 'R':
1393 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1394
1395 case 'r':
1396 return EAF_NOCLOBBER | EAF_NOESCAPE;
1397
1398 case 'W':
1399 return EAF_DIRECT | EAF_NOESCAPE;
1400
1401 case 'w':
1402 return EAF_NOESCAPE;
1403
1404 case '.':
1405 default:
1406 return 0;
1407 }
1408 }
1409
1410 /* Detects return flags for the call STMT. */
1411
1412 int
1413 gimple_call_return_flags (const_gimple stmt)
1414 {
1415 const_tree attr;
1416
1417 if (gimple_call_flags (stmt) & ECF_MALLOC)
1418 return ERF_NOALIAS;
1419
1420 attr = gimple_call_fnspec (stmt);
1421 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1422 return 0;
1423
1424 switch (TREE_STRING_POINTER (attr)[0])
1425 {
1426 case '1':
1427 case '2':
1428 case '3':
1429 case '4':
1430 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1431
1432 case 'm':
1433 return ERF_NOALIAS;
1434
1435 case '.':
1436 default:
1437 return 0;
1438 }
1439 }
1440
1441
1442 /* Return true if GS is a copy assignment. */
1443
1444 bool
1445 gimple_assign_copy_p (gimple gs)
1446 {
1447 return (gimple_assign_single_p (gs)
1448 && is_gimple_val (gimple_op (gs, 1)));
1449 }
1450
1451
1452 /* Return true if GS is a SSA_NAME copy assignment. */
1453
1454 bool
1455 gimple_assign_ssa_name_copy_p (gimple gs)
1456 {
1457 return (gimple_assign_single_p (gs)
1458 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1459 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1460 }
1461
1462
1463 /* Return true if GS is an assignment with a unary RHS, but the
1464 operator has no effect on the assigned value. The logic is adapted
1465 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1466 instances in which STRIP_NOPS was previously applied to the RHS of
1467 an assignment.
1468
1469 NOTE: In the use cases that led to the creation of this function
1470 and of gimple_assign_single_p, it is typical to test for either
1471 condition and to proceed in the same manner. In each case, the
1472 assigned value is represented by the single RHS operand of the
1473 assignment. I suspect there may be cases where gimple_assign_copy_p,
1474 gimple_assign_single_p, or equivalent logic is used where a similar
1475 treatment of unary NOPs is appropriate. */
1476
1477 bool
1478 gimple_assign_unary_nop_p (gimple gs)
1479 {
1480 return (is_gimple_assign (gs)
1481 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1482 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1483 && gimple_assign_rhs1 (gs) != error_mark_node
1484 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1485 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1486 }
1487
/* Set BB to be the basic block holding statement STMT.  For labels,
   also keep the CFG's label-to-block map up to date.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* A UID of -1 means this label has not been entered in the
	     map yet: assign it a fresh UID and grow the map with some
	     headroom (1.5x) so repeated insertions stay cheap.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
				     new_len);
	    }
	}

      /* Record (or update) the block now holding this label.  */
      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1524
1525
1526 /* Modify the RHS of the assignment pointed-to by GSI using the
1527 operands in the expression tree EXPR.
1528
1529 NOTE: The statement pointed-to by GSI may be reallocated if it
1530 did not have enough operand slots.
1531
1532 This function is useful to convert an existing tree expression into
1533 the flat representation used for the RHS of a GIMPLE assignment.
1534 It will reallocate memory as needed to expand or shrink the number
1535 of operand slots needed to represent EXPR.
1536
1537 NOTE: If you find yourself building a tree and then calling this
1538 function, you are most certainly doing it the slow way. It is much
1539 better to build a new assignment or to use the function
1540 gimple_assign_set_rhs_with_ops, which does not require an
1541 expression tree to be built. */
1542
1543 void
1544 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1545 {
1546 enum tree_code subcode;
1547 tree op1, op2, op3;
1548
1549 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
1550 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
1551 }
1552
1553
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      /* Clone the old tuple's fields into a larger allocation, then
	 swap the new statement into the sequence in place of the old
	 one.  GSI now points at the replacement.  */
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  /* Operand 0 is the LHS; the RHS operands start at index 1.  */
  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
1590
1591
1592 /* Return the LHS of a statement that performs an assignment,
1593 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1594 for a call to a function that returns no value, or for a
1595 statement other than an assignment or a call. */
1596
1597 tree
1598 gimple_get_lhs (const_gimple stmt)
1599 {
1600 enum gimple_code code = gimple_code (stmt);
1601
1602 if (code == GIMPLE_ASSIGN)
1603 return gimple_assign_lhs (stmt);
1604 else if (code == GIMPLE_CALL)
1605 return gimple_call_lhs (stmt);
1606 else
1607 return NULL_TREE;
1608 }
1609
1610
1611 /* Set the LHS of a statement that performs an assignment,
1612 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1613
1614 void
1615 gimple_set_lhs (gimple stmt, tree lhs)
1616 {
1617 enum gimple_code code = gimple_code (stmt);
1618
1619 if (code == GIMPLE_ASSIGN)
1620 gimple_assign_set_lhs (stmt, lhs);
1621 else if (code == GIMPLE_CALL)
1622 gimple_call_set_lhs (stmt, lhs);
1623 else
1624 gcc_unreachable ();
1625 }
1626
1627
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_EH_ELSE:
	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
	  gimple_eh_else_set_n_body (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
	  gimple_eh_else_set_e_body (copy, new_seq);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  {
	    /* The shallow memcpy above left COPY sharing STMT's
	       per-collapse iteration array; give COPY its own before
	       filling it in below.  */
	    gimple_statement_omp_for *omp_for_copy =
	      as_a <gimple_statement_omp_for *> (copy);
	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
	      ( gimple_omp_for_collapse (stmt));
	  }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  /* Shared tail for all OMP constructs: deep-copy the body.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
	  gimple_transaction_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
1798
1799
1800 /* Return true if statement S has side-effects. We consider a
1801 statement to have side effects if:
1802
1803 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1804 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1805
1806 bool
1807 gimple_has_side_effects (const_gimple s)
1808 {
1809 if (is_gimple_debug (s))
1810 return false;
1811
1812 /* We don't have to scan the arguments to check for
1813 volatile arguments, though, at present, we still
1814 do a scan to check for TREE_SIDE_EFFECTS. */
1815 if (gimple_has_volatile_ops (s))
1816 return true;
1817
1818 if (gimple_code (s) == GIMPLE_ASM
1819 && gimple_asm_volatile_p (s))
1820 return true;
1821
1822 if (is_gimple_call (s))
1823 {
1824 int flags = gimple_call_flags (s);
1825
1826 /* An infinite loop is considered a side effect. */
1827 if (!(flags & (ECF_CONST | ECF_PURE))
1828 || (flags & ECF_LOOPING_CONST_OR_PURE))
1829 return true;
1830
1831 return false;
1832 }
1833
1834 return false;
1835 }
1836
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* Operand 0 of an assignment is its LHS; start at 1 to skip it
	 unless stores are to be considered too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      /* Volatile asms may have arbitrary effects.  */
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* Pass the divisor for binary operations so division by a
	 known-nonzero constant need not be considered trapping.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}
1885
1886 /* Return true if statement S can trap. */
1887
1888 bool
1889 gimple_could_trap_p (gimple s)
1890 {
1891 return gimple_could_trap_p_1 (s, true, true);
1892 }
1893
1894 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1895
1896 bool
1897 gimple_assign_rhs_could_trap_p (gimple s)
1898 {
1899 gcc_assert (is_gimple_assign (s));
1900 return gimple_could_trap_p_1 (s, true, false);
1901 }
1902
1903
1904 /* Print debugging information for gimple stmts generated. */
1905
1906 void
1907 dump_gimple_statistics (void)
1908 {
1909 int i, total_tuples = 0, total_bytes = 0;
1910
1911 if (! GATHER_STATISTICS)
1912 {
1913 fprintf (stderr, "No gimple statistics\n");
1914 return;
1915 }
1916
1917 fprintf (stderr, "\nGIMPLE statements\n");
1918 fprintf (stderr, "Kind Stmts Bytes\n");
1919 fprintf (stderr, "---------------------------------------\n");
1920 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
1921 {
1922 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
1923 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
1924 total_tuples += gimple_alloc_counts[i];
1925 total_bytes += gimple_alloc_sizes[i];
1926 }
1927 fprintf (stderr, "---------------------------------------\n");
1928 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
1929 fprintf (stderr, "---------------------------------------\n");
1930 }
1931
1932
1933 /* Return the number of operands needed on the RHS of a GIMPLE
1934 assignment for an expression with tree code CODE. */
1935
1936 unsigned
1937 get_gimple_rhs_num_ops (enum tree_code code)
1938 {
1939 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
1940
1941 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
1942 return 1;
1943 else if (rhs_class == GIMPLE_BINARY_RHS)
1944 return 2;
1945 else if (rhs_class == GIMPLE_TERNARY_RHS)
1946 return 3;
1947 else
1948 gcc_unreachable ();
1949 }
1950
/* Table mapping every tree code to the GIMPLE RHS class of the
   expression it forms (unary, binary, ternary, single or invalid).
   It is generated by expanding all-tree.def with a DEFTREECODE that
   classifies most codes by their tree-code class and lists the
   explicit per-code exceptions.  */

#define DEFTREECODE(SYM, STRING, TYPE, NARGS)				    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS			    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR						    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
1987
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
	  || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only return the rewritten tree if it is a valid GIMPLE condition;
     otherwise signal failure to the caller.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
2029
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  LHS, virtual
   operands, location, flags and the static chain are carried over;
   the returned statement is not part of any sequence.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  auto_vec<tree> vargs (nargs);
  gimple new_stmt;

  /* Collect the arguments that survive the filter.  */
  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      vargs.quick_push (gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);

  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Preserve the virtual SSA operands of the original call.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  /* Mark the copy so its SSA operands get (re)computed.  */
  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
2066
2067
2068
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Same byte address: the fields match when the sub-byte bit
	 positions agree too.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
2119
2120
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* First try to match TYPE against the standard C integer type nodes
     by pointer identity, so that e.g. "int" maps to "unsigned int"
     rather than to some other type that merely shares its precision.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Target-specific __intN types (e.g. __int128), when enabled.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

  /* Mode-named integer type nodes.  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Helper macros matching the fixed-point type nodes; expanded below
     for each fract/accum flavor (and their saturating variants).  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* TYPE did not match any standard node by identity; fall back to
     matching by mode and precision together.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
      return unsignedp
	? int_n_trees[i].unsigned_type
	: int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* Last resort: build a fresh integer type of the right precision
     and signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2297
2298
2299 /* Return an unsigned type the same as TYPE in other respects. */
2300
2301 tree
2302 gimple_unsigned_type (tree type)
2303 {
2304 return gimple_signed_or_unsigned_type (true, type);
2305 }
2306
2307
2308 /* Return a signed type the same as TYPE in other respects. */
2309
2310 tree
2311 gimple_signed_type (tree type)
2312 {
2313 return gimple_signed_or_unsigned_type (false, type);
2314 }
2315
2316
2317 /* Return the typed-based alias set for T, which may be an expression
2318 or a type. Return -1 if we don't do anything special. */
2319
2320 alias_set_type
2321 gimple_get_alias_set (tree t)
2322 {
2323 tree u;
2324
2325 /* Permit type-punning when accessing a union, provided the access
2326 is directly through the union. For example, this code does not
2327 permit taking the address of a union member and then storing
2328 through it. Even the type-punning allowed here is a GCC
2329 extension, albeit a common and useful one; the C standard says
2330 that such accesses have implementation-defined behavior. */
2331 for (u = t;
2332 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2333 u = TREE_OPERAND (u, 0))
2334 if (TREE_CODE (u) == COMPONENT_REF
2335 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2336 return 0;
2337
2338 /* That's all the expressions we handle specially. */
2339 if (!TYPE_P (t))
2340 return -1;
2341
2342 /* For convenience, follow the C standard when dealing with
2343 character types. Any object may be accessed via an lvalue that
2344 has character type. */
2345 if (t == char_type_node
2346 || t == signed_char_type_node
2347 || t == unsigned_char_type_node)
2348 return 0;
2349
2350 /* Allow aliasing between signed and unsigned variants of the same
2351 type. We treat the signed variant as canonical. */
2352 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2353 {
2354 tree t1 = gimple_signed_type (t);
2355
2356 /* t1 == t can happen for boolean nodes which are always unsigned. */
2357 if (t1 != t)
2358 return get_alias_set (t1);
2359 }
2360
2361 return -1;
2362 }
2363
2364
2365 /* Helper for gimple_ior_addresses_taken_1. */
2366
2367 static bool
2368 gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
2369 {
2370 bitmap addresses_taken = (bitmap)data;
2371 addr = get_base_address (addr);
2372 if (addr
2373 && DECL_P (addr))
2374 {
2375 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2376 return true;
2377 }
2378 return false;
2379 }
2380
2381 /* Set the bit for the uid of all decls that have their address taken
2382 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2383 were any in this stmt. */
2384
2385 bool
2386 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2387 {
2388 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2389 gimple_ior_addresses_taken_1);
2390 }
2391
2392
2393 /* Return true if TYPE1 and TYPE2 are compatible enough for builtin
2394 processing. */
2395
2396 static bool
2397 validate_type (tree type1, tree type2)
2398 {
2399 if (INTEGRAL_TYPE_P (type1)
2400 && INTEGRAL_TYPE_P (type2))
2401 ;
2402 else if (POINTER_TYPE_P (type1)
2403 && POINTER_TYPE_P (type2))
2404 ;
2405 else if (TREE_CODE (type1)
2406 != TREE_CODE (type2))
2407 return false;
2408 return true;
2409 }
2410
2411 /* Return true when STMTs arguments and return value match those of FNDECL,
2412 a decl of a builtin function. */
2413
2414 bool
2415 gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
2416 {
2417 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
2418
2419 tree ret = gimple_call_lhs (stmt);
2420 if (ret
2421 && !validate_type (TREE_TYPE (ret), TREE_TYPE (TREE_TYPE (fndecl))))
2422 return false;
2423
2424 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2425 unsigned nargs = gimple_call_num_args (stmt);
2426 for (unsigned i = 0; i < nargs; ++i)
2427 {
2428 /* Variadic args follow. */
2429 if (!targs)
2430 return true;
2431 tree arg = gimple_call_arg (stmt, i);
2432 if (!validate_type (TREE_TYPE (arg), TREE_VALUE (targs)))
2433 return false;
2434 targs = TREE_CHAIN (targs);
2435 }
2436 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2437 return false;
2438 return true;
2439 }
2440
2441 /* Return true when STMT is builtins call. */
2442
2443 bool
2444 gimple_call_builtin_p (const_gimple stmt)
2445 {
2446 tree fndecl;
2447 if (is_gimple_call (stmt)
2448 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2449 && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2450 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2451 return false;
2452 }
2453
2454 /* Return true when STMT is builtins call to CLASS. */
2455
2456 bool
2457 gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
2458 {
2459 tree fndecl;
2460 if (is_gimple_call (stmt)
2461 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2462 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2463 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2464 return false;
2465 }
2466
2467 /* Return true when STMT is builtins call to CODE of CLASS. */
2468
2469 bool
2470 gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
2471 {
2472 tree fndecl;
2473 if (is_gimple_call (stmt)
2474 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2475 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2476 && DECL_FUNCTION_CODE (fndecl) == code)
2477 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2478 return false;
2479 }
2480
2481 /* Return true if STMT clobbers memory. STMT is required to be a
2482 GIMPLE_ASM. */
2483
2484 bool
2485 gimple_asm_clobbers_memory_p (const_gimple stmt)
2486 {
2487 unsigned i;
2488
2489 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2490 {
2491 tree op = gimple_asm_clobber_op (stmt, i);
2492 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2493 return true;
2494 }
2495
2496 return false;
2497 }
2498
2499 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2500
2501 void
2502 dump_decl_set (FILE *file, bitmap set)
2503 {
2504 if (set)
2505 {
2506 bitmap_iterator bi;
2507 unsigned i;
2508
2509 fprintf (file, "{ ");
2510
2511 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2512 {
2513 fprintf (file, "D.%u", i);
2514 fprintf (file, " ");
2515 }
2516
2517 fprintf (file, "}");
2518 }
2519 else
2520 fprintf (file, "NIL");
2521 }
2522
2523 /* Return true when CALL is a call stmt that definitely doesn't
2524 free any memory or makes it unavailable otherwise. */
2525 bool
2526 nonfreeing_call_p (gimple call)
2527 {
2528 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2529 && gimple_call_flags (call) & ECF_LEAF)
2530 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2531 {
2532 /* Just in case these become ECF_LEAF in the future. */
2533 case BUILT_IN_FREE:
2534 case BUILT_IN_TM_FREE:
2535 case BUILT_IN_REALLOC:
2536 case BUILT_IN_STACK_RESTORE:
2537 return false;
2538 default:
2539 return true;
2540 }
2541
2542 return false;
2543 }
2544
2545 /* Callback for walk_stmt_load_store_ops.
2546
2547 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2548 otherwise.
2549
2550 This routine only makes a superficial check for a dereference. Thus
2551 it must only be used if it is safe to return a false negative. */
2552 static bool
2553 check_loadstore (gimple, tree op, tree, void *data)
2554 {
2555 if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2556 && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2557 return true;
2558 return false;
2559 }
2560
2561 /* If OP can be inferred to be non-NULL after STMT executes, return true.
2562
2563 DEREFERENCE is TRUE if we can use a pointer dereference to infer a
2564 non-NULL range, FALSE otherwise.
2565
2566 ATTRIBUTE is TRUE if we can use attributes to infer a non-NULL range
2567 for function arguments and return values. FALSE otherwise. */
2568
2569 bool
2570 infer_nonnull_range (gimple stmt, tree op, bool dereference, bool attribute)
2571 {
2572 /* We can only assume that a pointer dereference will yield
2573 non-NULL if -fdelete-null-pointer-checks is enabled. */
2574 if (!flag_delete_null_pointer_checks
2575 || !POINTER_TYPE_P (TREE_TYPE (op))
2576 || gimple_code (stmt) == GIMPLE_ASM)
2577 return false;
2578
2579 if (dereference
2580 && walk_stmt_load_store_ops (stmt, (void *)op,
2581 check_loadstore, check_loadstore))
2582 return true;
2583
2584 if (attribute
2585 && is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2586 {
2587 tree fntype = gimple_call_fntype (stmt);
2588 tree attrs = TYPE_ATTRIBUTES (fntype);
2589 for (; attrs; attrs = TREE_CHAIN (attrs))
2590 {
2591 attrs = lookup_attribute ("nonnull", attrs);
2592
2593 /* If "nonnull" wasn't specified, we know nothing about
2594 the argument. */
2595 if (attrs == NULL_TREE)
2596 return false;
2597
2598 /* If "nonnull" applies to all the arguments, then ARG
2599 is non-null if it's in the argument list. */
2600 if (TREE_VALUE (attrs) == NULL_TREE)
2601 {
2602 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2603 {
2604 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
2605 && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
2606 return true;
2607 }
2608 return false;
2609 }
2610
2611 /* Now see if op appears in the nonnull list. */
2612 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2613 {
2614 int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2615 tree arg = gimple_call_arg (stmt, idx);
2616 if (operand_equal_p (op, arg, 0))
2617 return true;
2618 }
2619 }
2620 }
2621
2622 /* If this function is marked as returning non-null, then we can
2623 infer OP is non-null if it is used in the return statement. */
2624 if (attribute
2625 && gimple_code (stmt) == GIMPLE_RETURN
2626 && gimple_return_retval (stmt)
2627 && operand_equal_p (gimple_return_retval (stmt), op, 0)
2628 && lookup_attribute ("returns_nonnull",
2629 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2630 return true;
2631
2632 return false;
2633 }
2634
2635 /* Compare two case labels. Because the front end should already have
2636 made sure that case ranges do not overlap, it is enough to only compare
2637 the CASE_LOW values of each case label. */
2638
2639 static int
2640 compare_case_labels (const void *p1, const void *p2)
2641 {
2642 const_tree const case1 = *(const_tree const*)p1;
2643 const_tree const case2 = *(const_tree const*)p2;
2644
2645 /* The 'default' case label always goes first. */
2646 if (!CASE_LOW (case1))
2647 return -1;
2648 else if (!CASE_LOW (case2))
2649 return 1;
2650 else
2651 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2652 }
2653
/* Sort the case labels in LABEL_VEC in place in ascending order.
   A 'default' label (NULL CASE_LOW) sorts first; see
   compare_case_labels for the ordering contract.  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
2661 \f
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  /* First pass: canonicalize each label in place, removing
     unreachable ones; I only advances when the element is kept.  */
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* I is now the number of labels kept.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Verify the sorted labels form one contiguous run with
		 no gap between consecutive cases.  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if (wi::add (low, 1) != high)
		    break;
		}
	      if (i == len)
		{
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
2832
2833 /* Set the location of all statements in SEQ to LOC. */
2834
2835 void
2836 gimple_seq_set_location (gimple_seq seq, location_t loc)
2837 {
2838 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2839 gimple_set_location (gsi_stmt (i), loc);
2840 }
2841
2842 /* Release SSA_NAMEs in SEQ as well as the GIMPLE statements. */
2843
2844 void
2845 gimple_seq_discard (gimple_seq seq)
2846 {
2847 gimple_stmt_iterator gsi;
2848
2849 for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
2850 {
2851 gimple stmt = gsi_stmt (gsi);
2852 gsi_remove (&gsi, true);
2853 release_defs (stmt);
2854 ggc_free (stmt);
2855 }
2856 }