]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimple.c
* gimple-walk.h: New File. Relocate prototypes from gimple.h.
[thirdparty/gcc.git] / gcc / gimple.c
1 /* Gimple IR support functions.
2
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "ggc.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "gimple.h"
32 #include "gimple-iterator.h"
33 #include "gimple-walk.h"
34 #include "gimplify.h"
35 #include "diagnostic.h"
36 #include "value-prof.h"
37 #include "flags.h"
38 #include "alias.h"
39 #include "demangle.h"
40 #include "langhooks.h"
41 #include "bitmap.h"
42
43
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
/* For each GSS_* structure kind listed in gsstruct.def, record the byte
   offset of the trailing operand vector, or 0 when the tuple kind has no
   tree operands.  Indexed by gimple_statement_structure_enum.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
54
/* Size in bytes of each GSS_* structure kind, again indexed by
   gimple_statement_structure_enum; used by gimple_size below.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
60
/* Human-readable name for each GIMPLE_* statement code, indexed by
   enum gimple_code; used in diagnostics and dumps.  */
#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
66
/* Map from each GIMPLE_* statement code to the GSS_* structure kind
   that implements it (see gss_for_code).  */
#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
72
/* Gimple stats.  */

/* Number of tuples allocated and total bytes allocated, per allocation
   kind; only updated when GATHER_STATISTICS is enabled (see
   gimple_alloc_stat).  */
int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
90
91 /* Gimple tuple constructors.
92 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
93 be passed a NULL to start with an empty sequence. */
94
95 /* Set the code for statement G to CODE. */
96
97 static inline void
98 gimple_set_code (gimple g, enum gimple_code code)
99 {
100 g->gsbase.code = code;
101 }
102
103 /* Return the number of bytes needed to hold a GIMPLE statement with
104 code CODE. */
105
106 static inline size_t
107 gimple_size (enum gimple_code code)
108 {
109 return gsstruct_code_size[gss_for_code (code)];
110 }
111
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  MEM_STAT_DECL threads memory-statistics information
   through when that support is compiled in.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    /* The base structure already has room for one operand (see the
       gimple_ops_offset_ comment above), hence NUM_OPS - 1 extras.  */
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  /* GGC-allocate zeroed storage, then fill in the header fields.  */
  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
143
144 /* Set SUBCODE to be the code of the expression computed by statement G. */
145
146 static inline void
147 gimple_set_subcode (gimple g, unsigned subcode)
148 {
149 /* We only have 16 bits for the RHS code. Assert that we are not
150 overflowing it. */
151 gcc_assert (subcode < (1 << 16));
152 g->gsbase.subcode = subcode;
153 }
154


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

/* Convenience wrapper that supplies the memory-statistics arguments.  */
#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
173
174
175 /* Build a GIMPLE_RETURN statement returning RETVAL. */
176
177 gimple
178 gimple_build_return (tree retval)
179 {
180 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
181 if (retval)
182 gimple_return_set_retval (s, retval);
183 return s;
184 }
185
186 /* Reset alias information on call S. */
187
188 void
189 gimple_call_reset_alias_info (gimple s)
190 {
191 if (gimple_call_flags (s) & ECF_CONST)
192 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
193 else
194 pt_solution_reset (gimple_call_use_set (s));
195 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
196 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
197 else
198 pt_solution_reset (gimple_call_clobber_set (s));
199 }
200
201 /* Helper for gimple_build_call, gimple_build_call_valist,
202 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
203 components of a GIMPLE_CALL statement to function FN with NARGS
204 arguments. */
205
206 static inline gimple
207 gimple_build_call_1 (tree fn, unsigned nargs)
208 {
209 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
210 if (TREE_CODE (fn) == FUNCTION_DECL)
211 fn = build_fold_addr_expr (fn);
212 gimple_set_op (s, 1, fn);
213 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
214 gimple_call_reset_alias_info (s);
215 return s;
216 }
217
218
219 /* Build a GIMPLE_CALL statement to function FN with the arguments
220 specified in vector ARGS. */
221
222 gimple
223 gimple_build_call_vec (tree fn, vec<tree> args)
224 {
225 unsigned i;
226 unsigned nargs = args.length ();
227 gimple call = gimple_build_call_1 (fn, nargs);
228
229 for (i = 0; i < nargs; i++)
230 gimple_call_set_arg (call, i, args[i]);
231
232 return call;
233 }
234
235
236 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
237 arguments. The ... are the arguments. */
238
239 gimple
240 gimple_build_call (tree fn, unsigned nargs, ...)
241 {
242 va_list ap;
243 gimple call;
244 unsigned i;
245
246 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
247
248 call = gimple_build_call_1 (fn, nargs);
249
250 va_start (ap, nargs);
251 for (i = 0; i < nargs; i++)
252 gimple_call_set_arg (call, i, va_arg (ap, tree));
253 va_end (ap);
254
255 return call;
256 }
257
258
259 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
260 arguments. AP contains the arguments. */
261
262 gimple
263 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
264 {
265 gimple call;
266 unsigned i;
267
268 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
269
270 call = gimple_build_call_1 (fn, nargs);
271
272 for (i = 0; i < nargs; i++)
273 gimple_call_set_arg (call, i, va_arg (ap, tree));
274
275 return call;
276 }
277
278
279 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
280 Build the basic components of a GIMPLE_CALL statement to internal
281 function FN with NARGS arguments. */
282
283 static inline gimple
284 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
285 {
286 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
287 s->gsbase.subcode |= GF_CALL_INTERNAL;
288 gimple_call_set_internal_fn (s, fn);
289 gimple_call_reset_alias_info (s);
290 return s;
291 }
292
293
294 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
295 the number of arguments. The ... are the arguments. */
296
297 gimple
298 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
299 {
300 va_list ap;
301 gimple call;
302 unsigned i;
303
304 call = gimple_build_call_internal_1 (fn, nargs);
305 va_start (ap, nargs);
306 for (i = 0; i < nargs; i++)
307 gimple_call_set_arg (call, i, va_arg (ap, tree));
308 va_end (ap);
309
310 return call;
311 }
312
313
314 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
315 specified in vector ARGS. */
316
317 gimple
318 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
319 {
320 unsigned i, nargs;
321 gimple call;
322
323 nargs = args.length ();
324 call = gimple_build_call_internal_1 (fn, nargs);
325 for (i = 0; i < nargs; i++)
326 gimple_call_set_arg (call, i, args[i]);
327
328 return call;
329 }
330
331
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  /* Prefer the resolved FUNCTION_DECL when one is known; otherwise
     fall back to the raw callee expression.  */
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* The alloca-for-var and from-thunk bits share storage, so only one
     of the two is copied depending on the callee.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}
370
371
372 /* Return index of INDEX's non bound argument of the call. */
373
374 unsigned
375 gimple_call_get_nobnd_arg_index (const_gimple gs, unsigned index)
376 {
377 unsigned num_args = gimple_call_num_args (gs);
378 for (unsigned n = 0; n < num_args; n++)
379 {
380 if (POINTER_BOUNDS_P (gimple_call_arg (gs, n)))
381 continue;
382 else if (index)
383 index--;
384 else
385 return n;
386 }
387
388 gcc_unreachable ();
389 }
390
391
392 /* Build a GIMPLE_ASSIGN statement.
393
394 LHS of the assignment.
395 RHS of the assignment which can be unary or binary. */
396
397 gimple
398 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
399 {
400 enum tree_code subcode;
401 tree op1, op2, op3;
402
403 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
404 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
405 PASS_MEM_STAT);
406 }
407
408
409 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
410 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
411 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
412
413 gimple
414 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
415 tree op2, tree op3 MEM_STAT_DECL)
416 {
417 unsigned num_ops;
418 gimple p;
419
420 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
421 code). */
422 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
423
424 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
425 PASS_MEM_STAT);
426 gimple_assign_set_lhs (p, lhs);
427 gimple_assign_set_rhs1 (p, op1);
428 if (op2)
429 {
430 gcc_assert (num_ops > 2);
431 gimple_assign_set_rhs2 (p, op2);
432 }
433
434 if (op3)
435 {
436 gcc_assert (num_ops > 3);
437 gimple_assign_set_rhs3 (p, op3);
438 }
439
440 return p;
441 }
442
443 gimple
444 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
445 tree op2 MEM_STAT_DECL)
446 {
447 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
448 PASS_MEM_STAT);
449 }
450
451
452 /* Build a GIMPLE_COND statement.
453
454 PRED is the condition used to compare LHS and the RHS.
455 T_LABEL is the label to jump to if the condition is true.
456 F_LABEL is the label to jump to otherwise. */
457
458 gimple
459 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
460 tree t_label, tree f_label)
461 {
462 gimple p;
463
464 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
465 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
466 gimple_cond_set_lhs (p, lhs);
467 gimple_cond_set_rhs (p, rhs);
468 gimple_cond_set_true_label (p, t_label);
469 gimple_cond_set_false_label (p, f_label);
470 return p;
471 }
472
473 /* Build a GIMPLE_COND statement from the conditional expression tree
474 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
475
476 gimple
477 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
478 {
479 enum tree_code code;
480 tree lhs, rhs;
481
482 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
483 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
484 }
485
486 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
487 boolean expression tree COND. */
488
489 void
490 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
491 {
492 enum tree_code code;
493 tree lhs, rhs;
494
495 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
496 gimple_cond_set_condition (stmt, code, lhs, rhs);
497 }
498
499 /* Build a GIMPLE_LABEL statement for LABEL. */
500
501 gimple
502 gimple_build_label (tree label)
503 {
504 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
505 gimple_label_set_label (p, label);
506 return p;
507 }
508
509 /* Build a GIMPLE_GOTO statement to label DEST. */
510
511 gimple
512 gimple_build_goto (tree dest)
513 {
514 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
515 gimple_goto_set_dest (p, dest);
516 return p;
517 }
518
519
520 /* Build a GIMPLE_NOP statement. */
521
522 gimple
523 gimple_build_nop (void)
524 {
525 return gimple_alloc (GIMPLE_NOP, 0);
526 }
527
528
529 /* Build a GIMPLE_BIND statement.
530 VARS are the variables in BODY.
531 BLOCK is the containing block. */
532
533 gimple
534 gimple_build_bind (tree vars, gimple_seq body, tree block)
535 {
536 gimple p = gimple_alloc (GIMPLE_BIND, 0);
537 gimple_bind_set_vars (p, vars);
538 if (body)
539 gimple_bind_set_body (p, body);
540 if (block)
541 gimple_bind_set_block (p, block);
542 return p;
543 }
544
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels (asm goto).
*/

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  /* One operand slot per input, output, clobber and label.  */
  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  /* Copy the template into GC-managed storage.  */
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
578
579 /* Build a GIMPLE_ASM statement.
580
581 STRING is the assembly code.
582 NINPUT is the number of register inputs.
583 NOUTPUT is the number of register outputs.
584 NCLOBBERS is the number of clobbered registers.
585 INPUTS is a vector of the input register parameters.
586 OUTPUTS is a vector of the output register parameters.
587 CLOBBERS is a vector of the clobbered register parameters.
588 LABELS is a vector of destination labels. */
589
590 gimple
591 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
592 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
593 vec<tree, va_gc> *labels)
594 {
595 gimple p;
596 unsigned i;
597
598 p = gimple_build_asm_1 (string,
599 vec_safe_length (inputs),
600 vec_safe_length (outputs),
601 vec_safe_length (clobbers),
602 vec_safe_length (labels));
603
604 for (i = 0; i < vec_safe_length (inputs); i++)
605 gimple_asm_set_input_op (p, i, (*inputs)[i]);
606
607 for (i = 0; i < vec_safe_length (outputs); i++)
608 gimple_asm_set_output_op (p, i, (*outputs)[i]);
609
610 for (i = 0; i < vec_safe_length (clobbers); i++)
611 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
612
613 for (i = 0; i < vec_safe_length (labels); i++)
614 gimple_asm_set_label_op (p, i, (*labels)[i]);
615
616 return p;
617 }
618
619 /* Build a GIMPLE_CATCH statement.
620
621 TYPES are the catch types.
622 HANDLER is the exception handler. */
623
624 gimple
625 gimple_build_catch (tree types, gimple_seq handler)
626 {
627 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
628 gimple_catch_set_types (p, types);
629 if (handler)
630 gimple_catch_set_handler (p, handler);
631
632 return p;
633 }
634
635 /* Build a GIMPLE_EH_FILTER statement.
636
637 TYPES are the filter's types.
638 FAILURE is the filter's failure action. */
639
640 gimple
641 gimple_build_eh_filter (tree types, gimple_seq failure)
642 {
643 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
644 gimple_eh_filter_set_types (p, types);
645 if (failure)
646 gimple_eh_filter_set_failure (p, failure);
647
648 return p;
649 }
650
651 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
652
653 gimple
654 gimple_build_eh_must_not_throw (tree decl)
655 {
656 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
657
658 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
659 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
660 gimple_eh_must_not_throw_set_fndecl (p, decl);
661
662 return p;
663 }
664
665 /* Build a GIMPLE_EH_ELSE statement. */
666
667 gimple
668 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
669 {
670 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
671 gimple_eh_else_set_n_body (p, n_body);
672 gimple_eh_else_set_e_body (p, e_body);
673 return p;
674 }
675
676 /* Build a GIMPLE_TRY statement.
677
678 EVAL is the expression to evaluate.
679 CLEANUP is the cleanup expression.
680 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
681 whether this is a try/catch or a try/finally respectively. */
682
683 gimple
684 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
685 enum gimple_try_flags kind)
686 {
687 gimple p;
688
689 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
690 p = gimple_alloc (GIMPLE_TRY, 0);
691 gimple_set_subcode (p, kind);
692 if (eval)
693 gimple_try_set_eval (p, eval);
694 if (cleanup)
695 gimple_try_set_cleanup (p, cleanup);
696
697 return p;
698 }
699
700 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
701
702 CLEANUP is the cleanup expression. */
703
704 gimple
705 gimple_build_wce (gimple_seq cleanup)
706 {
707 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
708 if (cleanup)
709 gimple_wce_set_cleanup (p, cleanup);
710
711 return p;
712 }
713
714
715 /* Build a GIMPLE_RESX statement. */
716
717 gimple
718 gimple_build_resx (int region)
719 {
720 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
721 p->gimple_eh_ctrl.region = region;
722 return p;
723 }
724
725
726 /* The helper for constructing a gimple switch statement.
727 INDEX is the switch's index.
728 NLABELS is the number of labels in the switch excluding the default.
729 DEFAULT_LABEL is the default label for the switch statement. */
730
731 gimple
732 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
733 {
734 /* nlabels + 1 default label + 1 index. */
735 gcc_checking_assert (default_label);
736 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
737 1 + 1 + nlabels);
738 gimple_switch_set_index (p, index);
739 gimple_switch_set_default_label (p, default_label);
740 return p;
741 }
742
743 /* Build a GIMPLE_SWITCH statement.
744
745 INDEX is the switch's index.
746 DEFAULT_LABEL is the default label
747 ARGS is a vector of labels excluding the default. */
748
749 gimple
750 gimple_build_switch (tree index, tree default_label, vec<tree> args)
751 {
752 unsigned i, nlabels = args.length ();
753
754 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
755
756 /* Copy the labels from the vector to the switch statement. */
757 for (i = 0; i < nlabels; i++)
758 gimple_switch_set_label (p, i + 1, args[i]);
759
760 return p;
761 }
762
763 /* Build a GIMPLE_EH_DISPATCH statement. */
764
765 gimple
766 gimple_build_eh_dispatch (int region)
767 {
768 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
769 p->gimple_eh_ctrl.region = region;
770 return p;
771 }
772
773 /* Build a new GIMPLE_DEBUG_BIND statement.
774
775 VAR is bound to VALUE; block and location are taken from STMT. */
776
777 gimple
778 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
779 {
780 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
781 (unsigned)GIMPLE_DEBUG_BIND, 2
782 PASS_MEM_STAT);
783
784 gimple_debug_bind_set_var (p, var);
785 gimple_debug_bind_set_value (p, value);
786 if (stmt)
787 gimple_set_location (p, gimple_location (stmt));
788
789 return p;
790 }
791
792
793 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
794
795 VAR is bound to VALUE; block and location are taken from STMT. */
796
797 gimple
798 gimple_build_debug_source_bind_stat (tree var, tree value,
799 gimple stmt MEM_STAT_DECL)
800 {
801 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
802 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
803 PASS_MEM_STAT);
804
805 gimple_debug_source_bind_set_var (p, var);
806 gimple_debug_source_bind_set_value (p, value);
807 if (stmt)
808 gimple_set_location (p, gimple_location (stmt));
809
810 return p;
811 }
812
813
814 /* Build a GIMPLE_OMP_CRITICAL statement.
815
816 BODY is the sequence of statements for which only one thread can execute.
817 NAME is optional identifier for this critical block. */
818
819 gimple
820 gimple_build_omp_critical (gimple_seq body, tree name)
821 {
822 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
823 gimple_omp_critical_set_name (p, name);
824 if (body)
825 gimple_omp_set_body (p, body);
826
827 return p;
828 }
829
/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->gimple_omp_for.collapse = collapse;
  /* Allocate one iteration descriptor per collapsed loop level,
     zero-initialized, in GC-managed storage.  */
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
856
857
858 /* Build a GIMPLE_OMP_PARALLEL statement.
859
860 BODY is sequence of statements which are executed in parallel.
861 CLAUSES, are the OMP parallel construct's clauses.
862 CHILD_FN is the function created for the parallel threads to execute.
863 DATA_ARG are the shared data argument(s). */
864
865 gimple
866 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
867 tree data_arg)
868 {
869 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
870 if (body)
871 gimple_omp_set_body (p, body);
872 gimple_omp_parallel_set_clauses (p, clauses);
873 gimple_omp_parallel_set_child_fn (p, child_fn);
874 gimple_omp_parallel_set_data_arg (p, data_arg);
875
876 return p;
877 }
878
879
880 /* Build a GIMPLE_OMP_TASK statement.
881
882 BODY is sequence of statements which are executed by the explicit task.
883 CLAUSES, are the OMP parallel construct's clauses.
884 CHILD_FN is the function created for the parallel threads to execute.
885 DATA_ARG are the shared data argument(s).
886 COPY_FN is the optional function for firstprivate initialization.
887 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
888
889 gimple
890 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
891 tree data_arg, tree copy_fn, tree arg_size,
892 tree arg_align)
893 {
894 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
895 if (body)
896 gimple_omp_set_body (p, body);
897 gimple_omp_task_set_clauses (p, clauses);
898 gimple_omp_task_set_child_fn (p, child_fn);
899 gimple_omp_task_set_data_arg (p, data_arg);
900 gimple_omp_task_set_copy_fn (p, copy_fn);
901 gimple_omp_task_set_arg_size (p, arg_size);
902 gimple_omp_task_set_arg_align (p, arg_align);
903
904 return p;
905 }
906
907
908 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
909
910 BODY is the sequence of statements in the section. */
911
912 gimple
913 gimple_build_omp_section (gimple_seq body)
914 {
915 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
916 if (body)
917 gimple_omp_set_body (p, body);
918
919 return p;
920 }
921
922
923 /* Build a GIMPLE_OMP_MASTER statement.
924
925 BODY is the sequence of statements to be executed by just the master. */
926
927 gimple
928 gimple_build_omp_master (gimple_seq body)
929 {
930 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
931 if (body)
932 gimple_omp_set_body (p, body);
933
934 return p;
935 }
936
937
938 /* Build a GIMPLE_OMP_TASKGROUP statement.
939
940 BODY is the sequence of statements to be executed by the taskgroup
941 construct. */
942
943 gimple
944 gimple_build_omp_taskgroup (gimple_seq body)
945 {
946 gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
947 if (body)
948 gimple_omp_set_body (p, body);
949
950 return p;
951 }
952
953
954 /* Build a GIMPLE_OMP_CONTINUE statement.
955
956 CONTROL_DEF is the definition of the control variable.
957 CONTROL_USE is the use of the control variable. */
958
959 gimple
960 gimple_build_omp_continue (tree control_def, tree control_use)
961 {
962 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
963 gimple_omp_continue_set_control_def (p, control_def);
964 gimple_omp_continue_set_control_use (p, control_use);
965 return p;
966 }
967
968 /* Build a GIMPLE_OMP_ORDERED statement.
969
970 BODY is the sequence of statements inside a loop that will executed in
971 sequence. */
972
973 gimple
974 gimple_build_omp_ordered (gimple_seq body)
975 {
976 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
977 if (body)
978 gimple_omp_set_body (p, body);
979
980 return p;
981 }
982
983
984 /* Build a GIMPLE_OMP_RETURN statement.
985 WAIT_P is true if this is a non-waiting return. */
986
987 gimple
988 gimple_build_omp_return (bool wait_p)
989 {
990 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
991 if (wait_p)
992 gimple_omp_return_set_nowait (p);
993
994 return p;
995 }
996
997
998 /* Build a GIMPLE_OMP_SECTIONS statement.
999
1000 BODY is a sequence of section statements.
1001 CLAUSES are any of the OMP sections contsruct's clauses: private,
1002 firstprivate, lastprivate, reduction, and nowait. */
1003
1004 gimple
1005 gimple_build_omp_sections (gimple_seq body, tree clauses)
1006 {
1007 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1008 if (body)
1009 gimple_omp_set_body (p, body);
1010 gimple_omp_sections_set_clauses (p, clauses);
1011
1012 return p;
1013 }
1014
1015
1016 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1017
1018 gimple
1019 gimple_build_omp_sections_switch (void)
1020 {
1021 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1022 }
1023
1024
1025 /* Build a GIMPLE_OMP_SINGLE statement.
1026
1027 BODY is the sequence of statements that will be executed once.
1028 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1029 copyprivate, nowait. */
1030
1031 gimple
1032 gimple_build_omp_single (gimple_seq body, tree clauses)
1033 {
1034 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1035 if (body)
1036 gimple_omp_set_body (p, body);
1037 gimple_omp_single_set_clauses (p, clauses);
1038
1039 return p;
1040 }
1041
1042
1043 /* Build a GIMPLE_OMP_TARGET statement.
1044
1045 BODY is the sequence of statements that will be executed.
1046 CLAUSES are any of the OMP target construct's clauses. */
1047
1048 gimple
1049 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1050 {
1051 gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
1052 if (body)
1053 gimple_omp_set_body (p, body);
1054 gimple_omp_target_set_clauses (p, clauses);
1055 gimple_omp_target_set_kind (p, kind);
1056
1057 return p;
1058 }
1059
1060
1061 /* Build a GIMPLE_OMP_TEAMS statement.
1062
1063 BODY is the sequence of statements that will be executed.
1064 CLAUSES are any of the OMP teams construct's clauses. */
1065
1066 gimple
1067 gimple_build_omp_teams (gimple_seq body, tree clauses)
1068 {
1069 gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
1070 if (body)
1071 gimple_omp_set_body (p, body);
1072 gimple_omp_teams_set_clauses (p, clauses);
1073
1074 return p;
1075 }
1076
1077
1078 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1079
1080 gimple
1081 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1082 {
1083 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1084 gimple_omp_atomic_load_set_lhs (p, lhs);
1085 gimple_omp_atomic_load_set_rhs (p, rhs);
1086 return p;
1087 }
1088
1089 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1090
1091 VAL is the value we are storing. */
1092
1093 gimple
1094 gimple_build_omp_atomic_store (tree val)
1095 {
1096 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1097 gimple_omp_atomic_store_set_val (p, val);
1098 return p;
1099 }
1100
1101 /* Build a GIMPLE_TRANSACTION statement. */
1102
1103 gimple
1104 gimple_build_transaction (gimple_seq body, tree label)
1105 {
1106 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1107 gimple_transaction_set_body (p, body);
1108 gimple_transaction_set_label (p, label);
1109 return p;
1110 }
1111
1112 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1113 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1114
1115 gimple
1116 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1117 {
1118 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1119 /* Ensure all the predictors fit into the lower bits of the subcode. */
1120 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1121 gimple_predict_set_predictor (p, predictor);
1122 gimple_predict_set_outcome (p, outcome);
1123 return p;
1124 }
1125
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  Reports the expected
   code/subcode pair against what GS actually carries, then aborts via
   internal_error.  Only compiled in when GIMPLE checking is enabled.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  /* A zero subcode has no tree-code name; print "".  */
		  gs->gsbase.subcode > 0
		  ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
		  : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1144
1145
1146 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1147 *SEQ_P is NULL, a new sequence is allocated. */
1148
1149 void
1150 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1151 {
1152 gimple_stmt_iterator si;
1153 if (gs == NULL)
1154 return;
1155
1156 si = gsi_last (*seq_p);
1157 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1158 }
1159
1160 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1161 *SEQ_P is NULL, a new sequence is allocated. This function is
1162 similar to gimple_seq_add_stmt, but does not scan the operands.
1163 During gimplification, we need to manipulate statement sequences
1164 before the def/use vectors have been constructed. */
1165
1166 void
1167 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1168 {
1169 gimple_stmt_iterator si;
1170
1171 if (gs == NULL)
1172 return;
1173
1174 si = gsi_last (*seq_p);
1175 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1176 }
1177
1178 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1179 NULL, a new sequence is allocated. */
1180
1181 void
1182 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1183 {
1184 gimple_stmt_iterator si;
1185 if (src == NULL)
1186 return;
1187
1188 si = gsi_last (*dst_p);
1189 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1190 }
1191
1192 /* Determine whether to assign a location to the statement GS. */
1193
1194 static bool
1195 should_carry_location_p (gimple gs)
1196 {
1197 /* Don't emit a line note for a label. We particularly don't want to
1198 emit one for the break label, since it doesn't actually correspond
1199 to the beginning of the loop/switch. */
1200 if (gimple_code (gs) == GIMPLE_LABEL)
1201 return false;
1202
1203 return true;
1204 }
1205
1206 /* Set the location for gimple statement GS to LOCATION. */
1207
1208 static void
1209 annotate_one_with_location (gimple gs, location_t location)
1210 {
1211 if (!gimple_has_location (gs)
1212 && !gimple_do_not_emit_location_p (gs)
1213 && should_carry_location_p (gs))
1214 gimple_set_location (gs, location);
1215 }
1216
1217 /* Set LOCATION for all the statements after iterator GSI in sequence
1218 SEQ. If GSI is pointing to the end of the sequence, start with the
1219 first statement in SEQ. */
1220
1221 void
1222 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1223 location_t location)
1224 {
1225 if (gsi_end_p (gsi))
1226 gsi = gsi_start (seq);
1227 else
1228 gsi_next (&gsi);
1229
1230 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1231 annotate_one_with_location (gsi_stmt (gsi), location);
1232 }
1233
1234 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1235
1236 void
1237 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1238 {
1239 gimple_stmt_iterator i;
1240
1241 if (gimple_seq_empty_p (stmt_p))
1242 return;
1243
1244 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1245 {
1246 gimple gs = gsi_stmt (i);
1247 annotate_one_with_location (gs, location);
1248 }
1249 }
1250
1251 /* Helper function of empty_body_p. Return true if STMT is an empty
1252 statement. */
1253
1254 static bool
1255 empty_stmt_p (gimple stmt)
1256 {
1257 if (gimple_code (stmt) == GIMPLE_NOP)
1258 return true;
1259 if (gimple_code (stmt) == GIMPLE_BIND)
1260 return empty_body_p (gimple_bind_body (stmt));
1261 return false;
1262 }
1263
1264
1265 /* Return true if BODY contains nothing but empty statements. */
1266
1267 bool
1268 empty_body_p (gimple_seq body)
1269 {
1270 gimple_stmt_iterator i;
1271
1272 if (gimple_seq_empty_p (body))
1273 return true;
1274 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1275 if (!empty_stmt_p (gsi_stmt (i))
1276 && !is_gimple_debug (gsi_stmt (i)))
1277 return false;
1278
1279 return true;
1280 }
1281
1282
1283 /* Perform a deep copy of sequence SRC and return the result. */
1284
1285 gimple_seq
1286 gimple_seq_copy (gimple_seq src)
1287 {
1288 gimple_stmt_iterator gsi;
1289 gimple_seq new_seq = NULL;
1290 gimple stmt;
1291
1292 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1293 {
1294 stmt = gimple_copy (gsi_stmt (gsi));
1295 gimple_seq_add_stmt (&new_seq, stmt);
1296 }
1297
1298 return new_seq;
1299 }
1300
1301
1302
1303 /* Return true if calls C1 and C2 are known to go to the same function. */
1304
1305 bool
1306 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1307 {
1308 if (gimple_call_internal_p (c1))
1309 return (gimple_call_internal_p (c2)
1310 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1311 else
1312 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1313 || (gimple_call_fndecl (c1)
1314 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1315 }
1316
1317 /* Detect flags from a GIMPLE_CALL. This is just like
1318 call_expr_flags, but for gimple tuples. */
1319
1320 int
1321 gimple_call_flags (const_gimple stmt)
1322 {
1323 int flags;
1324 tree decl = gimple_call_fndecl (stmt);
1325
1326 if (decl)
1327 flags = flags_from_decl_or_type (decl);
1328 else if (gimple_call_internal_p (stmt))
1329 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1330 else
1331 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1332
1333 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1334 flags |= ECF_NOTHROW;
1335
1336 return flags;
1337 }
1338
1339 /* Return the "fn spec" string for call STMT. */
1340
1341 static tree
1342 gimple_call_fnspec (const_gimple stmt)
1343 {
1344 tree type, attr;
1345
1346 type = gimple_call_fntype (stmt);
1347 if (!type)
1348 return NULL_TREE;
1349
1350 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1351 if (!attr)
1352 return NULL_TREE;
1353
1354 return TREE_VALUE (TREE_VALUE (attr));
1355 }
1356
1357 /* Detects argument flags for argument number ARG on call STMT. */
1358
1359 int
1360 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1361 {
1362 tree attr = gimple_call_fnspec (stmt);
1363
1364 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1365 return 0;
1366
1367 switch (TREE_STRING_POINTER (attr)[1 + arg])
1368 {
1369 case 'x':
1370 case 'X':
1371 return EAF_UNUSED;
1372
1373 case 'R':
1374 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1375
1376 case 'r':
1377 return EAF_NOCLOBBER | EAF_NOESCAPE;
1378
1379 case 'W':
1380 return EAF_DIRECT | EAF_NOESCAPE;
1381
1382 case 'w':
1383 return EAF_NOESCAPE;
1384
1385 case '.':
1386 default:
1387 return 0;
1388 }
1389 }
1390
1391 /* Detects return flags for the call STMT. */
1392
1393 int
1394 gimple_call_return_flags (const_gimple stmt)
1395 {
1396 tree attr;
1397
1398 if (gimple_call_flags (stmt) & ECF_MALLOC)
1399 return ERF_NOALIAS;
1400
1401 attr = gimple_call_fnspec (stmt);
1402 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1403 return 0;
1404
1405 switch (TREE_STRING_POINTER (attr)[0])
1406 {
1407 case '1':
1408 case '2':
1409 case '3':
1410 case '4':
1411 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1412
1413 case 'm':
1414 return ERF_NOALIAS;
1415
1416 case '.':
1417 default:
1418 return 0;
1419 }
1420 }
1421
1422
1423 /* Return true if GS is a copy assignment. */
1424
1425 bool
1426 gimple_assign_copy_p (gimple gs)
1427 {
1428 return (gimple_assign_single_p (gs)
1429 && is_gimple_val (gimple_op (gs, 1)));
1430 }
1431
1432
1433 /* Return true if GS is a SSA_NAME copy assignment. */
1434
1435 bool
1436 gimple_assign_ssa_name_copy_p (gimple gs)
1437 {
1438 return (gimple_assign_single_p (gs)
1439 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1440 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1441 }
1442
1443
1444 /* Return true if GS is an assignment with a unary RHS, but the
1445 operator has no effect on the assigned value. The logic is adapted
1446 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1447 instances in which STRIP_NOPS was previously applied to the RHS of
1448 an assignment.
1449
1450 NOTE: In the use cases that led to the creation of this function
1451 and of gimple_assign_single_p, it is typical to test for either
1452 condition and to proceed in the same manner. In each case, the
1453 assigned value is represented by the single RHS operand of the
1454 assignment. I suspect there may be cases where gimple_assign_copy_p,
1455 gimple_assign_single_p, or equivalent logic is used where a similar
1456 treatment of unary NOPs is appropriate. */
1457
1458 bool
1459 gimple_assign_unary_nop_p (gimple gs)
1460 {
1461 return (is_gimple_assign (gs)
1462 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1463 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1464 && gimple_assign_rhs1 (gs) != error_mark_node
1465 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1466 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1467 }
1468
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* A UID of -1 means this label has not been entered in the
	     map yet; assign the next free UID and make sure the map
	     is large enough to hold it.  */
	  unsigned old_len = vec_safe_length (label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      /* Grow by roughly 1.5x to amortize repeated insertions.  */
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map, new_len);
	    }
	}

      (*label_to_block_map)[uid] = bb;
    }
}
1500
1501
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Decompose EXPR into a code and up to three flat operands, then
     install them on the assignment (possibly reallocating it).  */
  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
1528
1529
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
                                  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      /* Allocate a fresh tuple with enough operand slots, copy the
	 old statement's fields into it wholesale and splice it into
	 the sequence in place of the old one.  */
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  /* OP2 and OP3 are only meaningful for binary and ternary codes.  */
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
1566
1567
1568 /* Return the LHS of a statement that performs an assignment,
1569 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1570 for a call to a function that returns no value, or for a
1571 statement other than an assignment or a call. */
1572
1573 tree
1574 gimple_get_lhs (const_gimple stmt)
1575 {
1576 enum gimple_code code = gimple_code (stmt);
1577
1578 if (code == GIMPLE_ASSIGN)
1579 return gimple_assign_lhs (stmt);
1580 else if (code == GIMPLE_CALL)
1581 return gimple_call_lhs (stmt);
1582 else
1583 return NULL_TREE;
1584 }
1585
1586
1587 /* Set the LHS of a statement that performs an assignment,
1588 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1589
1590 void
1591 gimple_set_lhs (gimple stmt, tree lhs)
1592 {
1593 enum gimple_code code = gimple_code (stmt);
1594
1595 if (code == GIMPLE_ASSIGN)
1596 gimple_assign_set_lhs (stmt, lhs);
1597 else if (code == GIMPLE_CALL)
1598 gimple_call_set_lhs (stmt, lhs);
1599 else
1600 gcc_unreachable ();
1601 }
1602
1603
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_EH_ELSE:
	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
	  gimple_eh_else_set_n_body (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
	  gimple_eh_else_set_e_body (copy, new_seq);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  /* The per-dimension iteration descriptors live in a
	     separately allocated vector; give the copy its own.  */
	  copy->gimple_omp_for.iter
	    = ggc_alloc_vec_gimple_omp_for_iter
	      (gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  /* All OMP constructs share a body; every OMP case above
	     jumps here to copy it.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
	  gimple_transaction_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
1771
1772
1773 /* Return true if statement S has side-effects. We consider a
1774 statement to have side effects if:
1775
1776 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1777 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1778
1779 bool
1780 gimple_has_side_effects (const_gimple s)
1781 {
1782 if (is_gimple_debug (s))
1783 return false;
1784
1785 /* We don't have to scan the arguments to check for
1786 volatile arguments, though, at present, we still
1787 do a scan to check for TREE_SIDE_EFFECTS. */
1788 if (gimple_has_volatile_ops (s))
1789 return true;
1790
1791 if (gimple_code (s) == GIMPLE_ASM
1792 && gimple_asm_volatile_p (s))
1793 return true;
1794
1795 if (is_gimple_call (s))
1796 {
1797 int flags = gimple_call_flags (s);
1798
1799 /* An infinite loop is considered a side effect. */
1800 if (!(flags & (ECF_CONST | ECF_PURE))
1801 || (flags & ECF_LOOPING_CONST_OR_PURE))
1802 return true;
1803
1804 return false;
1805 }
1806
1807 return false;
1808 }
1809
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For an assignment, operand 0 is the LHS (a store); start the
	 scan at 1 unless stores are to be considered too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* Pass the divisor of a binary operation along so that
	 operation_could_trap_p can prove division by a nonzero
	 constant safe.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}
1858
/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  /* Consider both memory loads and stores.  */
  return gimple_could_trap_p_1 (s, true, true);
}
1866
/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  /* Exclude the LHS (the store) from the check.  */
  return gimple_could_trap_p_1 (s, true, false);
}
1875
1876
1877 /* Print debugging information for gimple stmts generated. */
1878
1879 void
1880 dump_gimple_statistics (void)
1881 {
1882 int i, total_tuples = 0, total_bytes = 0;
1883
1884 if (! GATHER_STATISTICS)
1885 {
1886 fprintf (stderr, "No gimple statistics\n");
1887 return;
1888 }
1889
1890 fprintf (stderr, "\nGIMPLE statements\n");
1891 fprintf (stderr, "Kind Stmts Bytes\n");
1892 fprintf (stderr, "---------------------------------------\n");
1893 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
1894 {
1895 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
1896 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
1897 total_tuples += gimple_alloc_counts[i];
1898 total_bytes += gimple_alloc_sizes[i];
1899 }
1900 fprintf (stderr, "---------------------------------------\n");
1901 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
1902 fprintf (stderr, "---------------------------------------\n");
1903 }
1904
1905
1906 /* Return the number of operands needed on the RHS of a GIMPLE
1907 assignment for an expression with tree code CODE. */
1908
1909 unsigned
1910 get_gimple_rhs_num_ops (enum tree_code code)
1911 {
1912 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
1913
1914 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
1915 return 1;
1916 else if (rhs_class == GIMPLE_BINARY_RHS)
1917 return 2;
1918 else if (rhs_class == GIMPLE_TERNARY_RHS)
1919 return 3;
1920 else
1921 gcc_unreachable ();
1922 }
1923
/* Table mapping each tree code to the class of GIMPLE RHS (single,
   unary, binary or ternary operand form) an expression with that code
   uses when it appears on the RHS of a GIMPLE assignment.  Indexed by
   tree code; generated by expanding all-tree.def with the temporary
   DEFTREECODE definition below.  Codes that cannot appear on a GIMPLE
   RHS map to GIMPLE_INVALID_RHS.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)				    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS			    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR						    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
1959
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  /* Peel off component references (array indexing, field access,
     view conversions, ...) until the innermost object remains.  */
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  /* A memory reference through a constant address is equivalent to
     a direct reference to the addressed object.  */
  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  /* ??? Either the alias oracle or all callers need to properly deal
     with WITH_SIZE_EXPRs before we can look through those.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    return NULL_TREE;

  return t;
}
1987
/* Recompute TREE_SIDE_EFFECTS for expression T from its own
   volatility and its operands' side effects.  Nodes whose code
   inherently carries side effects (assignments, increments, ...)
   are left untouched.  */

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from the node's own volatility, then OR in any
	 side effects of its operands.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
2038
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
	  || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only return the result if it really is usable as a COND_EXPR
     condition; otherwise signal failure to the caller.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
2080
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  vec<tree> vargs;
  vargs.create (nargs);
  gimple new_stmt;

  /* Collect the arguments that survive the filtering.  */
  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      vargs.quick_push (gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Carry over the virtual operands, location, call flags and
     static chain from the original call.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  /* The SSA operands of the copy still need to be computed.  */
  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
2118
2119
2120
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  /* When both fields use the same offset alignment, their positions
     can be compared structurally.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      /* Normalize each field's position to a (byte, bit-within-byte)
	 pair before comparing.  */
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
2171
2172
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.

   The standard C and machine-mode integer type nodes are mapped
   directly to their counterpart of the requested signedness; fixed
   point types likewise.  Otherwise, if TYPE is an integral type whose
   mode and precision match one of the standard nodes, that node's
   counterpart is returned; as a last resort a nonstandard integer
   type of the same precision is built.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  /* Compare against the main variant so qualified versions of the
     standard types are recognized too.  */
  type1 = TYPE_MAIN_VARIANT (type);
  /* Plain 'char' maps to signed/unsigned char regardless of its
     target-dependent signedness.  */
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
           : long_long_integer_type_node;
  /* __int128 only exists on some targets; its node is NULL elsewhere.  */
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
          || type1 == int128_unsigned_type_node))
    return unsignedp
           ? int128_unsigned_type_node
           : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

/* Helper macros mapping the fixed-point type nodes (in plain and
   saturating variants, for every width) to the node of the requested
   signedness.  They expand to a sequence of 'if ... return' statements
   inside this function and are #undef'd at the end.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

/* Match TYPE against NODE by mode and precision (see comment above
   about why both must agree).  #undef'd below.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node matched; build a fresh integer type of the same
     precision with the requested signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2339
2340
2341 /* Return an unsigned type the same as TYPE in other respects. */
2342
2343 tree
2344 gimple_unsigned_type (tree type)
2345 {
2346 return gimple_signed_or_unsigned_type (true, type);
2347 }
2348
2349
2350 /* Return a signed type the same as TYPE in other respects. */
2351
2352 tree
2353 gimple_signed_type (tree type)
2354 {
2355 return gimple_signed_or_unsigned_type (false, type);
2356 }
2357
2358
2359 /* Return the typed-based alias set for T, which may be an expression
2360 or a type. Return -1 if we don't do anything special. */
2361
2362 alias_set_type
2363 gimple_get_alias_set (tree t)
2364 {
2365 tree u;
2366
2367 /* Permit type-punning when accessing a union, provided the access
2368 is directly through the union. For example, this code does not
2369 permit taking the address of a union member and then storing
2370 through it. Even the type-punning allowed here is a GCC
2371 extension, albeit a common and useful one; the C standard says
2372 that such accesses have implementation-defined behavior. */
2373 for (u = t;
2374 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2375 u = TREE_OPERAND (u, 0))
2376 if (TREE_CODE (u) == COMPONENT_REF
2377 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2378 return 0;
2379
2380 /* That's all the expressions we handle specially. */
2381 if (!TYPE_P (t))
2382 return -1;
2383
2384 /* For convenience, follow the C standard when dealing with
2385 character types. Any object may be accessed via an lvalue that
2386 has character type. */
2387 if (t == char_type_node
2388 || t == signed_char_type_node
2389 || t == unsigned_char_type_node)
2390 return 0;
2391
2392 /* Allow aliasing between signed and unsigned variants of the same
2393 type. We treat the signed variant as canonical. */
2394 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2395 {
2396 tree t1 = gimple_signed_type (t);
2397
2398 /* t1 == t can happen for boolean nodes which are always unsigned. */
2399 if (t1 != t)
2400 return get_alias_set (t1);
2401 }
2402
2403 return -1;
2404 }
2405
2406
2407 /* Helper for gimple_ior_addresses_taken_1. */
2408
2409 static bool
2410 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
2411 tree addr, void *data)
2412 {
2413 bitmap addresses_taken = (bitmap)data;
2414 addr = get_base_address (addr);
2415 if (addr
2416 && DECL_P (addr))
2417 {
2418 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2419 return true;
2420 }
2421 return false;
2422 }
2423
2424 /* Set the bit for the uid of all decls that have their address taken
2425 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2426 were any in this stmt. */
2427
2428 bool
2429 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2430 {
2431 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2432 gimple_ior_addresses_taken_1);
2433 }
2434
2435
2436 /* Return TRUE iff stmt is a call to a built-in function. */
2437
2438 bool
2439 is_gimple_builtin_call (gimple stmt)
2440 {
2441 tree callee;
2442
2443 if (is_gimple_call (stmt)
2444 && (callee = gimple_call_fndecl (stmt))
2445 && is_builtin_fn (callee)
2446 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2447 return true;
2448
2449 return false;
2450 }
2451
2452 /* Return true when STMTs arguments match those of FNDECL. */
2453
2454 static bool
2455 validate_call (gimple stmt, tree fndecl)
2456 {
2457 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2458 unsigned nargs = gimple_call_num_args (stmt);
2459 for (unsigned i = 0; i < nargs; ++i)
2460 {
2461 /* Variadic args follow. */
2462 if (!targs)
2463 return true;
2464 tree arg = gimple_call_arg (stmt, i);
2465 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2466 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
2467 ;
2468 else if (POINTER_TYPE_P (TREE_TYPE (arg))
2469 && POINTER_TYPE_P (TREE_VALUE (targs)))
2470 ;
2471 else if (TREE_CODE (TREE_TYPE (arg))
2472 != TREE_CODE (TREE_VALUE (targs)))
2473 return false;
2474 targs = TREE_CHAIN (targs);
2475 }
2476 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2477 return false;
2478 return true;
2479 }
2480
2481 /* Return true when STMT is builtins call to CLASS. */
2482
2483 bool
2484 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
2485 {
2486 tree fndecl;
2487 if (is_gimple_call (stmt)
2488 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2489 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2490 return validate_call (stmt, fndecl);
2491 return false;
2492 }
2493
2494 /* Return true when STMT is builtins call to CODE of CLASS. */
2495
2496 bool
2497 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
2498 {
2499 tree fndecl;
2500 if (is_gimple_call (stmt)
2501 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2502 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2503 && DECL_FUNCTION_CODE (fndecl) == code)
2504 return validate_call (stmt, fndecl);
2505 return false;
2506 }
2507
2508 /* Return true if STMT clobbers memory. STMT is required to be a
2509 GIMPLE_ASM. */
2510
2511 bool
2512 gimple_asm_clobbers_memory_p (const_gimple stmt)
2513 {
2514 unsigned i;
2515
2516 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2517 {
2518 tree op = gimple_asm_clobber_op (stmt, i);
2519 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2520 return true;
2521 }
2522
2523 return false;
2524 }
2525
2526 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2527
2528 void
2529 dump_decl_set (FILE *file, bitmap set)
2530 {
2531 if (set)
2532 {
2533 bitmap_iterator bi;
2534 unsigned i;
2535
2536 fprintf (file, "{ ");
2537
2538 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2539 {
2540 fprintf (file, "D.%u", i);
2541 fprintf (file, " ");
2542 }
2543
2544 fprintf (file, "}");
2545 }
2546 else
2547 fprintf (file, "NIL");
2548 }
2549
2550 /* Return true when CALL is a call stmt that definitely doesn't
2551 free any memory or makes it unavailable otherwise. */
2552 bool
2553 nonfreeing_call_p (gimple call)
2554 {
2555 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2556 && gimple_call_flags (call) & ECF_LEAF)
2557 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2558 {
2559 /* Just in case these become ECF_LEAF in the future. */
2560 case BUILT_IN_FREE:
2561 case BUILT_IN_TM_FREE:
2562 case BUILT_IN_REALLOC:
2563 case BUILT_IN_STACK_RESTORE:
2564 return false;
2565 default:
2566 return true;
2567 }
2568
2569 return false;
2570 }
2571
2572 /* Callback for walk_stmt_load_store_ops.
2573
2574 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2575 otherwise.
2576
2577 This routine only makes a superficial check for a dereference. Thus
2578 it must only be used if it is safe to return a false negative. */
2579 static bool
2580 check_loadstore (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
2581 {
2582 if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2583 && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2584 return true;
2585 return false;
2586 }
2587
2588 /* If OP can be inferred to be non-zero after STMT executes, return true. */
2589
2590 bool
2591 infer_nonnull_range (gimple stmt, tree op)
2592 {
2593 /* We can only assume that a pointer dereference will yield
2594 non-NULL if -fdelete-null-pointer-checks is enabled. */
2595 if (!flag_delete_null_pointer_checks
2596 || !POINTER_TYPE_P (TREE_TYPE (op))
2597 || gimple_code (stmt) == GIMPLE_ASM)
2598 return false;
2599
2600 if (walk_stmt_load_store_ops (stmt, (void *)op,
2601 check_loadstore, check_loadstore))
2602 return true;
2603
2604 if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2605 {
2606 tree fntype = gimple_call_fntype (stmt);
2607 tree attrs = TYPE_ATTRIBUTES (fntype);
2608 for (; attrs; attrs = TREE_CHAIN (attrs))
2609 {
2610 attrs = lookup_attribute ("nonnull", attrs);
2611
2612 /* If "nonnull" wasn't specified, we know nothing about
2613 the argument. */
2614 if (attrs == NULL_TREE)
2615 return false;
2616
2617 /* If "nonnull" applies to all the arguments, then ARG
2618 is non-null if it's in the argument list. */
2619 if (TREE_VALUE (attrs) == NULL_TREE)
2620 {
2621 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2622 {
2623 if (operand_equal_p (op, gimple_call_arg (stmt, i), 0)
2624 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i))))
2625 return true;
2626 }
2627 return false;
2628 }
2629
2630 /* Now see if op appears in the nonnull list. */
2631 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2632 {
2633 int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2634 tree arg = gimple_call_arg (stmt, idx);
2635 if (operand_equal_p (op, arg, 0))
2636 return true;
2637 }
2638 }
2639 }
2640
2641 /* If this function is marked as returning non-null, then we can
2642 infer OP is non-null if it is used in the return statement. */
2643 if (gimple_code (stmt) == GIMPLE_RETURN
2644 && gimple_return_retval (stmt)
2645 && operand_equal_p (gimple_return_retval (stmt), op, 0)
2646 && lookup_attribute ("returns_nonnull",
2647 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2648 return true;
2649
2650 return false;
2651 }
2652
2653 /* Compare two case labels. Because the front end should already have
2654 made sure that case ranges do not overlap, it is enough to only compare
2655 the CASE_LOW values of each case label. */
2656
2657 static int
2658 compare_case_labels (const void *p1, const void *p2)
2659 {
2660 const_tree const case1 = *(const_tree const*)p1;
2661 const_tree const case2 = *(const_tree const*)p2;
2662
2663 /* The 'default' case label always goes first. */
2664 if (!CASE_LOW (case1))
2665 return -1;
2666 else if (!CASE_LOW (case2))
2667 return 1;
2668 else
2669 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2670 }
2671
/* Sort the case labels in LABEL_VEC in place in ascending order,
   using compare_case_labels: the default label (if present) goes
   first, the rest are ordered by CASE_LOW.  */

void
sort_case_labels (vec<tree> label_vec)
{
  label_vec.qsort (compare_case_labels);
}
2679 \f
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  /* First pass: canonicalize each label in place, removing
     unreachable ones and extracting the default case.  I is only
     advanced when the current element is kept.  */
  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  /* CASE_LOW is NULL: this is the default label; at most one
	     may appear.  */
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* LEN is the number of labels retained after filtering.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Walk adjacent label pairs and check that each label
		 starts exactly one past the previous label's end,
		 i.e. there are no gaps in the coverage.  The check is
		 done in double-word arithmetic: low word + 1 with a
		 carry into the high word when the incremented low
		 word wraps to 0.  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if ((TREE_INT_CST_LOW (low) + 1
		       != TREE_INT_CST_LOW (high))
		      || (TREE_INT_CST_HIGH (low)
			  + (TREE_INT_CST_LOW (high) == 0)
			  != TREE_INT_CST_HIGH (high)))
		    break;
		}
	      if (i == len)
		{
		  /* Full coverage: reuse the first label's target as
		     the default destination.  */
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
2854
2855 /* Set the location of all statements in SEQ to LOC. */
2856
2857 void
2858 gimple_seq_set_location (gimple_seq seq, location_t loc)
2859 {
2860 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2861 gimple_set_location (gsi_stmt (i), loc);
2862 }