/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
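
/* An illustrative sketch (not from the original sources): for

     char buf[8];
     ... strlen (buf) ...

   with unknown BUF contents, SRK_STRLEN cannot compute an exact
   length, while SRK_LENRANGE can still bound the result to [0, 7],
   using the size of BUF as the upper bound.  */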

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
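
/* Typical use, a sketch mirroring gimple_fold_builtin_memory_op below:

     gimple *new_stmt = gimple_build_assign (NULL_TREE, srcmem);
     srcmem = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), new_stmt);
     gimple_assign_set_lhs (new_stmt, srcmem);

   which yields an SSA name in SSA form and a temporary register
   otherwise.  */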

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
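
/* For example, given

     static const int x = 42;

   get_symbol_constant_value on X returns the INTEGER_CST 42, and for a
   'const' variable without an initializer that cannot be overridden at
   link or run time it returns a zero constant (a sketch; the result
   still has to pass is_gimple_min_invariant).  */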


/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
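
/* For instance, REALPART_EXPR <__complex__ (1.0, 2.0)> folds to the
   REAL_CST 1.0 via fold_unary_loc, and a BIT_FIELD_REF of a VECTOR_CST
   folds to the extracted element via fold_ternary_loc (a sketch; any
   result that is not a gimple minimal invariant is discarded).  */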

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
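
/* For example, the GENERIC tree a[i + 1] is not a valid GIMPLE rhs
   because the index i + 1 is not a gimple value, whereas a[i_2] with
   an SSA name i_2 is acceptable; this is why results coming back from
   the fold-const.c routines have to be validated here.  */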


/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
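
/* A sketch of the OBJ_TYPE_REF case above: with -fdevirtualize, when
   the only possible target of a virtual call is a single method, an
   assignment of the OBJ_TYPE_REF folds to taking that method's
   address, e.g.

     fn_1 = OBJ_TYPE_REF (...);   ->   fn_1 = &Foo::bar;

   where Foo::bar stands for the hypothetical unique target.  */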


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
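
/* A sketch of the virtual operand threading above: replacing

     # .MEM_3 = VDEF <.MEM_2>
     foo (&a);

   with a two-store sequence produces

     # .MEM_7 = VDEF <.MEM_2>
     a.x = ...;
     # .MEM_3 = VDEF <.MEM_7>
     a.y = ...;

   where .MEM_7 is a fresh SSA name; the first VUSE and the last VDEF
   match those of the replaced statement as documented above.  */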

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments; the arguments, already in GIMPLE form, follow the
   NARGS count in the variadic argument list.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
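
/* For example, a CALL_EXPR like bar (x_1, 4) is directly representable,
   while bar (x + 1) is not, because the argument x + 1 first has to be
   gimplified into a separate statement.  */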

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
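
/* Typical use, a sketch: folding lhs = strlen ("abc") boils down to

     replace_call_with_value (gsi, build_int_cst (size_type_node, 3));

   which rewrites the call to lhs = 3 and releases the call's virtual
   definition.  */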

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
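
/* For example, if N_1 is known to be either zero or greater than
   SSIZE_MAX (a range like [0, 0] union [SSIZE_MAX + 1, SIZE_MAX]),
   intersecting it with the valid range [0, SSIZE_MAX] leaves only
   zero, so a memcpy (d_2, s_3, n_1) can be folded as if its length
   were the constant 0 (a sketch; any larger size is undefined).  */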

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we have chosen an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

 set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
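
/* Example transformations performed above (sketches):

     memcpy (&a, &b, 4);        ->   a = b;   (4-byte scalars)
     memcpy (p_1, p_1, n_2);    ->   folded away; exact overlap is safe
     res_3 = mempcpy (d, s, 8); ->   the copy followed by res_3 = d + 8;

   with memmove additionally rewritten to memcpy when the accesses
   provably do not overlap.  */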

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold a function call to built-in memset or bzero at *GSI, setting the
   memory of size LEN to the byte value C.  Return whether a
   simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
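
/* For example, with a 4-byte, 4-byte-aligned unsigned int i,

     memset (&i, 0xab, 4);

   becomes the single store

     i = 0xabababab;

   using the replicated byte value CVAL computed above.  */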

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     LENDATA.DECL with size LENDATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when the exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1822
1823 /* For an ARG referencing one or more strings, try to obtain the range
1824 of their lengths, or the size of the largest array ARG refers to if
1825 the range of lengths cannot be determined, and store all in *PDATA.
1826 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1827 the maximum constant value.
1828 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1829 SRK_STRLEN, return false if PDATA->MAXLEN is not equal to the
1830 determined length or if we are unable to determine the length.
1831 VISITED is a bitmap of visited variables.
1832 RKIND determines the kind of value or range to obtain (see
1833 strlen_range_kind).
1834 Set PDATA->DECL if ARG refers to an unterminated constant array.
1835 On input, set ELTSIZE to 1 for normal single byte character strings,
1836 and either 2 or 4 for wide character strings (the size of wchar_t).
1837 Return true if *PDATA was successfully populated and false otherwise. */
1838
1839 static bool
1840 get_range_strlen (tree arg, bitmap *visited,
1841 strlen_range_kind rkind,
1842 c_strlen_data *pdata, unsigned eltsize)
1843 {
1844
1845 if (TREE_CODE (arg) != SSA_NAME)
1846 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1847
1848 /* If ARG is registered for SSA update we cannot look at its defining
1849 statement. */
1850 if (name_registered_for_update_p (arg))
1851 return false;
1852
1853 /* If we were already here, break the infinite cycle. */
1854 if (!*visited)
1855 *visited = BITMAP_ALLOC (NULL);
1856 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1857 return true;
1858
1859 tree var = arg;
1860 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1861
1862 switch (gimple_code (def_stmt))
1863 {
1864 case GIMPLE_ASSIGN:
1865 /* The RHS of the statement defining VAR must either have a
1866 constant length or come from another SSA_NAME with a constant
1867 length. */
1868 if (gimple_assign_single_p (def_stmt)
1869 || gimple_assign_unary_nop_p (def_stmt))
1870 {
1871 tree rhs = gimple_assign_rhs1 (def_stmt);
1872 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1873 }
1874 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1875 {
1876 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1877 gimple_assign_rhs3 (def_stmt) };
1878
1879 for (unsigned int i = 0; i < 2; i++)
1880 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1881 {
1882 if (rkind != SRK_LENRANGE)
1883 return false;
1884 /* Set the upper bound to the maximum to prevent
1885 it from being adjusted in the next iteration but
1886 leave MINLEN and the more conservative MAXBOUND
1887 determined so far alone (or leave them null if
1888 they haven't been set yet). That MINLEN is in
1889 fact zero can already be inferred from MAXLEN
1890 being unbounded, but the minimum discovered so
1891 far is still used for diagnostics. */
1892 pdata->maxlen = build_all_ones_cst (size_type_node);
1893 }
1894 return true;
1895 }
1896 return false;
1897
1898 case GIMPLE_PHI:
1899 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1900 must have a constant length. */
1901 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1902 {
1903 tree arg = gimple_phi_arg (def_stmt, i)->def;
1904
1905 /* If this PHI has itself as an argument, we cannot
1906 determine the string length of this argument. However,
1907 if we can find a constant string length for the other
1908 PHI args then we can still be sure that this is a
1909 constant string length. So be optimistic and just
1910 continue with the next argument. */
1911 if (arg == gimple_phi_result (def_stmt))
1912 continue;
1913
1914 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1915 {
1916 if (rkind != SRK_LENRANGE)
1917 return false;
1918 /* Set the upper bound to the maximum to prevent
1919 it from being adjusted in the next iteration but
1920 leave MINLEN and the more conservative MAXBOUND
1921 determined so far alone (or leave them null if
1922 they haven't been set yet). That MINLEN is in
1923 fact zero can already be inferred from MAXLEN
1924 being unbounded, but the minimum discovered so
1925 far is still used for diagnostics. */
1926 pdata->maxlen = build_all_ones_cst (size_type_node);
1927 }
1928 }
1929 return true;
1930
1931 default:
1932 return false;
1933 }
1934 }
1935
1936 /* Try to obtain the range of the lengths of the string(s) referenced
1937 by ARG, or the size of the largest array ARG refers to if the range
1938 of lengths cannot be determined, and store all in *PDATA which must
1939 be zero-initialized on input except PDATA->MAXBOUND may be set to
1940 a non-null tree node other than INTEGER_CST to request that it be
1941 set to the length of the longest string in a PHI. ELTSIZE is
1942 the expected size of the string element in bytes: 1 for char and
1943 some power of 2 for wide characters.
1944 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1945 for optimization. Returning false means that a nonzero PDATA->MINLEN
1946 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1947 is -1 (in that case, the actual range is indeterminate, i.e.,
1948 [0, PTRDIFF_MAX - 2]). */
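/* Illustrative example (added commentary, not from the original
   source): given

     char a[4];

   a query on A with SRK_LENRANGE is expected to yield
   PDATA->MINLEN == 0 and PDATA->MAXLEN == 3, since the array
   provides four bytes, one of which must be the terminating nul.
   When nothing useful is known about ARG, PDATA->MAXLEN is set
   to all-ones and the function returns false. */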
1949
1950 bool
1951 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1952 {
1953 bitmap visited = NULL;
1954 tree maxbound = pdata->maxbound;
1955
1956 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1957 {
1958 /* On failure extend the length range to an impossible maximum
1959 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1960 members can stay unchanged regardless. */
1961 pdata->minlen = ssize_int (0);
1962 pdata->maxlen = build_all_ones_cst (size_type_node);
1963 }
1964 else if (!pdata->minlen)
1965 pdata->minlen = ssize_int (0);
1966
1967 /* If it's unchanged from its initial non-null value, set the conservative
1968 MAXBOUND to SIZE_MAX. Otherwise leave it unchanged (possibly null). */
1969 if (maxbound && pdata->maxbound == maxbound)
1970 pdata->maxbound = build_all_ones_cst (size_type_node);
1971
1972 if (visited)
1973 BITMAP_FREE (visited);
1974
1975 return !integer_all_onesp (pdata->maxlen);
1976 }
1977
1978 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1979 For ARG of pointer types, NONSTR indicates if the caller is prepared
1980 to handle unterminated strings. For integer ARG and when RKIND ==
1981 SRK_INT_VALUE, NONSTR must be null.
1982
1983 If an unterminated array is discovered and our caller handles
1984 unterminated arrays, then bubble up the offending DECL and
1985 return the maximum size. Otherwise return NULL. */
1986
1987 static tree
1988 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1989 {
1990 /* A non-null NONSTR is meaningless when determining the maximum
1991 value of an integer ARG. */
1992 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1993 /* ARG must have an integral type when RKIND says so. */
1994 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1995
1996 bitmap visited = NULL;
1997
1998 /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
1999 is unbounded. */
2000 c_strlen_data lendata = { };
2001 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
2002 lendata.maxlen = NULL_TREE;
2003 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2004 lendata.maxlen = NULL_TREE;
2005
2006 if (visited)
2007 BITMAP_FREE (visited);
2008
2009 if (nonstr)
2010 {
2011 /* For callers prepared to handle unterminated arrays set
2012 *NONSTR to point to the declaration of the array and return
2013 the maximum length/size. */
2014 *nonstr = lendata.decl;
2015 return lendata.maxlen;
2016 }
2017
2018 /* Fail if the constant array isn't nul-terminated. */
2019 return lendata.decl ? NULL_TREE : lendata.maxlen;
2020 }
2021
2022
2023 /* Fold a call to the builtin strcpy with arguments DEST and SRC.
2024 Return true if the call was folded into a simpler equivalent and
2025 false if no simplification can be made. */
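/* Illustrative sketch (added, not part of the original comment):
   when the source length is known, the function below rewrites

     strcpy (d, "abc");

   into the equivalent

     memcpy (d, "abc", 4);

   copying the three characters plus the terminating nul. */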
2026
2027 static bool
2028 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2029 tree dest, tree src)
2030 {
2031 gimple *stmt = gsi_stmt (*gsi);
2032 location_t loc = gimple_location (stmt);
2033 tree fn;
2034
2035 /* If SRC and DEST are the same (and not volatile), return DEST. */
2036 if (operand_equal_p (src, dest, 0))
2037 {
2038 /* Issue -Wrestrict unless the pointers are null (those do
2039 not point to objects and so do not indicate an overlap;
2040 such calls could be the result of sanitization and jump
2041 threading). */
2042 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2043 {
2044 tree func = gimple_call_fndecl (stmt);
2045
2046 warning_at (loc, OPT_Wrestrict,
2047 "%qD source argument is the same as destination",
2048 func);
2049 }
2050
2051 replace_call_with_value (gsi, dest);
2052 return true;
2053 }
2054
2055 if (optimize_function_for_size_p (cfun))
2056 return false;
2057
2058 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2059 if (!fn)
2060 return false;
2061
2062 /* Set to non-null if ARG refers to an unterminated array. */
2063 tree nonstr = NULL;
2064 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2065
2066 if (nonstr)
2067 {
2068 /* Avoid folding calls with unterminated arrays. */
2069 if (!gimple_no_warning_p (stmt))
2070 warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
2071 gimple_set_no_warning (stmt, true);
2072 return false;
2073 }
2074
2075 if (!len)
2076 return false;
2077
2078 len = fold_convert_loc (loc, size_type_node, len);
2079 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2080 len = force_gimple_operand_gsi (gsi, len, true,
2081 NULL_TREE, true, GSI_SAME_STMT);
2082 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2083 replace_call_with_call_and_fold (gsi, repl);
2084 return true;
2085 }
2086
2087 /* Fold a call to the builtin strncpy with arguments DEST, SRC, and
2088 LEN. Return true if the call was simplified (e.g., into a call
2089 to memcpy) and false if no simplification can be made. */
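/* For example (illustrative): with a constant bound no larger than
   the source length plus its nul,

     strncpy (d, "ab", 3);   =>   memcpy (d, "ab", 3);

   whereas strncpy (d, "ab", 8) is left alone here because the
   trailing zero padding is not expressible as a single memcpy. */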
2090
2091 static bool
2092 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2093 tree dest, tree src, tree len)
2094 {
2095 gimple *stmt = gsi_stmt (*gsi);
2096 location_t loc = gimple_location (stmt);
2097 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2098
2099 /* If the LEN parameter is zero, return DEST. */
2100 if (integer_zerop (len))
2101 {
2102 /* Avoid warning if the destination refers to an array/pointer
2103 decorated with attribute nonstring. */
2104 if (!nonstring)
2105 {
2106 tree fndecl = gimple_call_fndecl (stmt);
2107
2108 /* Warn about the lack of nul termination: the result is not
2109 a (nul-terminated) string. */
2110 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2111 if (slen && !integer_zerop (slen))
2112 warning_at (loc, OPT_Wstringop_truncation,
2113 "%G%qD destination unchanged after copying no bytes "
2114 "from a string of length %E",
2115 stmt, fndecl, slen);
2116 else
2117 warning_at (loc, OPT_Wstringop_truncation,
2118 "%G%qD destination unchanged after copying no bytes",
2119 stmt, fndecl);
2120 }
2121
2122 replace_call_with_value (gsi, dest);
2123 return true;
2124 }
2125
2126 /* We can't compare slen with len as constants below if len is not a
2127 constant. */
2128 if (TREE_CODE (len) != INTEGER_CST)
2129 return false;
2130
2131 /* Now, we must be passed a constant src ptr parameter. */
2132 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2133 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2134 return false;
2135
2136 /* The size of the source string including the terminating nul. */
2137 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2138
2139 /* We do not support simplification of this case, though we do
2140 support it when expanding trees into RTL. */
2141 /* FIXME: generate a call to __builtin_memset. */
2142 if (tree_int_cst_lt (ssize, len))
2143 return false;
2144
2145 /* Diagnose truncation that leaves the copy unterminated. */
2146 maybe_diag_stxncpy_trunc (*gsi, src, len);
2147
2148 /* OK transform into builtin memcpy. */
2149 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2150 if (!fn)
2151 return false;
2152
2153 len = fold_convert_loc (loc, size_type_node, len);
2154 len = force_gimple_operand_gsi (gsi, len, true,
2155 NULL_TREE, true, GSI_SAME_STMT);
2156 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2157 replace_call_with_call_and_fold (gsi, repl);
2158
2159 return true;
2160 }
2161
2162 /* Fold function call to builtin strchr or strrchr.
2163 If both arguments are constant, evaluate and fold the result,
2164 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2165 In general strlen is significantly faster than strchr
2166 due to being a simpler operation. */
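/* Illustrative examples (added commentary):

     strchr ("hello", 'l')   =>   "hello" + 2
     strchr (s, '\0')        =>   s + strlen (s)
     strrchr (s, '\0')       =>   strchr (s, '\0')  when optimizing
                                  for size  */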
2167 static bool
2168 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2169 {
2170 gimple *stmt = gsi_stmt (*gsi);
2171 tree str = gimple_call_arg (stmt, 0);
2172 tree c = gimple_call_arg (stmt, 1);
2173 location_t loc = gimple_location (stmt);
2174 const char *p;
2175 char ch;
2176
2177 if (!gimple_call_lhs (stmt))
2178 return false;
2179
2180 /* Avoid folding if the first argument is not a nul-terminated array.
2181 Defer warning until later. */
2182 if (!check_nul_terminated_array (NULL_TREE, str))
2183 return false;
2184
2185 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2186 {
2187 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2188
2189 if (p1 == NULL)
2190 {
2191 replace_call_with_value (gsi, integer_zero_node);
2192 return true;
2193 }
2194
2195 tree len = build_int_cst (size_type_node, p1 - p);
2196 gimple_seq stmts = NULL;
2197 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2198 POINTER_PLUS_EXPR, str, len);
2199 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2200 gsi_replace_with_seq_vops (gsi, stmts);
2201 return true;
2202 }
2203
2204 if (!integer_zerop (c))
2205 return false;
2206
2207 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2208 if (is_strrchr && optimize_function_for_size_p (cfun))
2209 {
2210 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2211
2212 if (strchr_fn)
2213 {
2214 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2215 replace_call_with_call_and_fold (gsi, repl);
2216 return true;
2217 }
2218
2219 return false;
2220 }
2221
2222 tree len;
2223 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2224
2225 if (!strlen_fn)
2226 return false;
2227
2228 /* Create newstr = strlen (str). */
2229 gimple_seq stmts = NULL;
2230 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2231 gimple_set_location (new_stmt, loc);
2232 len = create_tmp_reg_or_ssa_name (size_type_node);
2233 gimple_call_set_lhs (new_stmt, len);
2234 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2235
2236 /* Create (str p+ strlen (str)). */
2237 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2238 POINTER_PLUS_EXPR, str, len);
2239 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2240 gsi_replace_with_seq_vops (gsi, stmts);
2241 /* gsi now points at the assignment to the lhs, get a
2242 stmt iterator to the strlen.
2243 ??? We can't use gsi_for_stmt as that doesn't work when the
2244 CFG isn't built yet. */
2245 gimple_stmt_iterator gsi2 = *gsi;
2246 gsi_prev (&gsi2);
2247 fold_stmt (&gsi2);
2248 return true;
2249 }
2250
2251 /* Fold function call to builtin strstr.
2252 If both arguments are constant, evaluate and fold the result,
2253 additionally fold strstr (x, "") into x and strstr (x, "c")
2254 into strchr (x, 'c'). */
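/* For instance (illustrative):

     strstr ("haystack", "ta")   =>   "haystack" + 4
     strstr (x, "")              =>   x
     strstr (x, "c")             =>   strchr (x, 'c')  */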
2255 static bool
2256 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2257 {
2258 gimple *stmt = gsi_stmt (*gsi);
2259 if (!gimple_call_lhs (stmt))
2260 return false;
2261
2262 tree haystack = gimple_call_arg (stmt, 0);
2263 tree needle = gimple_call_arg (stmt, 1);
2264
2265 /* Avoid folding if either argument is not a nul-terminated array.
2266 Defer warning until later. */
2267 if (!check_nul_terminated_array (NULL_TREE, haystack)
2268 || !check_nul_terminated_array (NULL_TREE, needle))
2269 return false;
2270
2271 const char *q = c_getstr (needle);
2272 if (q == NULL)
2273 return false;
2274
2275 if (const char *p = c_getstr (haystack))
2276 {
2277 const char *r = strstr (p, q);
2278
2279 if (r == NULL)
2280 {
2281 replace_call_with_value (gsi, integer_zero_node);
2282 return true;
2283 }
2284
2285 tree len = build_int_cst (size_type_node, r - p);
2286 gimple_seq stmts = NULL;
2287 gimple *new_stmt
2288 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2289 haystack, len);
2290 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2291 gsi_replace_with_seq_vops (gsi, stmts);
2292 return true;
2293 }
2294
2295 /* For strstr (x, "") return x. */
2296 if (q[0] == '\0')
2297 {
2298 replace_call_with_value (gsi, haystack);
2299 return true;
2300 }
2301
2302 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2303 if (q[1] == '\0')
2304 {
2305 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2306 if (strchr_fn)
2307 {
2308 tree c = build_int_cst (integer_type_node, q[0]);
2309 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2310 replace_call_with_call_and_fold (gsi, repl);
2311 return true;
2312 }
2313 }
2314
2315 return false;
2316 }
2317
2318 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2319 to the call.
2320
2321 Return true if the call was simplified and false otherwise.
2322
2323 If SRC is known to be the empty string, the call reduces to DST.
2324 Otherwise, when the length of SRC is a known constant and the
2325 enclosing block is optimized for speed, the call is split into
2326 a strlen of the destination followed by a memcpy of the source,
2327 including its terminating nul, to just past the destination's end.
2328 The resulting memcpy with a constant length is then amenable to
2329 further folding, e.g., into a store by pieces. */
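/* Illustrative sketch (added, not from the original source):

     strcat (d, "xy");

   becomes, in effect,

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);

   with any lhs of the original call set to D afterwards. */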
2335
2336 static bool
2337 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2338 {
2339 gimple *stmt = gsi_stmt (*gsi);
2340 location_t loc = gimple_location (stmt);
2341
2342 const char *p = c_getstr (src);
2343
2344 /* If the string length is zero, return the dst parameter. */
2345 if (p && *p == '\0')
2346 {
2347 replace_call_with_value (gsi, dst);
2348 return true;
2349 }
2350
2351 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2352 return false;
2353
2354 /* See if we can store by pieces into (dst + strlen(dst)). */
2355 tree newdst;
2356 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2357 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2358
2359 if (!strlen_fn || !memcpy_fn)
2360 return false;
2361
2362 /* If the length of the source string isn't computable don't
2363 split strcat into strlen and memcpy. */
2364 tree len = get_maxval_strlen (src, SRK_STRLEN);
2365 if (! len)
2366 return false;
2367
2368 /* Create strlen (dst). */
2369 gimple_seq stmts = NULL, stmts2;
2370 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2371 gimple_set_location (repl, loc);
2372 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2373 gimple_call_set_lhs (repl, newdst);
2374 gimple_seq_add_stmt_without_update (&stmts, repl);
2375
2376 /* Create (dst p+ strlen (dst)). */
2377 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2378 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2379 gimple_seq_add_seq_without_update (&stmts, stmts2);
2380
2381 len = fold_convert_loc (loc, size_type_node, len);
2382 len = size_binop_loc (loc, PLUS_EXPR, len,
2383 build_int_cst (size_type_node, 1));
2384 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2385 gimple_seq_add_seq_without_update (&stmts, stmts2);
2386
2387 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2388 gimple_seq_add_stmt_without_update (&stmts, repl);
2389 if (gimple_call_lhs (stmt))
2390 {
2391 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2392 gimple_seq_add_stmt_without_update (&stmts, repl);
2393 gsi_replace_with_seq_vops (gsi, stmts);
2394 /* gsi now points at the assignment to the lhs, get a
2395 stmt iterator to the memcpy call.
2396 ??? We can't use gsi_for_stmt as that doesn't work when the
2397 CFG isn't built yet. */
2398 gimple_stmt_iterator gsi2 = *gsi;
2399 gsi_prev (&gsi2);
2400 fold_stmt (&gsi2);
2401 }
2402 else
2403 {
2404 gsi_replace_with_seq_vops (gsi, stmts);
2405 fold_stmt (gsi);
2406 }
2407 return true;
2408 }
2409
2410 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE are
2411 the arguments to the call. */
2412
2413 static bool
2414 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2415 {
2416 gimple *stmt = gsi_stmt (*gsi);
2417 tree dest = gimple_call_arg (stmt, 0);
2418 tree src = gimple_call_arg (stmt, 1);
2419 tree size = gimple_call_arg (stmt, 2);
2420 tree fn;
2421 const char *p;
2423
2424 p = c_getstr (src);
2425 /* If the SRC parameter is "", return DEST. */
2426 if (p && *p == '\0')
2427 {
2428 replace_call_with_value (gsi, dest);
2429 return true;
2430 }
2431
2432 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2433 return false;
2434
2435 /* If __builtin_strcat_chk is used, assume strcat is available. */
2436 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2437 if (!fn)
2438 return false;
2439
2440 gimple *repl = gimple_build_call (fn, 2, dest, src);
2441 replace_call_with_call_and_fold (gsi, repl);
2442 return true;
2443 }
2444
2445 /* Simplify a call to the strncat builtin. */
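/* For example (illustrative): when the constant bound is at least
   the constant source length,

     strncat (d, "ab", 4)   =>   strcat (d, "ab")

   while a bound below the source length is left alone here and
   diagnosed elsewhere. */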
2446
2447 static bool
2448 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2449 {
2450 gimple *stmt = gsi_stmt (*gsi);
2451 tree dst = gimple_call_arg (stmt, 0);
2452 tree src = gimple_call_arg (stmt, 1);
2453 tree len = gimple_call_arg (stmt, 2);
2454
2455 const char *p = c_getstr (src);
2456
2457 /* If the requested length is zero, or the src parameter string
2458 length is zero, return the dst parameter. */
2459 if (integer_zerop (len) || (p && *p == '\0'))
2460 {
2461 replace_call_with_value (gsi, dst);
2462 return true;
2463 }
2464
2465 if (TREE_CODE (len) != INTEGER_CST || !p)
2466 return false;
2467
2468 unsigned srclen = strlen (p);
2469
2470 int cmpsrc = compare_tree_int (len, srclen);
2471
2472 /* Return early if the requested len is less than the string length.
2473 Warnings will be issued elsewhere later. */
2474 if (cmpsrc < 0)
2475 return false;
2476
2477 unsigned HOST_WIDE_INT dstsize;
2478
2479 bool nowarn = gimple_no_warning_p (stmt);
2480
2481 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2482 {
2483 int cmpdst = compare_tree_int (len, dstsize);
2484
2485 if (cmpdst >= 0)
2486 {
2487 tree fndecl = gimple_call_fndecl (stmt);
2488
2489 /* Strncat copies (at most) LEN bytes and always appends
2490 the terminating NUL so the specified bound should never
2491 be equal to (or greater than) the size of the destination.
2492 If it is, the copy could overflow. */
2493 location_t loc = gimple_location (stmt);
2494 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2495 cmpdst == 0
2496 ? G_("%G%qD specified bound %E equals "
2497 "destination size")
2498 : G_("%G%qD specified bound %E exceeds "
2499 "destination size %wu"),
2500 stmt, fndecl, len, dstsize);
2501 if (nowarn)
2502 gimple_set_no_warning (stmt, true);
2503 }
2504 }
2505
2506 if (!nowarn && cmpsrc == 0)
2507 {
2508 tree fndecl = gimple_call_fndecl (stmt);
2509 location_t loc = gimple_location (stmt);
2510
2511 /* To avoid possible overflow the specified bound should also
2512 not be equal to the length of the source, even when the size
2513 of the destination is unknown (it's not an uncommon mistake
2514 to specify as the bound to strncat the length of the source). */
2515 if (warning_at (loc, OPT_Wstringop_overflow_,
2516 "%G%qD specified bound %E equals source length",
2517 stmt, fndecl, len))
2518 gimple_set_no_warning (stmt, true);
2519 }
2520
2521 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2522
2523 /* If the replacement _DECL isn't initialized, don't do the
2524 transformation. */
2525 if (!fn)
2526 return false;
2527
2528 /* Otherwise, emit a call to strcat. */
2529 gcall *repl = gimple_build_call (fn, 2, dst, src);
2530 replace_call_with_call_and_fold (gsi, repl);
2531 return true;
2532 }
2533
2534 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2535 LEN, and SIZE. */
2536
2537 static bool
2538 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2539 {
2540 gimple *stmt = gsi_stmt (*gsi);
2541 tree dest = gimple_call_arg (stmt, 0);
2542 tree src = gimple_call_arg (stmt, 1);
2543 tree len = gimple_call_arg (stmt, 2);
2544 tree size = gimple_call_arg (stmt, 3);
2545 tree fn;
2546 const char *p;
2547
2548 p = c_getstr (src);
2549 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2550 if ((p && *p == '\0')
2551 || integer_zerop (len))
2552 {
2553 replace_call_with_value (gsi, dest);
2554 return true;
2555 }
2556
2557 if (! tree_fits_uhwi_p (size))
2558 return false;
2559
2560 if (! integer_all_onesp (size))
2561 {
2562 tree src_len = c_strlen (src, 1);
2563 if (src_len
2564 && tree_fits_uhwi_p (src_len)
2565 && tree_fits_uhwi_p (len)
2566 && ! tree_int_cst_lt (len, src_len))
2567 {
2568 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2569 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2570 if (!fn)
2571 return false;
2572
2573 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2574 replace_call_with_call_and_fold (gsi, repl);
2575 return true;
2576 }
2577 return false;
2578 }
2579
2580 /* If __builtin_strncat_chk is used, assume strncat is available. */
2581 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2582 if (!fn)
2583 return false;
2584
2585 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2586 replace_call_with_call_and_fold (gsi, repl);
2587 return true;
2588 }
2589
2590 /* Build and append gimple statements to STMTS that load the first
2591 character of the memory location identified by STR. LOC is the
2592 location of the statement. */
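/* E.g. (illustrative): for STR == s the sequence gains the
   equivalent of

     tmp = MEM[(const unsigned char *)s];

   and TMP is returned to the caller. */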
2593
2594 static tree
2595 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2596 {
2597 tree var;
2598
2599 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2600 tree cst_uchar_ptr_node
2601 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2602 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2603
2604 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2605 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2606 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2607
2608 gimple_assign_set_lhs (stmt, var);
2609 gimple_seq_add_stmt_without_update (stmts, stmt);
2610
2611 return var;
2612 }
2613
2614 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
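/* Illustrative examples (added commentary):

     strcmp (s, s)        =>   0
     strcmp (s, "")       =>   *(const unsigned char *) s
     strncmp (s, t, 0)    =>   0
     strncmp (s, t, 1)    =>   *(const unsigned char *) s
                               - *(const unsigned char *) t
     strcmp ("a", "b")    =>   a negative constant  */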
2615
2616 static bool
2617 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2618 {
2619 gimple *stmt = gsi_stmt (*gsi);
2620 tree callee = gimple_call_fndecl (stmt);
2621 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2622
2623 tree type = integer_type_node;
2624 tree str1 = gimple_call_arg (stmt, 0);
2625 tree str2 = gimple_call_arg (stmt, 1);
2626 tree lhs = gimple_call_lhs (stmt);
2627
2628 tree bound_node = NULL_TREE;
2629 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2630
2631 /* Handle strncmp and strncasecmp functions. */
2632 if (gimple_call_num_args (stmt) == 3)
2633 {
2634 bound_node = gimple_call_arg (stmt, 2);
2635 if (tree_fits_uhwi_p (bound_node))
2636 bound = tree_to_uhwi (bound_node);
2637 }
2638
2639 /* If the BOUND parameter is zero, return zero. */
2640 if (bound == 0)
2641 {
2642 replace_call_with_value (gsi, integer_zero_node);
2643 return true;
2644 }
2645
2646 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2647 if (operand_equal_p (str1, str2, 0))
2648 {
2649 replace_call_with_value (gsi, integer_zero_node);
2650 return true;
2651 }
2652
2653 /* LEN1 and LEN2 are initially set to the number of characters,
2654 including the terminating nul if each array has one. LENx ==
2655 strnlen (Sx, LENx) implies that the array Sx is not terminated
2656 by a nul. For nul-terminated strings the values are then
2657 adjusted down to the string lengths so that LENx == NULPOSx. */
2658 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2659 const char *p1 = getbyterep (str1, &len1);
2660 const char *p2 = getbyterep (str2, &len2);
2661
2662 /* The position of the terminating nul character if one exists, otherwise
2663 a value greater than LENx. */
2664 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2665
2666 if (p1)
2667 {
2668 size_t n = strnlen (p1, len1);
2669 if (n < len1)
2670 len1 = nulpos1 = n;
2671 }
2672
2673 if (p2)
2674 {
2675 size_t n = strnlen (p2, len2);
2676 if (n < len2)
2677 len2 = nulpos2 = n;
2678 }
2679
2680 /* For known strings, return an immediate value. */
2681 if (p1 && p2)
2682 {
2683 int r = 0;
2684 bool known_result = false;
2685
2686 switch (fcode)
2687 {
2688 case BUILT_IN_STRCMP:
2689 case BUILT_IN_STRCMP_EQ:
2690 if (len1 != nulpos1 || len2 != nulpos2)
2691 break;
2692
2693 r = strcmp (p1, p2);
2694 known_result = true;
2695 break;
2696
2697 case BUILT_IN_STRNCMP:
2698 case BUILT_IN_STRNCMP_EQ:
2699 {
2700 if (bound == HOST_WIDE_INT_M1U)
2701 break;
2702
2703 /* Reduce the bound to be no more than the length
2704 of the shorter of the two strings, or the sizes
2705 of the unterminated arrays. */
2706 unsigned HOST_WIDE_INT n = bound;
2707
2708 if (len1 == nulpos1 && len1 < n)
2709 n = len1 + 1;
2710 if (len2 == nulpos2 && len2 < n)
2711 n = len2 + 1;
2712
2713 if (MIN (nulpos1, nulpos2) + 1 < n)
2714 break;
2715
2716 r = strncmp (p1, p2, n);
2717 known_result = true;
2718 break;
2719 }
2720 /* The only handleable situation is when the strings are equal
2721 (result 0), already covered by the operand_equal_p case above. */
2722 case BUILT_IN_STRCASECMP:
2723 break;
2724 case BUILT_IN_STRNCASECMP:
2725 {
2726 if (bound == HOST_WIDE_INT_M1U)
2727 break;
2728 r = strncmp (p1, p2, bound);
2729 if (r == 0)
2730 known_result = true;
2731 break;
2732 }
2733 default:
2734 gcc_unreachable ();
2735 }
2736
2737 if (known_result)
2738 {
2739 replace_call_with_value (gsi, build_cmp_result (type, r));
2740 return true;
2741 }
2742 }
2743
2744 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2745 || fcode == BUILT_IN_STRCMP
2746 || fcode == BUILT_IN_STRCMP_EQ
2747 || fcode == BUILT_IN_STRCASECMP;
2748
2749 location_t loc = gimple_location (stmt);
2750
2751 /* If the second arg is "", return *(const unsigned char*)arg1. */
2752 if (p2 && *p2 == '\0' && nonzero_bound)
2753 {
2754 gimple_seq stmts = NULL;
2755 tree var = gimple_load_first_char (loc, str1, &stmts);
2756 if (lhs)
2757 {
2758 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2759 gimple_seq_add_stmt_without_update (&stmts, stmt);
2760 }
2761
2762 gsi_replace_with_seq_vops (gsi, stmts);
2763 return true;
2764 }
2765
2766 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2767 if (p1 && *p1 == '\0' && nonzero_bound)
2768 {
2769 gimple_seq stmts = NULL;
2770 tree var = gimple_load_first_char (loc, str2, &stmts);
2771
2772 if (lhs)
2773 {
2774 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2775 stmt = gimple_build_assign (c, NOP_EXPR, var);
2776 gimple_seq_add_stmt_without_update (&stmts, stmt);
2777
2778 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2779 gimple_seq_add_stmt_without_update (&stmts, stmt);
2780 }
2781
2782 gsi_replace_with_seq_vops (gsi, stmts);
2783 return true;
2784 }
2785
2786 /* If BOUND is one, return an expression corresponding to
2787 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2788 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2789 {
2790 gimple_seq stmts = NULL;
2791 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2792 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2793
2794 if (lhs)
2795 {
2796 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2797 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2798 gimple_seq_add_stmt_without_update (&stmts, convert1);
2799
2800 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2801 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2802 gimple_seq_add_stmt_without_update (&stmts, convert2);
2803
2804 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2805 gimple_seq_add_stmt_without_update (&stmts, stmt);
2806 }
2807
2808 gsi_replace_with_seq_vops (gsi, stmts);
2809 return true;
2810 }
2811
2812 /* If BOUND is greater than the length of one constant string,
2813 and the other argument is also a nul-terminated string, replace
2814 strncmp with strcmp. */
2815 if (fcode == BUILT_IN_STRNCMP
2816 && bound > 0 && bound < HOST_WIDE_INT_M1U
2817 && ((p2 && len2 < bound && len2 == nulpos2)
2818 || (p1 && len1 < bound && len1 == nulpos1)))
2819 {
2820 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2821 if (!fn)
2822 return false;
2823 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2824 replace_call_with_call_and_fold (gsi, repl);
2825 return true;
2826 }
2827
2828 return false;
2829 }
2830
2831 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
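/* For example (illustrative):

     memchr ("aXb", 'X', 3)   =>   "aXb" + 1
     memchr ("ab", 'X', 2)    =>   a null pointer

   A bound larger than the string length is accepted only while it
   stays within the underlying array. */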
2832
2833 static bool
2834 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2835 {
2836 gimple *stmt = gsi_stmt (*gsi);
2837 tree lhs = gimple_call_lhs (stmt);
2838 tree arg1 = gimple_call_arg (stmt, 0);
2839 tree arg2 = gimple_call_arg (stmt, 1);
2840 tree len = gimple_call_arg (stmt, 2);
2841
2842 /* If the LEN parameter is zero, return zero. */
2843 if (integer_zerop (len))
2844 {
2845 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2846 return true;
2847 }
2848
2849 char c;
2850 if (TREE_CODE (arg2) != INTEGER_CST
2851 || !tree_fits_uhwi_p (len)
2852 || !target_char_cst_p (arg2, &c))
2853 return false;
2854
2855 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2856 unsigned HOST_WIDE_INT string_length;
2857 const char *p1 = getbyterep (arg1, &string_length);
2858
2859 if (p1)
2860 {
2861 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2862 if (r == NULL)
2863 {
2864 tree mem_size, offset_node;
2865 byte_representation (arg1, &offset_node, &mem_size, NULL);
2866 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2867 ? 0 : tree_to_uhwi (offset_node);
2868 /* MEM_SIZE is the size of the array the string literal
2869 is stored in. */
2870 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2871 gcc_checking_assert (string_length <= string_size);
2872 if (length <= string_size)
2873 {
2874 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2875 return true;
2876 }
2877 }
2878 else
2879 {
2880 unsigned HOST_WIDE_INT offset = r - p1;
2881 gimple_seq stmts = NULL;
2882 if (lhs != NULL_TREE)
2883 {
2884 tree offset_cst = build_int_cst (sizetype, offset);
2885 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2886 arg1, offset_cst);
2887 gimple_seq_add_stmt_without_update (&stmts, stmt);
2888 }
2889 else
2890 gimple_seq_add_stmt_without_update (&stmts,
2891 gimple_build_nop ());
2892
2893 gsi_replace_with_seq_vops (gsi, stmts);
2894 return true;
2895 }
2896 }
2897
2898 return false;
2899 }
2900
2901 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2902 to the call. UNLOCKED is true if this is actually a call to
2903 fputs_unlocked. The call is simplified only when its result is
2904 unused: depending on the string length it is deleted, turned
2905 into fputc, or turned into fwrite. Return true if the call was
2906 simplified and false otherwise. */
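/* Illustrative examples (added commentary); all require the result
   of the call to be unused:

     fputs ("", f)     =>   removed
     fputs ("x", f)    =>   fputc ('x', f)
     fputs ("xy", f)   =>   fwrite ("xy", 1, 2, f)  unless optimizing
                            for size  */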
2907
2908 static bool
2909 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2910 tree arg0, tree arg1,
2911 bool unlocked)
2912 {
2913 gimple *stmt = gsi_stmt (*gsi);
2914
2915 /* If we're using an unlocked function, assume the other unlocked
2916 functions exist explicitly. */
2917 tree const fn_fputc = (unlocked
2918 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2919 : builtin_decl_implicit (BUILT_IN_FPUTC));
2920 tree const fn_fwrite = (unlocked
2921 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2922 : builtin_decl_implicit (BUILT_IN_FWRITE));
2923
2924 /* If the return value is used, don't do the transformation. */
2925 if (gimple_call_lhs (stmt))
2926 return false;
2927
2928 /* Get the length of the string passed to fputs. If the length
2929 can't be determined, punt. */
2930 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2931 if (!len
2932 || TREE_CODE (len) != INTEGER_CST)
2933 return false;
2934
2935 switch (compare_tree_int (len, 1))
2936 {
2937 case -1: /* length is 0, delete the call entirely. */
2938 replace_call_with_value (gsi, integer_zero_node);
2939 return true;
2940
2941 case 0: /* length is 1, call fputc. */
2942 {
2943 const char *p = c_getstr (arg0);
2944 if (p != NULL)
2945 {
2946 if (!fn_fputc)
2947 return false;
2948
2949 gimple *repl = gimple_build_call (fn_fputc, 2,
2950 build_int_cst
2951 (integer_type_node, p[0]), arg1);
2952 replace_call_with_call_and_fold (gsi, repl);
2953 return true;
2954 }
2955 }
2956 /* FALLTHROUGH */
2957 case 1: /* length is greater than 1, call fwrite. */
2958 {
2959 /* If optimizing for size keep fputs. */
2960 if (optimize_function_for_size_p (cfun))
2961 return false;
2962 /* New argument list transforming fputs(string, stream) to
2963 fwrite(string, 1, len, stream). */
2964 if (!fn_fwrite)
2965 return false;
2966
2967 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2968 size_one_node, len, arg1);
2969 replace_call_with_call_and_fold (gsi, repl);
2970 return true;
2971 }
2972 default:
2973 gcc_unreachable ();
2974 }
2975 return false;
2976 }
2977
2978 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2979 DEST, SRC, LEN, and SIZE are the arguments to the call. FCODE is
2980 the BUILT_IN_* code of the builtin. The call is replaced with the
2981 corresponding unchecked function when the object size check is
2982 known to succeed. Return true if the call was folded. */
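/* For instance (illustrative):

     __builtin___memcpy_chk (d, s, n, -1)   =>   memcpy (d, s, n)

   since an all-ones SIZE means the object size is unknown and the
   check cannot fail; with a known SIZE the call is folded only when
   SIZE is no smaller than the (maximum) length N. */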
2983
2984 static bool
2985 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2986 tree dest, tree src, tree len, tree size,
2987 enum built_in_function fcode)
2988 {
2989 gimple *stmt = gsi_stmt (*gsi);
2990 location_t loc = gimple_location (stmt);
2991 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2992 tree fn;
2993
2994 /* If SRC and DEST are the same (and not volatile), return DEST
2995 (resp. DEST+LEN for __mempcpy_chk). */
2996 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2997 {
2998 if (fcode != BUILT_IN_MEMPCPY_CHK)
2999 {
3000 replace_call_with_value (gsi, dest);
3001 return true;
3002 }
3003 else
3004 {
3005 gimple_seq stmts = NULL;
3006 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3007 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3008 TREE_TYPE (dest), dest, len);
3009 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3010 replace_call_with_value (gsi, temp);
3011 return true;
3012 }
3013 }
3014
3015 if (! tree_fits_uhwi_p (size))
3016 return false;
3017
3018 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3019 if (! integer_all_onesp (size))
3020 {
3021 if (! tree_fits_uhwi_p (len))
3022 {
3023 /* If LEN is not constant, try MAXLEN too.
3024 For MAXLEN only allow optimizing into non-_ocs function
3025 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3026 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3027 {
3028 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3029 {
3030 /* (void) __mempcpy_chk () can be optimized into
3031 (void) __memcpy_chk (). */
3032 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3033 if (!fn)
3034 return false;
3035
3036 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3037 replace_call_with_call_and_fold (gsi, repl);
3038 return true;
3039 }
3040 return false;
3041 }
3042 }
3043 else
3044 maxlen = len;
3045
3046 if (tree_int_cst_lt (size, maxlen))
3047 return false;
3048 }
3049
3050 fn = NULL_TREE;
3051 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3052 mem{cpy,pcpy,move,set} is available. */
3053 switch (fcode)
3054 {
3055 case BUILT_IN_MEMCPY_CHK:
3056 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3057 break;
3058 case BUILT_IN_MEMPCPY_CHK:
3059 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3060 break;
3061 case BUILT_IN_MEMMOVE_CHK:
3062 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3063 break;
3064 case BUILT_IN_MEMSET_CHK:
3065 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3066 break;
3067 default:
3068 break;
3069 }
3070
3071 if (!fn)
3072 return false;
3073
3074 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3075 replace_call_with_call_and_fold (gsi, repl);
3076 return true;
3077 }
3078
3079 /* Fold a call to the __st[rp]cpy_chk builtin.
3080 DEST, SRC, and SIZE are the arguments to the call. FCODE is the
3081 BUILT_IN_* code of the builtin. The call is replaced with the
3082 corresponding unchecked function when the object size check is
3083 known to succeed. Return true if the call was folded. */
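/* For example (illustrative):

     __builtin___strcpy_chk (d, "abc", 8)   =>   strcpy (d, "abc")

   because the constant source length 3 is smaller than the object
   size 8; a non-constant but computable length is instead routed
   through __memcpy_chk as done in the body below. */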
3084
3085 static bool
3086 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3087 tree dest,
3088 tree src, tree size,
3089 enum built_in_function fcode)
3090 {
3091 gimple *stmt = gsi_stmt (*gsi);
3092 location_t loc = gimple_location (stmt);
3093 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3094 tree len, fn;
3095
3096 /* If SRC and DEST are the same (and not volatile), return DEST. */
3097 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3098 {
3099 /* Issue -Wrestrict unless the pointers are null (those do
3100 not point to objects and so do not indicate an overlap;
3101 such calls could be the result of sanitization and jump
3102 threading). */
3103 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
3104 {
3105 tree func = gimple_call_fndecl (stmt);
3106
3107 warning_at (loc, OPT_Wrestrict,
3108 "%qD source argument is the same as destination",
3109 func);
3110 }
3111
3112 replace_call_with_value (gsi, dest);
3113 return true;
3114 }
3115
3116 if (! tree_fits_uhwi_p (size))
3117 return false;
3118
3119 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3120 if (! integer_all_onesp (size))
3121 {
3122 len = c_strlen (src, 1);
3123 if (! len || ! tree_fits_uhwi_p (len))
3124 {
3125 /* If LEN is not constant, try MAXLEN too.
3126 For MAXLEN only allow optimizing into non-_ocs function
3127 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3128 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3129 {
3130 if (fcode == BUILT_IN_STPCPY_CHK)
3131 {
3132 if (! ignore)
3133 return false;
3134
3135 /* If return value of __stpcpy_chk is ignored,
3136 optimize into __strcpy_chk. */
3137 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3138 if (!fn)
3139 return false;
3140
3141 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3142 replace_call_with_call_and_fold (gsi, repl);
3143 return true;
3144 }
3145
3146 if (! len || TREE_SIDE_EFFECTS (len))
3147 return false;
3148
3149 /* If c_strlen returned something, but not a constant,
3150 transform __strcpy_chk into __memcpy_chk. */
3151 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3152 if (!fn)
3153 return false;
3154
3155 gimple_seq stmts = NULL;
3156 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3157 len = gimple_convert (&stmts, loc, size_type_node, len);
3158 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3159 build_int_cst (size_type_node, 1));
3160 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3161 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3162 replace_call_with_call_and_fold (gsi, repl);
3163 return true;
3164 }
3165 }
3166 else
3167 maxlen = len;
3168
3169 if (! tree_int_cst_lt (maxlen, size))
3170 return false;
3171 }
3172
3173 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3174 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
3175 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3176 if (!fn)
3177 return false;
3178
3179 gimple *repl = gimple_build_call (fn, 2, dest, src);
3180 replace_call_with_call_and_fold (gsi, repl);
3181 return true;
3182 }
3183
3184 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN,
3185 and SIZE are the arguments to the call, and FCODE is its
3186 BUILT_IN_* code. The call is replaced with the corresponding
3187 unchecked function when the object size check must succeed. */
3188
3189 static bool
3190 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3191 tree dest, tree src,
3192 tree len, tree size,
3193 enum built_in_function fcode)
3194 {
3195 gimple *stmt = gsi_stmt (*gsi);
3196 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3197 tree fn;
3198
3199 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3200 {
3201 /* If return value of __stpncpy_chk is ignored,
3202 optimize into __strncpy_chk. */
3203 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3204 if (fn)
3205 {
3206 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3207 replace_call_with_call_and_fold (gsi, repl);
3208 return true;
3209 }
3210 }
3211
3212 if (! tree_fits_uhwi_p (size))
3213 return false;
3214
3215 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3216 if (! integer_all_onesp (size))
3217 {
3218 if (! tree_fits_uhwi_p (len))
3219 {
3220 /* If LEN is not constant, try MAXLEN too.
3221 For MAXLEN only allow optimizing into non-_ocs function
3222 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3223 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3224 return false;
3225 }
3226 else
3227 maxlen = len;
3228
3229 if (tree_int_cst_lt (size, maxlen))
3230 return false;
3231 }
3232
3233 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3234 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3235 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3236 if (!fn)
3237 return false;
3238
3239 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3240 replace_call_with_call_and_fold (gsi, repl);
3241 return true;
3242 }
3243
3244 /* Fold a call to the builtin stpcpy with arguments DEST and SRC.
3245 Return false if no simplification can be made. */
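/* Illustrative sketch (added commentary): with a known source length,

     p = stpcpy (d, "abc");

   becomes the equivalent of

     memcpy (d, "abc", 4);
     p = d + 3;

   while an unused result demotes the call to plain strcpy. */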
3246
3247 static bool
3248 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3249 {
3250 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3251 location_t loc = gimple_location (stmt);
3252 tree dest = gimple_call_arg (stmt, 0);
3253 tree src = gimple_call_arg (stmt, 1);
3254 tree fn, lenp1;
3255
3256 /* If the result is unused, replace stpcpy with strcpy. */
3257 if (gimple_call_lhs (stmt) == NULL_TREE)
3258 {
3259 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3260 if (!fn)
3261 return false;
3262 gimple_call_set_fndecl (stmt, fn);
3263 fold_stmt (gsi);
3264 return true;
3265 }
3266
3267 /* Set to non-null if SRC refers to an unterminated array. */
3268 c_strlen_data data = { };
3269 /* The size of the unterminated array if SRC refers to one. */
3270 tree size;
3271 /* True if the size is exact/constant, false if it's the lower bound
3272 of a range. */
3273 bool exact;
3274 tree len = c_strlen (src, 1, &data, 1);
3275 if (!len
3276 || TREE_CODE (len) != INTEGER_CST)
3277 {
3278 data.decl = unterminated_array (src, &size, &exact);
3279 if (!data.decl)
3280 return false;
3281 }
3282
3283 if (data.decl)
3284 {
3285 /* Avoid folding calls with unterminated arrays. */
3286 if (!gimple_no_warning_p (stmt))
3287 warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
3288 exact);
3289 gimple_set_no_warning (stmt, true);
3290 return false;
3291 }
3292
3293 if (optimize_function_for_size_p (cfun)
3294 /* If length is zero it's small enough. */
3295 && !integer_zerop (len))
3296 return false;
3297
3298 /* If the source has a known length replace stpcpy with memcpy. */
3299 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3300 if (!fn)
3301 return false;
3302
3303 gimple_seq stmts = NULL;
3304 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3305 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3306 tem, build_int_cst (size_type_node, 1));
3307 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3308 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3309 gimple_move_vops (repl, stmt);
3310 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3311 /* Replace the result with dest + len. */
3312 stmts = NULL;
3313 tem = gimple_convert (&stmts, loc, sizetype, len);
3314 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3315 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3316 POINTER_PLUS_EXPR, dest, tem);
3317 gsi_replace (gsi, ret, false);
3318 /* Finally fold the memcpy call. */
3319 gimple_stmt_iterator gsi2 = *gsi;
3320 gsi_prev (&gsi2);
3321 fold_stmt (&gsi2);
3322 return true;
3323 }
3324
3325 /* Fold a call to __{,v}snprintf_chk. FCODE is either
3326 BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. The call is
3327 converted to the corresponding unchecked {,v}snprintf when the
3328 object size check is known to succeed. Return true if the call
3329 was folded and false if it should be emitted as is. */
3330
3331 static bool
3332 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3333 enum built_in_function fcode)
3334 {
3335 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3336 tree dest, size, len, fn, fmt, flag;
3337 const char *fmt_str;
3338
3339 /* Verify the required arguments in the original call. */
3340 if (gimple_call_num_args (stmt) < 5)
3341 return false;
3342
3343 dest = gimple_call_arg (stmt, 0);
3344 len = gimple_call_arg (stmt, 1);
3345 flag = gimple_call_arg (stmt, 2);
3346 size = gimple_call_arg (stmt, 3);
3347 fmt = gimple_call_arg (stmt, 4);
3348
3349 if (! tree_fits_uhwi_p (size))
3350 return false;
3351
3352 if (! integer_all_onesp (size))
3353 {
3354 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3355 if (! tree_fits_uhwi_p (len))
3356 {
3357 /* If LEN is not constant, try MAXLEN too.
3358 For MAXLEN only allow optimizing into non-_ocs function
3359 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3360 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3361 return false;
3362 }
3363 else
3364 maxlen = len;
3365
3366 if (tree_int_cst_lt (size, maxlen))
3367 return false;
3368 }
3369
3370 if (!init_target_chars ())
3371 return false;
3372
3373 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3374 or if format doesn't contain % chars or is "%s". */
3375 if (! integer_zerop (flag))
3376 {
3377 fmt_str = c_getstr (fmt);
3378 if (fmt_str == NULL)
3379 return false;
3380 if (strchr (fmt_str, target_percent) != NULL
3381 && strcmp (fmt_str, target_percent_s))
3382 return false;
3383 }
3384
3385 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3386 available. */
3387 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3388 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3389 if (!fn)
3390 return false;
3391
3392 /* Replace the called function and the first 5 arguments with 3,
3393 retaining the trailing varargs. */
3394 gimple_call_set_fndecl (stmt, fn);
3395 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3396 gimple_call_set_arg (stmt, 0, dest);
3397 gimple_call_set_arg (stmt, 1, len);
3398 gimple_call_set_arg (stmt, 2, fmt);
3399 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3400 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3401 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3402 fold_stmt (gsi);
3403 return true;
3404 }
3405
3406 /* Fold a call to __{,v}sprintf_chk. FCODE is either
3407 BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. The call is
3408 converted to the corresponding unchecked {,v}sprintf when the
3409 object size check is known to succeed. Return true if folded. */
3410
3411 static bool
3412 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3413 enum built_in_function fcode)
3414 {
3415 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3416 tree dest, size, len, fn, fmt, flag;
3417 const char *fmt_str;
3418 unsigned nargs = gimple_call_num_args (stmt);
3419
3420 /* Verify the required arguments in the original call. */
3421 if (nargs < 4)
3422 return false;
3423 dest = gimple_call_arg (stmt, 0);
3424 flag = gimple_call_arg (stmt, 1);
3425 size = gimple_call_arg (stmt, 2);
3426 fmt = gimple_call_arg (stmt, 3);
3427
3428 if (! tree_fits_uhwi_p (size))
3429 return false;
3430
3431 len = NULL_TREE;
3432
3433 if (!init_target_chars ())
3434 return false;
3435
3436 /* Check whether the format is a literal string constant. */
3437 fmt_str = c_getstr (fmt);
3438 if (fmt_str != NULL)
3439 {
3440 /* If the format doesn't contain % args or %%, we know the size. */
3441 if (strchr (fmt_str, target_percent) == 0)
3442 {
3443 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3444 len = build_int_cstu (size_type_node, strlen (fmt_str));
3445 }
3446 /* If the format is "%s" and first ... argument is a string literal,
3447 we know the size too. */
3448 else if (fcode == BUILT_IN_SPRINTF_CHK
3449 && strcmp (fmt_str, target_percent_s) == 0)
3450 {
3451 tree arg;
3452
3453 if (nargs == 5)
3454 {
3455 arg = gimple_call_arg (stmt, 4);
3456 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3457 {
3458 len = c_strlen (arg, 1);
3459 if (! len || ! tree_fits_uhwi_p (len))
3460 len = NULL_TREE;
3461 }
3462 }
3463 }
3464 }
3465
3466 if (! integer_all_onesp (size))
3467 {
3468 if (! len || ! tree_int_cst_lt (len, size))
3469 return false;
3470 }
3471
3472 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3473 or if format doesn't contain % chars or is "%s". */
3474 if (! integer_zerop (flag))
3475 {
3476 if (fmt_str == NULL)
3477 return false;
3478 if (strchr (fmt_str, target_percent) != NULL
3479 && strcmp (fmt_str, target_percent_s))
3480 return false;
3481 }
3482
3483 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3484 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3485 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3486 if (!fn)
3487 return false;
3488
3489 /* Replace the called function and the first 4 arguments with 2,
3490 retaining the trailing varargs. */
3491 gimple_call_set_fndecl (stmt, fn);
3492 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3493 gimple_call_set_arg (stmt, 0, dest);
3494 gimple_call_set_arg (stmt, 1, fmt);
3495 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3496 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3497 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3498 fold_stmt (gsi);
3499 return true;
3500 }
3501
3502 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3503 ORIG may be null if this is a 2-argument call. We don't attempt to
3504 simplify calls with more than 3 arguments.
3505
3506 Return true if simplification was possible, otherwise false. */
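/* For instance (illustrative):

     sprintf (d, "abc")     =>   strcpy (d, "abc"), lhs (if any) = 3
     sprintf (d, "%s", s)   =>   strcpy (d, s), which when the lhs
                                 is used requires a known strlen (s) */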
3507
3508 bool
3509 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3510 {
3511 gimple *stmt = gsi_stmt (*gsi);
3512 tree dest = gimple_call_arg (stmt, 0);
3513 tree fmt = gimple_call_arg (stmt, 1);
3514 tree orig = NULL_TREE;
3515 const char *fmt_str = NULL;
3516
3517 /* Verify the required arguments in the original call. We deal with two
3518 types of sprintf() calls: 'sprintf (str, fmt)' and
3519 'sprintf (dest, "%s", orig)'. */
3520 if (gimple_call_num_args (stmt) > 3)
3521 return false;
3522
3523 if (gimple_call_num_args (stmt) == 3)
3524 orig = gimple_call_arg (stmt, 2);
3525
3526 /* Check whether the format is a literal string constant. */
3527 fmt_str = c_getstr (fmt);
3528 if (fmt_str == NULL)
3529 return false;
3530
3531 if (!init_target_chars ())
3532 return false;
3533
3534 /* If the format doesn't contain % args or %%, use strcpy. */
3535 if (strchr (fmt_str, target_percent) == NULL)
3536 {
3537 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3538
3539 if (!fn)
3540 return false;
3541
3542 /* Don't optimize sprintf (buf, "abc", ptr++). */
3543 if (orig)
3544 return false;
3545
3546 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3547 'format' is known to contain no % formats. */
3548 gimple_seq stmts = NULL;
3549 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3550
3551 /* Propagate the NO_WARNING bit to avoid issuing the same
3552 warning more than once. */
3553 if (gimple_no_warning_p (stmt))
3554 gimple_set_no_warning (repl, true);
3555
3556 gimple_seq_add_stmt_without_update (&stmts, repl);
3557 if (tree lhs = gimple_call_lhs (stmt))
3558 {
3559 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3560 strlen (fmt_str)));
3561 gimple_seq_add_stmt_without_update (&stmts, repl);
3562 gsi_replace_with_seq_vops (gsi, stmts);
3563 /* gsi now points at the assignment to the lhs, get a
3564 stmt iterator to the strcpy call.
3565 ??? We can't use gsi_for_stmt as that doesn't work when the
3566 CFG isn't built yet. */
3567 gimple_stmt_iterator gsi2 = *gsi;
3568 gsi_prev (&gsi2);
3569 fold_stmt (&gsi2);
3570 }
3571 else
3572 {
3573 gsi_replace_with_seq_vops (gsi, stmts);
3574 fold_stmt (gsi);
3575 }
3576 return true;
3577 }
3578
3579 /* If the format is "%s", convert the call to strcpy; a used result requires ORIG's length to be a known constant. */
3580 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3581 {
3582 tree fn;
3583 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3584
3585 if (!fn)
3586 return false;
3587
3588 /* Don't crash on sprintf (str1, "%s"). */
3589 if (!orig)
3590 return false;
3591
3592 tree orig_len = NULL_TREE;
3593 if (gimple_call_lhs (stmt))
3594 {
3595 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3596 if (!orig_len)
3597 return false;
3598 }
3599
3600 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3601 gimple_seq stmts = NULL;
3602 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3603
3604 /* Propagate the NO_WARNING bit to avoid issuing the same
3605 warning more than once. */
3606 if (gimple_no_warning_p (stmt))
3607 gimple_set_no_warning (repl, true);
3608
3609 gimple_seq_add_stmt_without_update (&stmts, repl);
3610 if (tree lhs = gimple_call_lhs (stmt))
3611 {
3612 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3613 TREE_TYPE (orig_len)))
3614 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3615 repl = gimple_build_assign (lhs, orig_len);
3616 gimple_seq_add_stmt_without_update (&stmts, repl);
3617 gsi_replace_with_seq_vops (gsi, stmts);
3618 /* gsi now points at the assignment to the lhs, get a
3619 stmt iterator to the strcpy call.
3620 ??? We can't use gsi_for_stmt as that doesn't work when the
3621 CFG isn't built yet. */
3622 gimple_stmt_iterator gsi2 = *gsi;
3623 gsi_prev (&gsi2);
3624 fold_stmt (&gsi2);
3625 }
3626 else
3627 {
3628 gsi_replace_with_seq_vops (gsi, stmts);
3629 fold_stmt (gsi);
3630 }
3631 return true;
3632 }
3633 return false;
3634 }
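
/* Illustrative sketch only: the source-level effect of the two cases
   handled above is

     sprintf (buf, "abc");      =>  strcpy (buf, "abc");
     n = sprintf (buf, "abc");  =>  strcpy (buf, "abc"); n = 3;
     sprintf (dst, "%s", src);  =>  strcpy (dst, src);

   For the "%s" form with a used result, strlen (src) must be a known
   constant so the lhs can be replaced by it.  */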
3635
3636 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3637 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3638 attempt to simplify calls with more than 4 arguments.
3639
3640 Return true if simplification was possible, otherwise false. */
3641
3642 bool
3643 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3644 {
3645 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3646 tree dest = gimple_call_arg (stmt, 0);
3647 tree destsize = gimple_call_arg (stmt, 1);
3648 tree fmt = gimple_call_arg (stmt, 2);
3649 tree orig = NULL_TREE;
3650 const char *fmt_str = NULL;
3651
3652 if (gimple_call_num_args (stmt) > 4)
3653 return false;
3654
3655 if (gimple_call_num_args (stmt) == 4)
3656 orig = gimple_call_arg (stmt, 3);
3657
3658 if (!tree_fits_uhwi_p (destsize))
3659 return false;
3660 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3661
3662 /* Check whether the format is a literal string constant. */
3663 fmt_str = c_getstr (fmt);
3664 if (fmt_str == NULL)
3665 return false;
3666
3667 if (!init_target_chars ())
3668 return false;
3669
3670 /* If the format doesn't contain % args or %%, use strcpy. */
3671 if (strchr (fmt_str, target_percent) == NULL)
3672 {
3673 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3674 if (!fn)
3675 return false;
3676
3677 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3678 if (orig)
3679 return false;
3680
3681 /* We could expand this as
3682 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3683 or to
3684 memcpy (str, fmt_with_nul_at_cstm1, cst);
3685 but in the former case that might increase code size
3686 and in the latter case grow .rodata section too much.
3687 So punt for now. */
3688 size_t len = strlen (fmt_str);
3689 if (len >= destlen)
3690 return false;
3691
3692 gimple_seq stmts = NULL;
3693 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3694 gimple_seq_add_stmt_without_update (&stmts, repl);
3695 if (tree lhs = gimple_call_lhs (stmt))
3696 {
3697 repl = gimple_build_assign (lhs,
3698 build_int_cst (TREE_TYPE (lhs), len));
3699 gimple_seq_add_stmt_without_update (&stmts, repl);
3700 gsi_replace_with_seq_vops (gsi, stmts);
3701 /* gsi now points at the assignment to the lhs, get a
3702 stmt iterator to the strcpy call.
3703 ??? We can't use gsi_for_stmt as that doesn't work when the
3704 CFG isn't built yet. */
3705 gimple_stmt_iterator gsi2 = *gsi;
3706 gsi_prev (&gsi2);
3707 fold_stmt (&gsi2);
3708 }
3709 else
3710 {
3711 gsi_replace_with_seq_vops (gsi, stmts);
3712 fold_stmt (gsi);
3713 }
3714 return true;
3715 }
3716
3717 /* If the format is "%s", use strcpy if the result isn't used. */
3718 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3719 {
3720 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3721 if (!fn)
3722 return false;
3723
3724 /* Don't crash on snprintf (str1, cst, "%s"). */
3725 if (!orig)
3726 return false;
3727
3728 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3729 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3730 return false;
3731
3732 /* We could expand this as
3733 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3734 or to
3735 memcpy (str1, str2_with_nul_at_cstm1, cst);
3736 but in the former case that might increase code size
3737 and in the latter case grow .rodata section too much.
3738 So punt for now. */
3739 if (compare_tree_int (orig_len, destlen) >= 0)
3740 return false;
3741
3742 /* Convert snprintf (str1, cst, "%s", str2) into
3743 strcpy (str1, str2) if strlen (str2) < cst. */
3744 gimple_seq stmts = NULL;
3745 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3746 gimple_seq_add_stmt_without_update (&stmts, repl);
3747 if (tree lhs = gimple_call_lhs (stmt))
3748 {
3749 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3750 TREE_TYPE (orig_len)))
3751 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3752 repl = gimple_build_assign (lhs, orig_len);
3753 gimple_seq_add_stmt_without_update (&stmts, repl);
3754 gsi_replace_with_seq_vops (gsi, stmts);
3755 /* gsi now points at the assignment to the lhs, get a
3756 stmt iterator to the strcpy call.
3757 ??? We can't use gsi_for_stmt as that doesn't work when the
3758 CFG isn't built yet. */
3759 gimple_stmt_iterator gsi2 = *gsi;
3760 gsi_prev (&gsi2);
3761 fold_stmt (&gsi2);
3762 }
3763 else
3764 {
3765 gsi_replace_with_seq_vops (gsi, stmts);
3766 fold_stmt (gsi);
3767 }
3768 return true;
3769 }
3770 return false;
3771 }
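
/* Illustrative sketch only: assuming constant sizes and known string
   lengths, the folding above performs

     snprintf (buf, 8, "abc");        =>  strcpy (buf, "abc");
     snprintf (buf, 8, "%s", "abc");  =>  strcpy (buf, "abc");

   and punts whenever the copied length might reach the destination
   size, since truncation semantics would otherwise be lost.  */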
3772
3773 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3774 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3775 more than 3 arguments, and ARG may be null in the 2-argument case.
3776
3777 Return true if the call was simplified and replaced, otherwise false.
3778 FCODE is the BUILT_IN_* code of the function to be simplified. */
3780
3781 static bool
3782 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3783 tree fp, tree fmt, tree arg,
3784 enum built_in_function fcode)
3785 {
3786 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3787 tree fn_fputc, fn_fputs;
3788 const char *fmt_str = NULL;
3789
3790 /* If the return value is used, don't do the transformation. */
3791 if (gimple_call_lhs (stmt) != NULL_TREE)
3792 return false;
3793
3794 /* Check whether the format is a literal string constant. */
3795 fmt_str = c_getstr (fmt);
3796 if (fmt_str == NULL)
3797 return false;
3798
3799 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3800 {
3801 /* If we're using an unlocked function, assume the other
3802 unlocked functions exist explicitly. */
3803 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3804 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3805 }
3806 else
3807 {
3808 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3809 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3810 }
3811
3812 if (!init_target_chars ())
3813 return false;
3814
3815 /* If the format doesn't contain % args or %%, use fputs. */
3816 if (strchr (fmt_str, target_percent) == NULL)
3817 {
3818 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3819 && arg)
3820 return false;
3821
3822 /* If the format specifier was "", fprintf does nothing. */
3823 if (fmt_str[0] == '\0')
3824 {
3825 replace_call_with_value (gsi, NULL_TREE);
3826 return true;
3827 }
3828
3829 /* When "string" doesn't contain %, replace all cases of
3830 fprintf (fp, string) with fputs (string, fp). The fputs
3831 builtin will take care of special cases like length == 1. */
3832 if (fn_fputs)
3833 {
3834 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3835 replace_call_with_call_and_fold (gsi, repl);
3836 return true;
3837 }
3838 }
3839
3840 /* The other optimizations can be done only on the non-va_list variants. */
3841 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3842 return false;
3843
3844 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3845 else if (strcmp (fmt_str, target_percent_s) == 0)
3846 {
3847 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3848 return false;
3849 if (fn_fputs)
3850 {
3851 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3852 replace_call_with_call_and_fold (gsi, repl);
3853 return true;
3854 }
3855 }
3856
3857 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3858 else if (strcmp (fmt_str, target_percent_c) == 0)
3859 {
3860 if (!arg
3861 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3862 return false;
3863 if (fn_fputc)
3864 {
3865 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3866 replace_call_with_call_and_fold (gsi, repl);
3867 return true;
3868 }
3869 }
3870
3871 return false;
3872 }
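
/* Illustrative sketch only: the fprintf folding above amounts to

     fprintf (fp, "hello");    =>  fputs ("hello", fp);
     fprintf (fp, "%s", str);  =>  fputs (str, fp);
     fprintf (fp, "%c", c);    =>  fputc (c, fp);

   and only fires when the return value is unused, as fputs/fputc
   don't return the number of characters written.  */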
3873
3874 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3875 FMT and ARG are the arguments to the call; we don't fold cases with
3876 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3877
3878 Return true if the call was simplified and replaced, otherwise false.
3879 FCODE is the BUILT_IN_* code of the function to be simplified. */
3881
3882 static bool
3883 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3884 tree arg, enum built_in_function fcode)
3885 {
3886 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3887 tree fn_putchar, fn_puts, newarg;
3888 const char *fmt_str = NULL;
3889
3890 /* If the return value is used, don't do the transformation. */
3891 if (gimple_call_lhs (stmt) != NULL_TREE)
3892 return false;
3893
3894 /* Check whether the format is a literal string constant. */
3895 fmt_str = c_getstr (fmt);
3896 if (fmt_str == NULL)
3897 return false;
3898
3899 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3900 {
3901 /* If we're using an unlocked function, assume the other
3902 unlocked functions exist explicitly. */
3903 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3904 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3905 }
3906 else
3907 {
3908 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3909 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3910 }
3911
3912 if (!init_target_chars ())
3913 return false;
3914
3915 if (strcmp (fmt_str, target_percent_s) == 0
3916 || strchr (fmt_str, target_percent) == NULL)
3917 {
3918 const char *str;
3919
3920 if (strcmp (fmt_str, target_percent_s) == 0)
3921 {
3922 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3923 return false;
3924
3925 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3926 return false;
3927
3928 str = c_getstr (arg);
3929 if (str == NULL)
3930 return false;
3931 }
3932 else
3933 {
3934 /* The format specifier doesn't contain any '%' characters. */
3935 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3936 && arg)
3937 return false;
3938 str = fmt_str;
3939 }
3940
3941 /* If the string was "", printf does nothing. */
3942 if (str[0] == '\0')
3943 {
3944 replace_call_with_value (gsi, NULL_TREE);
3945 return true;
3946 }
3947
3948 /* If the string has length of 1, call putchar. */
3949 if (str[1] == '\0')
3950 {
3951 /* Given printf ("c"), where c is any single character,
3952 convert "c"[0] to an int and pass that to the replacement
3953 function. */
3954 newarg = build_int_cst (integer_type_node, str[0]);
3955 if (fn_putchar)
3956 {
3957 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3958 replace_call_with_call_and_fold (gsi, repl);
3959 return true;
3960 }
3961 }
3962 else
3963 {
3964 /* If the string was "string\n", call puts("string"). */
3965 size_t len = strlen (str);
3966 if ((unsigned char)str[len - 1] == target_newline
3967 && (size_t) (int) len == len
3968 && (int) len > 0)
3969 {
3970 char *newstr;
3971
3972 /* Create a NUL-terminated string that's one char shorter
3973 than the original, stripping off the trailing '\n'. */
3974 newstr = xstrdup (str);
3975 newstr[len - 1] = '\0';
3976 newarg = build_string_literal (len, newstr);
3977 free (newstr);
3978 if (fn_puts)
3979 {
3980 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3981 replace_call_with_call_and_fold (gsi, repl);
3982 return true;
3983 }
3984 }
3985 else
3986 /* We'd like to arrange to call fputs (string, stdout) here,
3987 but we need stdout and don't have a way to get it yet. */
3988 return false;
3989 }
3990 }
3991
3992 /* The other optimizations can be done only on the non-va_list variants. */
3993 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3994 return false;
3995
3996 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3997 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3998 {
3999 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
4000 return false;
4001 if (fn_puts)
4002 {
4003 gcall *repl = gimple_build_call (fn_puts, 1, arg);
4004 replace_call_with_call_and_fold (gsi, repl);
4005 return true;
4006 }
4007 }
4008
4009 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4010 else if (strcmp (fmt_str, target_percent_c) == 0)
4011 {
4012 if (!arg || ! useless_type_conversion_p (integer_type_node,
4013 TREE_TYPE (arg)))
4014 return false;
4015 if (fn_putchar)
4016 {
4017 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
4018 replace_call_with_call_and_fold (gsi, repl);
4019 return true;
4020 }
4021 }
4022
4023 return false;
4024 }
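
/* Illustrative sketch only: the printf folding above amounts to

     printf ("x");          =>  putchar ('x');
     printf ("hello\n");    =>  puts ("hello");
     printf ("%s\n", str);  =>  puts (str);
     printf ("%c", c);      =>  putchar (c);

   again only for an unused return value.  printf ("hello") without a
   trailing newline is left alone because the replacement would be
   fputs ("hello", stdout) and stdout isn't accessible here.  */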
4025
4026
4027
4028 /* Fold a call to __builtin_strlen to a constant when the length of its argument is known, otherwise record the length's range on the result. */
4029
4030 static bool
4031 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
4032 {
4033 gimple *stmt = gsi_stmt (*gsi);
4034 tree arg = gimple_call_arg (stmt, 0);
4035
4036 wide_int minlen;
4037 wide_int maxlen;
4038
4039 c_strlen_data lendata = { };
4040 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4041 && !lendata.decl
4042 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4043 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4044 {
4045 /* The range of lengths refers to either a single constant
4046 string or to the longest and shortest constant string
4047 referenced by the argument of the strlen() call, or to
4048 the strings that can possibly be stored in the arrays
4049 the argument refers to. */
4050 minlen = wi::to_wide (lendata.minlen);
4051 maxlen = wi::to_wide (lendata.maxlen);
4052 }
4053 else
4054 {
4055 unsigned prec = TYPE_PRECISION (sizetype);
4056
4057 minlen = wi::shwi (0, prec);
4058 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4059 }
4060
4061 if (minlen == maxlen)
4062 {
4063 /* Fold the strlen call to a constant. */
4064 tree type = TREE_TYPE (lendata.minlen);
4065 tree len = force_gimple_operand_gsi (gsi,
4066 wide_int_to_tree (type, minlen),
4067 true, NULL, true, GSI_SAME_STMT);
4068 replace_call_with_value (gsi, len);
4069 return true;
4070 }
4071
4072 /* Set the strlen() range to [0, MAXLEN]. */
4073 if (tree lhs = gimple_call_lhs (stmt))
4074 set_strlen_range (lhs, minlen, maxlen);
4075
4076 return false;
4077 }
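
/* Illustrative sketch only: given

     const char a[] = "abc";
     n = __builtin_strlen (a);

   the range computed above is [3, 3] and the call folds to n = 3.
   For a non-constant argument the call is kept, but the [minlen,
   maxlen] range recorded on the lhs can still help later passes.  */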
4078
4079 /* Fold a call to __builtin_acc_on_device. */
4080
4081 static bool
4082 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4083 {
4084 /* Defer folding until we know which compiler we're in. */
4085 if (symtab->state != EXPANSION)
4086 return false;
4087
4088 unsigned val_host = GOMP_DEVICE_HOST;
4089 unsigned val_dev = GOMP_DEVICE_NONE;
4090
4091 #ifdef ACCEL_COMPILER
4092 val_host = GOMP_DEVICE_NOT_HOST;
4093 val_dev = ACCEL_COMPILER_acc_device;
4094 #endif
4095
4096 location_t loc = gimple_location (gsi_stmt (*gsi));
4097
4098 tree host_eq = make_ssa_name (boolean_type_node);
4099 gimple *host_ass = gimple_build_assign
4100 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4101 gimple_set_location (host_ass, loc);
4102 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4103
4104 tree dev_eq = make_ssa_name (boolean_type_node);
4105 gimple *dev_ass = gimple_build_assign
4106 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4107 gimple_set_location (dev_ass, loc);
4108 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4109
4110 tree result = make_ssa_name (boolean_type_node);
4111 gimple *result_ass = gimple_build_assign
4112 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4113 gimple_set_location (result_ass, loc);
4114 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4115
4116 replace_call_with_value (gsi, result);
4117
4118 return true;
4119 }
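
/* Illustrative sketch only: in a host compilation the folding above
   replaces acc_on_device (arg0) with GIMPLE along the lines of

     _1 = arg0 == GOMP_DEVICE_HOST;
     _2 = arg0 == GOMP_DEVICE_NONE;
     _3 = _1 | _2;

   with the compared constants swapped for the values appropriate to
   an ACCEL_COMPILER build.  */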
4120
4121 /* Fold realloc (0, n) -> malloc (n). */
4122
4123 static bool
4124 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4125 {
4126 gimple *stmt = gsi_stmt (*gsi);
4127 tree arg = gimple_call_arg (stmt, 0);
4128 tree size = gimple_call_arg (stmt, 1);
4129
4130 if (operand_equal_p (arg, null_pointer_node, 0))
4131 {
4132 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4133 if (fn_malloc)
4134 {
4135 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4136 replace_call_with_call_and_fold (gsi, repl);
4137 return true;
4138 }
4139 }
4140 return false;
4141 }
4142
4143 /* Number of bytes into which any type other than an aggregate or
4144 vector type should fit. */
4145 static constexpr size_t clear_padding_unit
4146 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4147 /* Buffer size on which __builtin_clear_padding folding code works. */
4148 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4149
4150 /* Data passed through __builtin_clear_padding folding. */
4151 struct clear_padding_struct {
4152 location_t loc;
4153 /* False during __builtin_clear_padding folding, true during
4154 clear_type_padding_in_mask; then, instead of clearing the non-padding
4155 bits in the union_ptr array, clear the padding bits there. */
4156 bool clear_in_mask;
4157 tree base;
4158 tree alias_type;
4159 gimple_stmt_iterator *gsi;
4160 /* Alignment of buf->base + 0. */
4161 unsigned align;
4162 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4163 HOST_WIDE_INT off;
4164 /* Number of padding bytes before buf->off that don't have padding clear
4165 code emitted yet. */
4166 HOST_WIDE_INT padding_bytes;
4167 /* The size of the whole object. Never emit code to touch
4168 buf->base + buf->sz or following bytes. */
4169 HOST_WIDE_INT sz;
4170 /* Number of bytes recorded in buf->buf. */
4171 size_t size;
4172 /* When inside a union, instead of emitting code we AND bits into
4173 the union_ptr array. */
4174 unsigned char *union_ptr;
4175 /* Set bits mean padding bits that need to be cleared by the builtin. */
4176 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4177 };
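
/* Illustrative sketch only: for a hypothetical

     struct S { char c; int i; };

   on a target with 4-byte aligned int, clear_padding_type records the
   8 bytes of S in buf->buf as

     00 ff ff ff 00 00 00 00

   i.e. only the three padding bytes between 'c' and 'i' have all bits
   set, and clear_padding_flush emits stores (or mask updates) that
   clear exactly those bits.  */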
4178
4179 /* Emit code to clear the padding requested in BUF->buf; set bits
4180 there stand for padding bits that should be cleared. FULL is true
4181 if everything from the buffer should be flushed, otherwise
4182 it can leave up to 2 * clear_padding_unit bytes for further
4183 processing. */
4184
4185 static void
4186 clear_padding_flush (clear_padding_struct *buf, bool full)
4187 {
4188 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4189 if (!full && buf->size < 2 * clear_padding_unit)
4190 return;
4191 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4192 size_t end = buf->size;
4193 if (!full)
4194 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4195 * clear_padding_unit);
4196 size_t padding_bytes = buf->padding_bytes;
4197 if (buf->union_ptr)
4198 {
4199 if (buf->clear_in_mask)
4200 {
4201 /* During clear_type_padding_in_mask, clear the padding
4202 bits set in buf->buf in the buf->union_ptr mask. */
4203 for (size_t i = 0; i < end; i++)
4204 {
4205 if (buf->buf[i] == (unsigned char) ~0)
4206 padding_bytes++;
4207 else
4208 {
4209 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4210 0, padding_bytes);
4211 padding_bytes = 0;
4212 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4213 }
4214 }
4215 if (full)
4216 {
4217 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4218 0, padding_bytes);
4219 buf->off = 0;
4220 buf->size = 0;
4221 buf->padding_bytes = 0;
4222 }
4223 else
4224 {
4225 memmove (buf->buf, buf->buf + end, buf->size - end);
4226 buf->off += end;
4227 buf->size -= end;
4228 buf->padding_bytes = padding_bytes;
4229 }
4230 return;
4231 }
4232 /* Inside of a union, instead of emitting any code, clear all
4233 bits in the union_ptr buffer that are clear in buf. Whole
4234 padding bytes don't clear anything. */
4235 for (size_t i = 0; i < end; i++)
4236 {
4237 if (buf->buf[i] == (unsigned char) ~0)
4238 padding_bytes++;
4239 else
4240 {
4241 padding_bytes = 0;
4242 buf->union_ptr[buf->off + i] &= buf->buf[i];
4243 }
4244 }
4245 if (full)
4246 {
4247 buf->off = 0;
4248 buf->size = 0;
4249 buf->padding_bytes = 0;
4250 }
4251 else
4252 {
4253 memmove (buf->buf, buf->buf + end, buf->size - end);
4254 buf->off += end;
4255 buf->size -= end;
4256 buf->padding_bytes = padding_bytes;
4257 }
4258 return;
4259 }
4260 size_t wordsize = UNITS_PER_WORD;
4261 for (size_t i = 0; i < end; i += wordsize)
4262 {
4263 size_t nonzero_first = wordsize;
4264 size_t nonzero_last = 0;
4265 size_t zero_first = wordsize;
4266 size_t zero_last = 0;
4267 bool all_ones = true, bytes_only = true;
4268 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4269 > (unsigned HOST_WIDE_INT) buf->sz)
4270 {
4271 gcc_assert (wordsize > 1);
4272 wordsize /= 2;
4273 i -= wordsize;
4274 continue;
4275 }
4276 for (size_t j = i; j < i + wordsize && j < end; j++)
4277 {
4278 if (buf->buf[j])
4279 {
4280 if (nonzero_first == wordsize)
4281 {
4282 nonzero_first = j - i;
4283 nonzero_last = j - i;
4284 }
4285 if (nonzero_last != j - i)
4286 all_ones = false;
4287 nonzero_last = j + 1 - i;
4288 }
4289 else
4290 {
4291 if (zero_first == wordsize)
4292 zero_first = j - i;
4293 zero_last = j + 1 - i;
4294 }
4295 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4296 {
4297 all_ones = false;
4298 bytes_only = false;
4299 }
4300 }
4301 size_t padding_end = i;
4302 if (padding_bytes)
4303 {
4304 if (nonzero_first == 0
4305 && nonzero_last == wordsize
4306 && all_ones)
4307 {
4308 /* All bits are padding and we had some padding
4309 before too. Just extend it. */
4310 padding_bytes += wordsize;
4311 continue;
4312 }
4313 if (all_ones && nonzero_first == 0)
4314 {
4315 padding_bytes += nonzero_last;
4316 padding_end += nonzero_last;
4317 nonzero_first = wordsize;
4318 nonzero_last = 0;
4319 }
4320 else if (bytes_only && nonzero_first == 0)
4321 {
4322 gcc_assert (zero_first && zero_first != wordsize);
4323 padding_bytes += zero_first;
4324 padding_end += zero_first;
4325 }
4326 tree atype, src;
4327 if (padding_bytes == 1)
4328 {
4329 atype = char_type_node;
4330 src = build_zero_cst (char_type_node);
4331 }
4332 else
4333 {
4334 atype = build_array_type_nelts (char_type_node, padding_bytes);
4335 src = build_constructor (atype, NULL);
4336 }
4337 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4338 build_int_cst (buf->alias_type,
4339 buf->off + padding_end
4340 - padding_bytes));
4341 gimple *g = gimple_build_assign (dst, src);
4342 gimple_set_location (g, buf->loc);
4343 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4344 padding_bytes = 0;
4345 buf->padding_bytes = 0;
4346 }
4347 if (nonzero_first == wordsize)
4348 /* All bits in a word are 0, there are no padding bits. */
4349 continue;
4350 if (all_ones && nonzero_last == wordsize)
4351 {
4352 /* All bits between nonzero_first and end of word are padding
4353 bits, start counting padding_bytes. */
4354 padding_bytes = nonzero_last - nonzero_first;
4355 continue;
4356 }
4357 if (bytes_only)
4358 {
4359 /* If bitfields aren't involved in this word, prefer storing
4360 individual bytes or groups of them over performing a RMW
4361 operation on the whole word. */
4362 gcc_assert (i + zero_last <= end);
4363 for (size_t j = padding_end; j < i + zero_last; j++)
4364 {
4365 if (buf->buf[j])
4366 {
4367 size_t k;
4368 for (k = j; k < i + zero_last; k++)
4369 if (buf->buf[k] == 0)
4370 break;
4371 HOST_WIDE_INT off = buf->off + j;
4372 tree atype, src;
4373 if (k - j == 1)
4374 {
4375 atype = char_type_node;
4376 src = build_zero_cst (char_type_node);
4377 }
4378 else
4379 {
4380 atype = build_array_type_nelts (char_type_node, k - j);
4381 src = build_constructor (atype, NULL);
4382 }
4383 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4384 buf->base,
4385 build_int_cst (buf->alias_type, off));
4386 gimple *g = gimple_build_assign (dst, src);
4387 gimple_set_location (g, buf->loc);
4388 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4389 j = k;
4390 }
4391 }
4392 if (nonzero_last == wordsize)
4393 padding_bytes = nonzero_last - zero_last;
4394 continue;
4395 }
4396 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4397 {
4398 if (nonzero_last - nonzero_first <= eltsz
4399 && ((nonzero_first & ~(eltsz - 1))
4400 == ((nonzero_last - 1) & ~(eltsz - 1))))
4401 {
4402 tree type;
4403 if (eltsz == 1)
4404 type = char_type_node;
4405 else
4406 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4407 0);
4408 size_t start = nonzero_first & ~(eltsz - 1);
4409 HOST_WIDE_INT off = buf->off + i + start;
4410 tree atype = type;
4411 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4412 atype = build_aligned_type (type, buf->align);
4413 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4414 build_int_cst (buf->alias_type, off));
4415 tree src;
4416 gimple *g;
4417 if (all_ones
4418 && nonzero_first == start
4419 && nonzero_last == start + eltsz)
4420 src = build_zero_cst (type);
4421 else
4422 {
4423 src = make_ssa_name (type);
4424 g = gimple_build_assign (src, unshare_expr (dst));
4425 gimple_set_location (g, buf->loc);
4426 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4427 tree mask = native_interpret_expr (type,
4428 buf->buf + i + start,
4429 eltsz);
4430 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4431 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4432 tree src_masked = make_ssa_name (type);
4433 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4434 src, mask);
4435 gimple_set_location (g, buf->loc);
4436 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4437 src = src_masked;
4438 }
4439 g = gimple_build_assign (dst, src);
4440 gimple_set_location (g, buf->loc);
4441 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4442 break;
4443 }
4444 }
4445 }
4446 if (full)
4447 {
4448 if (padding_bytes)
4449 {
4450 tree atype, src;
4451 if (padding_bytes == 1)
4452 {
4453 atype = char_type_node;
4454 src = build_zero_cst (char_type_node);
4455 }
4456 else
4457 {
4458 atype = build_array_type_nelts (char_type_node, padding_bytes);
4459 src = build_constructor (atype, NULL);
4460 }
4461 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4462 build_int_cst (buf->alias_type,
4463 buf->off + end
4464 - padding_bytes));
4465 gimple *g = gimple_build_assign (dst, src);
4466 gimple_set_location (g, buf->loc);
4467 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4468 }
4469 size_t end_rem = end % UNITS_PER_WORD;
4470 buf->off += end - end_rem;
4471 buf->size = end_rem;
4472 memset (buf->buf, 0, buf->size);
4473 buf->padding_bytes = 0;
4474 }
4475 else
4476 {
4477 memmove (buf->buf, buf->buf + end, buf->size - end);
4478 buf->off += end;
4479 buf->size -= end;
4480 buf->padding_bytes = padding_bytes;
4481 }
4482 }
4483
4484 /* Append PADDING_BYTES padding bytes. */
4485
4486 static void
4487 clear_padding_add_padding (clear_padding_struct *buf,
4488 HOST_WIDE_INT padding_bytes)
4489 {
4490 if (padding_bytes == 0)
4491 return;
4492 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4493 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4494 clear_padding_flush (buf, false);
4495 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4496 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4497 {
4498 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4499 padding_bytes -= clear_padding_buf_size - buf->size;
4500 buf->size = clear_padding_buf_size;
4501 clear_padding_flush (buf, false);
4502 gcc_assert (buf->padding_bytes);
4503 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4504 is guaranteed to be all ones. */
4505 padding_bytes += buf->size;
4506 buf->size = padding_bytes % UNITS_PER_WORD;
4507 memset (buf->buf, ~0, buf->size);
4508 buf->off += padding_bytes - buf->size;
4509 buf->padding_bytes += padding_bytes - buf->size;
4510 }
4511 else
4512 {
4513 memset (buf->buf + buf->size, ~0, padding_bytes);
4514 buf->size += padding_bytes;
4515 }
4516 }
4517
4518 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4519
4520 /* Clear padding bits of union type TYPE. */
4521
4522 static void
4523 clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4524 {
4525 clear_padding_struct *union_buf;
4526 HOST_WIDE_INT start_off = 0, next_off = 0;
4527 size_t start_size = 0;
4528 if (buf->union_ptr)
4529 {
4530 start_off = buf->off + buf->size;
4531 next_off = start_off + sz;
4532 start_size = start_off % UNITS_PER_WORD;
4533 start_off -= start_size;
4534 clear_padding_flush (buf, true);
4535 union_buf = buf;
4536 }
4537 else
4538 {
4539 if (sz + buf->size > clear_padding_buf_size)
4540 clear_padding_flush (buf, false);
4541 union_buf = XALLOCA (clear_padding_struct);
4542 union_buf->loc = buf->loc;
4543 union_buf->clear_in_mask = buf->clear_in_mask;
4544 union_buf->base = NULL_TREE;
4545 union_buf->alias_type = NULL_TREE;
4546 union_buf->gsi = NULL;
4547 union_buf->align = 0;
4548 union_buf->off = 0;
4549 union_buf->padding_bytes = 0;
4550 union_buf->sz = sz;
4551 union_buf->size = 0;
4552 if (sz + buf->size <= clear_padding_buf_size)
4553 union_buf->union_ptr = buf->buf + buf->size;
4554 else
4555 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4556 memset (union_buf->union_ptr, ~0, sz);
4557 }
4558
4559 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4560 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4561 {
4562 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4563 {
4564 if (TREE_TYPE (field) == error_mark_node)
4565 continue;
4566 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4567 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4568 if (!buf->clear_in_mask)
4569 error_at (buf->loc, "flexible array member %qD does not have "
4570 "well defined padding bits for %qs",
4571 field, "__builtin_clear_padding");
4572 continue;
4573 }
4574 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4575 gcc_assert (union_buf->size == 0);
4576 union_buf->off = start_off;
4577 union_buf->size = start_size;
4578 memset (union_buf->buf, ~0, start_size);
4579 clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4580 clear_padding_add_padding (union_buf, sz - fldsz);
4581 clear_padding_flush (union_buf, true);
4582 }
4583
4584 if (buf == union_buf)
4585 {
4586 buf->off = next_off;
4587 buf->size = next_off % UNITS_PER_WORD;
4588 buf->off -= buf->size;
4589 memset (buf->buf, ~0, buf->size);
4590 }
4591 else if (sz + buf->size <= clear_padding_buf_size)
4592 buf->size += sz;
4593 else
4594 {
4595 unsigned char *union_ptr = union_buf->union_ptr;
4596 while (sz)
4597 {
4598 clear_padding_flush (buf, false);
4599 HOST_WIDE_INT this_sz
4600 = MIN ((unsigned HOST_WIDE_INT) sz,
4601 clear_padding_buf_size - buf->size);
4602 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4603 buf->size += this_sz;
4604 union_ptr += this_sz;
4605 sz -= this_sz;
4606 }
4607 XDELETE (union_buf->union_ptr);
4608 }
4609 }
4610
4611 /* The only known floating point formats with padding bits are the
4612 IEEE extended ones. */
4613
4614 static bool
4615 clear_padding_real_needs_padding_p (tree type)
4616 {
4617 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4618 return (fmt->b == 2
4619 && fmt->signbit_ro == fmt->signbit_rw
4620 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4621 }
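
/* Illustrative sketch only: the test above matches e.g. the x86 80-bit
   extended format, whose sign bit sits at bit 79 while the value is
   stored in a 12- or 16-byte slot, and the m68k extended format with
   the sign bit at bit 95; bits not belonging to the value are padding
   that the builtin must clear.  */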
4622
4623 /* Return true if TYPE might contain any padding bits. */
4624
4625 static bool
4626 clear_padding_type_may_have_padding_p (tree type)
4627 {
4628 switch (TREE_CODE (type))
4629 {
4630 case RECORD_TYPE:
4631 case UNION_TYPE:
4632 return true;
4633 case ARRAY_TYPE:
4634 case COMPLEX_TYPE:
4635 case VECTOR_TYPE:
4636 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4637 case REAL_TYPE:
4638 return clear_padding_real_needs_padding_p (type);
4639 default:
4640 return false;
4641 }
4642 }
4643
4644 /* Emit a runtime loop:
4645 for (; buf.base != end; buf.base += sz)
4646 __builtin_clear_padding (buf.base); */
4647
4648 static void
4649 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4650 {
4651 tree l1 = create_artificial_label (buf->loc);
4652 tree l2 = create_artificial_label (buf->loc);
4653 tree l3 = create_artificial_label (buf->loc);
4654 gimple *g = gimple_build_goto (l2);
4655 gimple_set_location (g, buf->loc);
4656 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4657 g = gimple_build_label (l1);
4658 gimple_set_location (g, buf->loc);
4659 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4660 clear_padding_type (buf, type, buf->sz);
4661 clear_padding_flush (buf, true);
4662 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4663 size_int (buf->sz));
4664 gimple_set_location (g, buf->loc);
4665 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4666 g = gimple_build_label (l2);
4667 gimple_set_location (g, buf->loc);
4668 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4669 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4670 gimple_set_location (g, buf->loc);
4671 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4672 g = gimple_build_label (l3);
4673 gimple_set_location (g, buf->loc);
4674 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4675 }
4676
4677 /* Clear padding bits for TYPE. Called recursively from
4678 gimple_fold_builtin_clear_padding. */
4679
4680 static void
4681 clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4682 {
4683 switch (TREE_CODE (type))
4684 {
4685 case RECORD_TYPE:
4686 HOST_WIDE_INT cur_pos;
4687 cur_pos = 0;
4688 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4689 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4690 {
4691 tree ftype = TREE_TYPE (field);
4692 if (DECL_BIT_FIELD (field))
4693 {
4694 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4695 if (fldsz == 0)
4696 continue;
4697 HOST_WIDE_INT pos = int_byte_position (field);
4698 HOST_WIDE_INT bpos
4699 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4700 bpos %= BITS_PER_UNIT;
4701 HOST_WIDE_INT end
4702 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4703 if (pos + end > cur_pos)
4704 {
4705 clear_padding_add_padding (buf, pos + end - cur_pos);
4706 cur_pos = pos + end;
4707 }
4708 gcc_assert (cur_pos > pos
4709 && ((unsigned HOST_WIDE_INT) buf->size
4710 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4711 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4712 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4713 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4714 " in %qs", "__builtin_clear_padding");
4715 else if (BYTES_BIG_ENDIAN)
4716 {
4717 /* Big endian. */
4718 if (bpos + fldsz <= BITS_PER_UNIT)
4719 *p &= ~(((1 << fldsz) - 1)
4720 << (BITS_PER_UNIT - bpos - fldsz));
4721 else
4722 {
4723 if (bpos)
4724 {
4725 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4726 p++;
4727 fldsz -= BITS_PER_UNIT - bpos;
4728 }
4729 memset (p, 0, fldsz / BITS_PER_UNIT);
4730 p += fldsz / BITS_PER_UNIT;
4731 fldsz %= BITS_PER_UNIT;
4732 if (fldsz)
4733 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4734 }
4735 }
4736 else
4737 {
4738 /* Little endian. */
4739 if (bpos + fldsz <= BITS_PER_UNIT)
4740 *p &= ~(((1 << fldsz) - 1) << bpos);
4741 else
4742 {
4743 if (bpos)
4744 {
4745 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4746 p++;
4747 fldsz -= BITS_PER_UNIT - bpos;
4748 }
4749 memset (p, 0, fldsz / BITS_PER_UNIT);
4750 p += fldsz / BITS_PER_UNIT;
4751 fldsz %= BITS_PER_UNIT;
4752 if (fldsz)
4753 *p &= ~((1 << fldsz) - 1);
4754 }
4755 }
4756 }
4757 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4758 {
4759 if (ftype == error_mark_node)
4760 continue;
4761 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4762 && !COMPLETE_TYPE_P (ftype));
4763 if (!buf->clear_in_mask)
4764 error_at (buf->loc, "flexible array member %qD does not "
4765 "have well defined padding bits for %qs",
4766 field, "__builtin_clear_padding");
4767 }
4768 else if (is_empty_type (TREE_TYPE (field)))
4769 continue;
4770 else
4771 {
4772 HOST_WIDE_INT pos = int_byte_position (field);
4773 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4774 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4775 clear_padding_add_padding (buf, pos - cur_pos);
4776 cur_pos = pos;
4777 clear_padding_type (buf, TREE_TYPE (field), fldsz);
4778 cur_pos += fldsz;
4779 }
4780 }
4781 gcc_assert (sz >= cur_pos);
4782 clear_padding_add_padding (buf, sz - cur_pos);
4783 break;
4784 case ARRAY_TYPE:
4785 HOST_WIDE_INT nelts, fldsz;
4786 fldsz = int_size_in_bytes (TREE_TYPE (type));
4787 if (fldsz == 0)
4788 break;
4789 nelts = sz / fldsz;
4790 if (nelts > 1
4791 && sz > 8 * UNITS_PER_WORD
4792 && buf->union_ptr == NULL
4793 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4794 {
4795 /* For a sufficiently large array of more than one element,
4796 emit a runtime loop to keep code size manageable. */
4797 tree base = buf->base;
4798 unsigned int prev_align = buf->align;
4799 HOST_WIDE_INT off = buf->off + buf->size;
4800 HOST_WIDE_INT prev_sz = buf->sz;
4801 clear_padding_flush (buf, true);
4802 tree elttype = TREE_TYPE (type);
4803 buf->base = create_tmp_var (build_pointer_type (elttype));
4804 tree end = make_ssa_name (TREE_TYPE (buf->base));
4805 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4806 base, size_int (off));
4807 gimple_set_location (g, buf->loc);
4808 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4809 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4810 size_int (sz));
4811 gimple_set_location (g, buf->loc);
4812 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4813 buf->sz = fldsz;
4814 buf->align = TYPE_ALIGN (elttype);
4815 buf->off = 0;
4816 buf->size = 0;
4817 clear_padding_emit_loop (buf, elttype, end);
4818 buf->base = base;
4819 buf->sz = prev_sz;
4820 buf->align = prev_align;
4821 buf->size = off % UNITS_PER_WORD;
4822 buf->off = off - buf->size;
4823 memset (buf->buf, 0, buf->size);
4824 break;
4825 }
4826 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4827 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4828 break;
4829 case UNION_TYPE:
4830 clear_padding_union (buf, type, sz);
4831 break;
4832 case REAL_TYPE:
4833 gcc_assert ((size_t) sz <= clear_padding_unit);
4834 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4835 clear_padding_flush (buf, false);
4836 if (clear_padding_real_needs_padding_p (type))
4837 {
4838 /* Use native_interpret_expr + native_encode_expr to figure out
4839 which bits are padding. */
4840 memset (buf->buf + buf->size, ~0, sz);
4841 tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
4842 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4843 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4844 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4845 for (size_t i = 0; i < (size_t) sz; i++)
4846 buf->buf[buf->size + i] ^= ~0;
4847 }
4848 else
4849 memset (buf->buf + buf->size, 0, sz);
4850 buf->size += sz;
4851 break;
4852 case COMPLEX_TYPE:
4853 fldsz = int_size_in_bytes (TREE_TYPE (type));
4854 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4855 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4856 break;
4857 case VECTOR_TYPE:
4858 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4859 fldsz = int_size_in_bytes (TREE_TYPE (type));
4860 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4861 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4862 break;
4863 case NULLPTR_TYPE:
4864 gcc_assert ((size_t) sz <= clear_padding_unit);
4865 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4866 clear_padding_flush (buf, false);
4867 memset (buf->buf + buf->size, ~0, sz);
4868 buf->size += sz;
4869 break;
4870 default:
4871 gcc_assert ((size_t) sz <= clear_padding_unit);
4872 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4873 clear_padding_flush (buf, false);
4874 memset (buf->buf + buf->size, 0, sz);
4875 buf->size += sz;
4876 break;
4877 }
4878 }
4879
4880 /* Clear padding bits of TYPE in MASK. */
4881
4882 void
4883 clear_type_padding_in_mask (tree type, unsigned char *mask)
4884 {
4885 clear_padding_struct buf;
4886 buf.loc = UNKNOWN_LOCATION;
4887 buf.clear_in_mask = true;
4888 buf.base = NULL_TREE;
4889 buf.alias_type = NULL_TREE;
4890 buf.gsi = NULL;
4891 buf.align = 0;
4892 buf.off = 0;
4893 buf.padding_bytes = 0;
4894 buf.sz = int_size_in_bytes (type);
4895 buf.size = 0;
4896 buf.union_ptr = mask;
4897 clear_padding_type (&buf, type, buf.sz);
4898 clear_padding_flush (&buf, true);
4899 }
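
/* Illustrative sketch only: a caller would typically start from an
   all-ones mask, e.g.

     unsigned char mask[sizeof (struct S)];
     memset (mask, ~0, sizeof mask);
     clear_type_padding_in_mask (s_type, mask);

   after which the bits that are padding in struct S are zero in MASK,
   so MASK can be ANDed with object representations before comparing
   them.  s_type stands for the tree of struct S here.  */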
4900
4901 /* Fold a call to the __builtin_clear_padding builtin. */
4902
4903 static bool
4904 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4905 {
4906 gimple *stmt = gsi_stmt (*gsi);
4907 gcc_assert (gimple_call_num_args (stmt) == 2);
4908 tree ptr = gimple_call_arg (stmt, 0);
4909 tree typearg = gimple_call_arg (stmt, 1);
4910 tree type = TREE_TYPE (TREE_TYPE (typearg));
4911 location_t loc = gimple_location (stmt);
4912 clear_padding_struct buf;
4913 gimple_stmt_iterator gsiprev = *gsi;
4914 /* This should be folded during the GIMPLE lowering pass. */
4915 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4916 gcc_assert (COMPLETE_TYPE_P (type));
4917 gsi_prev (&gsiprev);
4918
4919 buf.loc = loc;
4920 buf.clear_in_mask = false;
4921 buf.base = ptr;
4922 buf.alias_type = NULL_TREE;
4923 buf.gsi = gsi;
4924 buf.align = get_pointer_alignment (ptr);
4925 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4926 buf.align = MAX (buf.align, talign);
4927 buf.off = 0;
4928 buf.padding_bytes = 0;
4929 buf.size = 0;
4930 buf.sz = int_size_in_bytes (type);
4931 buf.union_ptr = NULL;
4932 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4933 sorry_at (loc, "%s not supported for variable length aggregates",
4934 "__builtin_clear_padding");
4935 /* The implementation currently assumes 8-bit host and target chars,
4936 which is the case for all currently supported targets and hosts and
4937 is required e.g. by the native_{encode,interpret}* APIs. */
4938 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4939 sorry_at (loc, "%s not supported on this target",
4940 "__builtin_clear_padding");
4941 else if (!clear_padding_type_may_have_padding_p (type))
4942 ;
4943 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4944 {
4945 tree sz = TYPE_SIZE_UNIT (type);
4946 tree elttype = type;
4947 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4948 while (TREE_CODE (elttype) == ARRAY_TYPE
4949 && int_size_in_bytes (elttype) < 0)
4950 elttype = TREE_TYPE (elttype);
4951 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4952 gcc_assert (eltsz >= 0);
4953 if (eltsz)
4954 {
4955 buf.base = create_tmp_var (build_pointer_type (elttype));
4956 tree end = make_ssa_name (TREE_TYPE (buf.base));
4957 gimple *g = gimple_build_assign (buf.base, ptr);
4958 gimple_set_location (g, loc);
4959 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4960 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4961 gimple_set_location (g, loc);
4962 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4963 buf.sz = eltsz;
4964 buf.align = TYPE_ALIGN (elttype);
4965 buf.alias_type = build_pointer_type (elttype);
4966 clear_padding_emit_loop (&buf, elttype, end);
4967 }
4968 }
4969 else
4970 {
4971 if (!is_gimple_mem_ref_addr (buf.base))
4972 {
4973 buf.base = make_ssa_name (TREE_TYPE (ptr));
4974 gimple *g = gimple_build_assign (buf.base, ptr);
4975 gimple_set_location (g, loc);
4976 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4977 }
4978 buf.alias_type = build_pointer_type (type);
4979 clear_padding_type (&buf, type, buf.sz);
4980 clear_padding_flush (&buf, true);
4981 }
4982
4983 gimple_stmt_iterator gsiprev2 = *gsi;
4984 gsi_prev (&gsiprev2);
4985 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4986 gsi_replace (gsi, gimple_build_nop (), true);
4987 else
4988 {
4989 gsi_remove (gsi, true);
4990 *gsi = gsiprev2;
4991 }
4992 return true;
4993 }
4994
4995 /* Fold the non-target builtin at *GSI and return whether any simplification
4996 was made. */
4997
4998 static bool
4999 gimple_fold_builtin (gimple_stmt_iterator *gsi)
5000 {
5001 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
5002 tree callee = gimple_call_fndecl (stmt);
5003
5004 /* Give up for always_inline inline builtins until they are
5005 inlined. */
5006 if (avoid_folding_inline_builtin (callee))
5007 return false;
5008
5009 unsigned n = gimple_call_num_args (stmt);
5010 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5011 switch (fcode)
5012 {
5013 case BUILT_IN_BCMP:
5014 return gimple_fold_builtin_bcmp (gsi);
5015 case BUILT_IN_BCOPY:
5016 return gimple_fold_builtin_bcopy (gsi);
5017 case BUILT_IN_BZERO:
5018 return gimple_fold_builtin_bzero (gsi);
5019
5020 case BUILT_IN_MEMSET:
5021 return gimple_fold_builtin_memset (gsi,
5022 gimple_call_arg (stmt, 1),
5023 gimple_call_arg (stmt, 2));
5024 case BUILT_IN_MEMCPY:
5025 case BUILT_IN_MEMPCPY:
5026 case BUILT_IN_MEMMOVE:
5027 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5028 gimple_call_arg (stmt, 1), fcode);
5029 case BUILT_IN_SPRINTF_CHK:
5030 case BUILT_IN_VSPRINTF_CHK:
5031 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5032 case BUILT_IN_STRCAT_CHK:
5033 return gimple_fold_builtin_strcat_chk (gsi);
5034 case BUILT_IN_STRNCAT_CHK:
5035 return gimple_fold_builtin_strncat_chk (gsi);
5036 case BUILT_IN_STRLEN:
5037 return gimple_fold_builtin_strlen (gsi);
5038 case BUILT_IN_STRCPY:
5039 return gimple_fold_builtin_strcpy (gsi,
5040 gimple_call_arg (stmt, 0),
5041 gimple_call_arg (stmt, 1));
5042 case BUILT_IN_STRNCPY:
5043 return gimple_fold_builtin_strncpy (gsi,
5044 gimple_call_arg (stmt, 0),
5045 gimple_call_arg (stmt, 1),
5046 gimple_call_arg (stmt, 2));
5047 case BUILT_IN_STRCAT:
5048 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5049 gimple_call_arg (stmt, 1));
5050 case BUILT_IN_STRNCAT:
5051 return gimple_fold_builtin_strncat (gsi);
5052 case BUILT_IN_INDEX:
5053 case BUILT_IN_STRCHR:
5054 return gimple_fold_builtin_strchr (gsi, false);
5055 case BUILT_IN_RINDEX:
5056 case BUILT_IN_STRRCHR:
5057 return gimple_fold_builtin_strchr (gsi, true);
5058 case BUILT_IN_STRSTR:
5059 return gimple_fold_builtin_strstr (gsi);
5060 case BUILT_IN_STRCMP:
5061 case BUILT_IN_STRCMP_EQ:
5062 case BUILT_IN_STRCASECMP:
5063 case BUILT_IN_STRNCMP:
5064 case BUILT_IN_STRNCMP_EQ:
5065 case BUILT_IN_STRNCASECMP:
5066 return gimple_fold_builtin_string_compare (gsi);
5067 case BUILT_IN_MEMCHR:
5068 return gimple_fold_builtin_memchr (gsi);
5069 case BUILT_IN_FPUTS:
5070 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5071 gimple_call_arg (stmt, 1), false);
5072 case BUILT_IN_FPUTS_UNLOCKED:
5073 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5074 gimple_call_arg (stmt, 1), true);
5075 case BUILT_IN_MEMCPY_CHK:
5076 case BUILT_IN_MEMPCPY_CHK:
5077 case BUILT_IN_MEMMOVE_CHK:
5078 case BUILT_IN_MEMSET_CHK:
5079 return gimple_fold_builtin_memory_chk (gsi,
5080 gimple_call_arg (stmt, 0),
5081 gimple_call_arg (stmt, 1),
5082 gimple_call_arg (stmt, 2),
5083 gimple_call_arg (stmt, 3),
5084 fcode);
5085 case BUILT_IN_STPCPY:
5086 return gimple_fold_builtin_stpcpy (gsi);
5087 case BUILT_IN_STRCPY_CHK:
5088 case BUILT_IN_STPCPY_CHK:
5089 return gimple_fold_builtin_stxcpy_chk (gsi,
5090 gimple_call_arg (stmt, 0),
5091 gimple_call_arg (stmt, 1),
5092 gimple_call_arg (stmt, 2),
5093 fcode);
5094 case BUILT_IN_STRNCPY_CHK:
5095 case BUILT_IN_STPNCPY_CHK:
5096 return gimple_fold_builtin_stxncpy_chk (gsi,
5097 gimple_call_arg (stmt, 0),
5098 gimple_call_arg (stmt, 1),
5099 gimple_call_arg (stmt, 2),
5100 gimple_call_arg (stmt, 3),
5101 fcode);
5102 case BUILT_IN_SNPRINTF_CHK:
5103 case BUILT_IN_VSNPRINTF_CHK:
5104 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5105
5106 case BUILT_IN_FPRINTF:
5107 case BUILT_IN_FPRINTF_UNLOCKED:
5108 case BUILT_IN_VFPRINTF:
5109 if (n == 2 || n == 3)
5110 return gimple_fold_builtin_fprintf (gsi,
5111 gimple_call_arg (stmt, 0),
5112 gimple_call_arg (stmt, 1),
5113 n == 3
5114 ? gimple_call_arg (stmt, 2)
5115 : NULL_TREE,
5116 fcode);
5117 break;
5118 case BUILT_IN_FPRINTF_CHK:
5119 case BUILT_IN_VFPRINTF_CHK:
5120 if (n == 3 || n == 4)
5121 return gimple_fold_builtin_fprintf (gsi,
5122 gimple_call_arg (stmt, 0),
5123 gimple_call_arg (stmt, 2),
5124 n == 4
5125 ? gimple_call_arg (stmt, 3)
5126 : NULL_TREE,
5127 fcode);
5128 break;
5129 case BUILT_IN_PRINTF:
5130 case BUILT_IN_PRINTF_UNLOCKED:
5131 case BUILT_IN_VPRINTF:
5132 if (n == 1 || n == 2)
5133 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5134 n == 2
5135 ? gimple_call_arg (stmt, 1)
5136 : NULL_TREE, fcode);
5137 break;
5138 case BUILT_IN_PRINTF_CHK:
5139 case BUILT_IN_VPRINTF_CHK:
5140 if (n == 2 || n == 3)
5141 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5142 n == 3
5143 ? gimple_call_arg (stmt, 2)
5144 : NULL_TREE, fcode);
5145 break;
5146 case BUILT_IN_ACC_ON_DEVICE:
5147 return gimple_fold_builtin_acc_on_device (gsi,
5148 gimple_call_arg (stmt, 0));
5149 case BUILT_IN_REALLOC:
5150 return gimple_fold_builtin_realloc (gsi);
5151
5152 case BUILT_IN_CLEAR_PADDING:
5153 return gimple_fold_builtin_clear_padding (gsi);
5154
5155 default:;
5156 }
5157
5158 /* Try the generic builtin folder. */
5159 bool ignore = (gimple_call_lhs (stmt) == NULL);
5160 tree result = fold_call_stmt (stmt, ignore);
5161 if (result)
5162 {
5163 if (ignore)
5164 STRIP_NOPS (result);
5165 else
5166 result = fold_convert (gimple_call_return_type (stmt), result);
5167 gimplify_and_update_call_from_tree (gsi, result);
5168 return true;
5169 }
5170
5171 return false;
5172 }
5173
5174 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5175 function calls to constants, where possible. */
5176
5177 static tree
5178 fold_internal_goacc_dim (const gimple *call)
5179 {
5180 int axis = oacc_get_ifn_dim_arg (call);
5181 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5182 tree result = NULL_TREE;
5183 tree type = TREE_TYPE (gimple_call_lhs (call));
5184
5185 switch (gimple_call_internal_fn (call))
5186 {
5187 case IFN_GOACC_DIM_POS:
5188 /* If the size is 1, we know the answer. */
5189 if (size == 1)
5190 result = build_int_cst (type, 0);
5191 break;
5192 case IFN_GOACC_DIM_SIZE:
5193 /* If the size is not dynamic, we know the answer. */
5194 if (size)
5195 result = build_int_cst (type, size);
5196 break;
5197 default:
5198 break;
5199 }
5200
5201 return result;
5202 }
5203
5204 /* Return true if STMT is an __atomic_compare_exchange_N call that is
5205 suitable for conversion into IFN_ATOMIC_COMPARE_EXCHANGE when the second
5206 argument is &var and var is addressable only because of such calls. */
5207
5208 bool
5209 optimize_atomic_compare_exchange_p (gimple *stmt)
5210 {
5211 if (gimple_call_num_args (stmt) != 6
5212 || !flag_inline_atomics
5213 || !optimize
5214 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5215 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5216 || !gimple_vdef (stmt)
5217 || !gimple_vuse (stmt))
5218 return false;
5219
5220 tree fndecl = gimple_call_fndecl (stmt);
5221 switch (DECL_FUNCTION_CODE (fndecl))
5222 {
5223 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5224 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5225 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5226 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5227 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5228 break;
5229 default:
5230 return false;
5231 }
5232
5233 tree expected = gimple_call_arg (stmt, 1);
5234 if (TREE_CODE (expected) != ADDR_EXPR
5235 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5236 return false;
5237
5238 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5239 if (!is_gimple_reg_type (etype)
5240 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5241 || TREE_THIS_VOLATILE (etype)
5242 || VECTOR_TYPE_P (etype)
5243 || TREE_CODE (etype) == COMPLEX_TYPE
5244 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5245 might not preserve all the bits. See PR71716. */
5246 || SCALAR_FLOAT_TYPE_P (etype)
5247 || maybe_ne (TYPE_PRECISION (etype),
5248 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5249 return false;
5250
5251 tree weak = gimple_call_arg (stmt, 3);
5252 if (!integer_zerop (weak) && !integer_onep (weak))
5253 return false;
5254
5255 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5256 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5257 machine_mode mode = TYPE_MODE (itype);
5258
5259 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5260 == CODE_FOR_nothing
5261 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5262 return false;
5263
5264 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5265 return false;
5266
5267 return true;
5268 }
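
/* Illustrative sketch only: a typical qualifying call comes from
   source like

     long e = ...;
     bool r = __atomic_compare_exchange_n (p, &e, d, 0,
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);

   where 'e' is a function-local scalar whose address doesn't escape
   otherwise; the transformation below then lets 'e' stay a register.  */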
5269
5270 /* Fold
5271 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5272 into
5273 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5274 i = IMAGPART_EXPR <t>;
5275 r = (_Bool) i;
5276 e = REALPART_EXPR <t>; */
5277
5278 void
5279 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5280 {
5281 gimple *stmt = gsi_stmt (*gsi);
5282 tree fndecl = gimple_call_fndecl (stmt);
5283 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5284 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5285 tree ctype = build_complex_type (itype);
5286 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5287 bool throws = false;
5288 edge e = NULL;
5289 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5290 expected);
5291 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5292 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5293 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5294 {
5295 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5296 build1 (VIEW_CONVERT_EXPR, itype,
5297 gimple_assign_lhs (g)));
5298 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5299 }
5300 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5301 + int_size_in_bytes (itype);
5302 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5303 gimple_call_arg (stmt, 0),
5304 gimple_assign_lhs (g),
5305 gimple_call_arg (stmt, 2),
5306 build_int_cst (integer_type_node, flag),
5307 gimple_call_arg (stmt, 4),
5308 gimple_call_arg (stmt, 5));
5309 tree lhs = make_ssa_name (ctype);
5310 gimple_call_set_lhs (g, lhs);
5311 gimple_move_vops (g, stmt);
5312 tree oldlhs = gimple_call_lhs (stmt);
5313 if (stmt_can_throw_internal (cfun, stmt))
5314 {
5315 throws = true;
5316 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5317 }
5318 gimple_call_set_nothrow (as_a <gcall *> (g),
5319 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5320 gimple_call_set_lhs (stmt, NULL_TREE);
5321 gsi_replace (gsi, g, true);
5322 if (oldlhs)
5323 {
5324 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5325 build1 (IMAGPART_EXPR, itype, lhs));
5326 if (throws)
5327 {
5328 gsi_insert_on_edge_immediate (e, g);
5329 *gsi = gsi_for_stmt (g);
5330 }
5331 else
5332 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5333 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5334 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5335 }
5336 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5337 build1 (REALPART_EXPR, itype, lhs));
5338 if (throws && oldlhs == NULL_TREE)
5339 {
5340 gsi_insert_on_edge_immediate (e, g);
5341 *gsi = gsi_for_stmt (g);
5342 }
5343 else
5344 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5345 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5346 {
5347 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5348 VIEW_CONVERT_EXPR,
5349 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5350 gimple_assign_lhs (g)));
5351 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5352 }
5353 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5354 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5355 *gsi = gsiret;
5356 }
5357
5358 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5359 doesn't fit into TYPE. The overflow test is performed regardless of
5360 -fwrapv, and even for unsigned types. */
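
/* A worked example (illustrative): for a 32-bit unsigned TYPE,
   0xffffffff + 1 evaluated in infinite signed precision is 2^32,
   which needs 33 bits, so arith_overflowed_p returns true even
   though the wrapped result 0 is representable in TYPE. */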
5361
5362 bool
5363 arith_overflowed_p (enum tree_code code, const_tree type,
5364 const_tree arg0, const_tree arg1)
5365 {
5366 widest2_int warg0 = widest2_int_cst (arg0);
5367 widest2_int warg1 = widest2_int_cst (arg1);
5368 widest2_int wres;
5369 switch (code)
5370 {
5371 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5372 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5373 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5374 default: gcc_unreachable ();
5375 }
5376 signop sign = TYPE_SIGN (type);
5377 if (sign == UNSIGNED && wi::neg_p (wres))
5378 return true;
5379 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5380 }
5381
5382 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5383 for the memory it references, otherwise return null. VECTYPE is the
5384 type of the memory vector. */
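
/* For example (a sketch, assuming a vector(4) int access): the call

     _5 = .MASK_LOAD (ptr_3, 32B, { -1, -1, -1, -1 });

   has an all-ones mask and hence is unconditional, so it can be
   expressed as the plain aligned reference

     _5 = MEM <vector(4) int> [(int *)ptr_3]; */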
5385
5386 static tree
5387 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5388 {
5389 tree ptr = gimple_call_arg (call, 0);
5390 tree alias_align = gimple_call_arg (call, 1);
5391 tree mask = gimple_call_arg (call, 2);
5392 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5393 return NULL_TREE;
5394
5395 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5396 if (TYPE_ALIGN (vectype) != align)
5397 vectype = build_aligned_type (vectype, align);
5398 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5399 return fold_build2 (MEM_REF, vectype, ptr, offset);
5400 }
5401
5402 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5403
5404 static bool
5405 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5406 {
5407 tree lhs = gimple_call_lhs (call);
5408 if (!lhs)
5409 return false;
5410
5411 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5412 {
5413 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5414 gimple_set_location (new_stmt, gimple_location (call));
5415 gimple_move_vops (new_stmt, call);
5416 gsi_replace (gsi, new_stmt, false);
5417 return true;
5418 }
5419 return false;
5420 }
5421
5422 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5423
5424 static bool
5425 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5426 {
5427 tree rhs = gimple_call_arg (call, 3);
5428 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5429 {
5430 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5431 gimple_set_location (new_stmt, gimple_location (call));
5432 gimple_move_vops (new_stmt, call);
5433 gsi_replace (gsi, new_stmt, false);
5434 return true;
5435 }
5436 return false;
5437 }
5438
5439 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5440 The statement may be replaced by another statement, e.g., if the call
5441 simplifies to a constant value. Return true if any changes were made.
5442 It is assumed that the operands have been previously folded. */
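
/* Two illustrative cases (sketches, not exhaustive): a virtual call
   through an OBJ_TYPE_REF whose only possible target is S::foo is
   rewritten into a direct call to S::foo, and a builtin call such as
   strlen ("abc") is folded to the constant 3 by gimple_fold_builtin. */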
5443
5444 static bool
5445 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5446 {
5447 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5448 tree callee;
5449 bool changed = false;
5450
5451 /* Check for virtual calls that became direct calls. */
5452 callee = gimple_call_fn (stmt);
5453 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5454 {
5455 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5456 {
5457 if (dump_file && virtual_method_call_p (callee)
5458 && !possible_polymorphic_call_target_p
5459 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5460 (OBJ_TYPE_REF_EXPR (callee)))))
5461 {
5462 fprintf (dump_file,
5463 "Type inheritance inconsistent devirtualization of ");
5464 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5465 fprintf (dump_file, " to ");
5466 print_generic_expr (dump_file, callee, TDF_SLIM);
5467 fprintf (dump_file, "\n");
5468 }
5469
5470 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5471 changed = true;
5472 }
5473 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5474 {
5475 bool final;
5476 vec <cgraph_node *>targets
5477 = possible_polymorphic_call_targets (callee, stmt, &final);
5478 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5479 {
5480 tree lhs = gimple_call_lhs (stmt);
5481 if (dump_enabled_p ())
5482 {
5483 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5484 "folding virtual function call to %s\n",
5485 targets.length () == 1
5486 ? targets[0]->name ()
5487 : "__builtin_unreachable");
5488 }
5489 if (targets.length () == 1)
5490 {
5491 tree fndecl = targets[0]->decl;
5492 gimple_call_set_fndecl (stmt, fndecl);
5493 changed = true;
5494 /* If changing the call to __cxa_pure_virtual
5495 or similar noreturn function, adjust gimple_call_fntype
5496 too. */
5497 if (gimple_call_noreturn_p (stmt)
5498 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5499 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5500 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5501 == void_type_node))
5502 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5503 /* If the call becomes noreturn, remove the lhs. */
5504 if (lhs
5505 && gimple_call_noreturn_p (stmt)
5506 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5507 || should_remove_lhs_p (lhs)))
5508 {
5509 if (TREE_CODE (lhs) == SSA_NAME)
5510 {
5511 tree var = create_tmp_var (TREE_TYPE (lhs));
5512 tree def = get_or_create_ssa_default_def (cfun, var);
5513 gimple *new_stmt = gimple_build_assign (lhs, def);
5514 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5515 }
5516 gimple_call_set_lhs (stmt, NULL_TREE);
5517 }
5518 maybe_remove_unused_call_args (cfun, stmt);
5519 }
5520 else
5521 {
5522 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5523 gimple *new_stmt = gimple_build_call (fndecl, 0);
5524 gimple_set_location (new_stmt, gimple_location (stmt));
5525 /* If the call had an SSA name as its lhs, morph that into
5526 an uninitialized value. */
5527 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5528 {
5529 tree var = create_tmp_var (TREE_TYPE (lhs));
5530 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5531 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5532 set_ssa_default_def (cfun, var, lhs);
5533 }
5534 gimple_move_vops (new_stmt, stmt);
5535 gsi_replace (gsi, new_stmt, false);
5536 return true;
5537 }
5538 }
5539 }
5540 }
5541
5542 /* Check for indirect calls that became direct calls, and then
5543 no longer require a static chain. */
5544 if (gimple_call_chain (stmt))
5545 {
5546 tree fn = gimple_call_fndecl (stmt);
5547 if (fn && !DECL_STATIC_CHAIN (fn))
5548 {
5549 gimple_call_set_chain (stmt, NULL);
5550 changed = true;
5551 }
5552 }
5553
5554 if (inplace)
5555 return changed;
5556
5557 /* Check for builtins that CCP can handle using information not
5558 available in the generic fold routines. */
5559 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5560 {
5561 if (gimple_fold_builtin (gsi))
5562 changed = true;
5563 }
5564 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5565 {
5566 changed |= targetm.gimple_fold_builtin (gsi);
5567 }
5568 else if (gimple_call_internal_p (stmt))
5569 {
5570 enum tree_code subcode = ERROR_MARK;
5571 tree result = NULL_TREE;
5572 bool cplx_result = false;
5573 tree overflow = NULL_TREE;
5574 switch (gimple_call_internal_fn (stmt))
5575 {
5576 case IFN_BUILTIN_EXPECT:
5577 result = fold_builtin_expect (gimple_location (stmt),
5578 gimple_call_arg (stmt, 0),
5579 gimple_call_arg (stmt, 1),
5580 gimple_call_arg (stmt, 2),
5581 NULL_TREE);
5582 break;
5583 case IFN_UBSAN_OBJECT_SIZE:
5584 {
5585 tree offset = gimple_call_arg (stmt, 1);
5586 tree objsize = gimple_call_arg (stmt, 2);
5587 if (integer_all_onesp (objsize)
5588 || (TREE_CODE (offset) == INTEGER_CST
5589 && TREE_CODE (objsize) == INTEGER_CST
5590 && tree_int_cst_le (offset, objsize)))
5591 {
5592 replace_call_with_value (gsi, NULL_TREE);
5593 return true;
5594 }
5595 }
5596 break;
5597 case IFN_UBSAN_PTR:
5598 if (integer_zerop (gimple_call_arg (stmt, 1)))
5599 {
5600 replace_call_with_value (gsi, NULL_TREE);
5601 return true;
5602 }
5603 break;
5604 case IFN_UBSAN_BOUNDS:
5605 {
5606 tree index = gimple_call_arg (stmt, 1);
5607 tree bound = gimple_call_arg (stmt, 2);
5608 if (TREE_CODE (index) == INTEGER_CST
5609 && TREE_CODE (bound) == INTEGER_CST)
5610 {
5611 index = fold_convert (TREE_TYPE (bound), index);
5612 if (TREE_CODE (index) == INTEGER_CST
5613 && tree_int_cst_le (index, bound))
5614 {
5615 replace_call_with_value (gsi, NULL_TREE);
5616 return true;
5617 }
5618 }
5619 }
5620 break;
5621 case IFN_GOACC_DIM_SIZE:
5622 case IFN_GOACC_DIM_POS:
5623 result = fold_internal_goacc_dim (stmt);
5624 break;
5625 case IFN_UBSAN_CHECK_ADD:
5626 subcode = PLUS_EXPR;
5627 break;
5628 case IFN_UBSAN_CHECK_SUB:
5629 subcode = MINUS_EXPR;
5630 break;
5631 case IFN_UBSAN_CHECK_MUL:
5632 subcode = MULT_EXPR;
5633 break;
5634 case IFN_ADD_OVERFLOW:
5635 subcode = PLUS_EXPR;
5636 cplx_result = true;
5637 break;
5638 case IFN_SUB_OVERFLOW:
5639 subcode = MINUS_EXPR;
5640 cplx_result = true;
5641 break;
5642 case IFN_MUL_OVERFLOW:
5643 subcode = MULT_EXPR;
5644 cplx_result = true;
5645 break;
5646 case IFN_MASK_LOAD:
5647 changed |= gimple_fold_mask_load (gsi, stmt);
5648 break;
5649 case IFN_MASK_STORE:
5650 changed |= gimple_fold_mask_store (gsi, stmt);
5651 break;
5652 default:
5653 break;
5654 }
5655 if (subcode != ERROR_MARK)
5656 {
5657 tree arg0 = gimple_call_arg (stmt, 0);
5658 tree arg1 = gimple_call_arg (stmt, 1);
5659 tree type = TREE_TYPE (arg0);
5660 if (cplx_result)
5661 {
5662 tree lhs = gimple_call_lhs (stmt);
5663 if (lhs == NULL_TREE)
5664 type = NULL_TREE;
5665 else
5666 type = TREE_TYPE (TREE_TYPE (lhs));
5667 }
5668 if (type == NULL_TREE)
5669 ;
5670 /* x = y + 0; x = y - 0; x = y * 0; */
5671 else if (integer_zerop (arg1))
5672 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5673 /* x = 0 + y; x = 0 * y; */
5674 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5675 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5676 /* x = y - y; */
5677 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5678 result = integer_zero_node;
5679 /* x = y * 1; x = 1 * y; */
5680 else if (subcode == MULT_EXPR && integer_onep (arg1))
5681 result = arg0;
5682 else if (subcode == MULT_EXPR && integer_onep (arg0))
5683 result = arg1;
5684 else if (TREE_CODE (arg0) == INTEGER_CST
5685 && TREE_CODE (arg1) == INTEGER_CST)
5686 {
5687 if (cplx_result)
5688 result = int_const_binop (subcode, fold_convert (type, arg0),
5689 fold_convert (type, arg1));
5690 else
5691 result = int_const_binop (subcode, arg0, arg1);
5692 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5693 {
5694 if (cplx_result)
5695 overflow = build_one_cst (type);
5696 else
5697 result = NULL_TREE;
5698 }
5699 }
5700 if (result)
5701 {
5702 if (result == integer_zero_node)
5703 result = build_zero_cst (type);
5704 else if (cplx_result && TREE_TYPE (result) != type)
5705 {
5706 if (TREE_CODE (result) == INTEGER_CST)
5707 {
5708 if (arith_overflowed_p (PLUS_EXPR, type, result,
5709 integer_zero_node))
5710 overflow = build_one_cst (type);
5711 }
5712 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5713 && TYPE_UNSIGNED (type))
5714 || (TYPE_PRECISION (type)
5715 < (TYPE_PRECISION (TREE_TYPE (result))
5716 + (TYPE_UNSIGNED (TREE_TYPE (result))
5717 && !TYPE_UNSIGNED (type)))))
5718 result = NULL_TREE;
5719 if (result)
5720 result = fold_convert (type, result);
5721 }
5722 }
5723 }
5724
5725 if (result)
5726 {
5727 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5728 result = drop_tree_overflow (result);
5729 if (cplx_result)
5730 {
5731 if (overflow == NULL_TREE)
5732 overflow = build_zero_cst (TREE_TYPE (result));
5733 tree ctype = build_complex_type (TREE_TYPE (result));
5734 if (TREE_CODE (result) == INTEGER_CST
5735 && TREE_CODE (overflow) == INTEGER_CST)
5736 result = build_complex (ctype, result, overflow);
5737 else
5738 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5739 ctype, result, overflow);
5740 }
5741 gimplify_and_update_call_from_tree (gsi, result);
5742 changed = true;
5743 }
5744 }
5745
5746 return changed;
5747 }
5748
5749
5750 /* Return true if NAME has a use on STMT. */
5751
5752 static bool
5753 has_use_on_stmt (tree name, gimple *stmt)
5754 {
5755 imm_use_iterator iter;
5756 use_operand_p use_p;
5757 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5758 if (USE_STMT (use_p) == stmt)
5759 return true;
5760 return false;
5761 }
5762
5763 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5764 gimple_simplify.
5765
5766 Replaces *GSI with the simplification result in RES_OP
5767 and the associated statements in *SEQ. Does the replacement
5768 according to INPLACE and returns true if the operation succeeded. */
5769
5770 static bool
5771 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5772 gimple_match_op *res_op,
5773 gimple_seq *seq, bool inplace)
5774 {
5775 gimple *stmt = gsi_stmt (*gsi);
5776 tree *ops = res_op->ops;
5777 unsigned int num_ops = res_op->num_ops;
5778
5779 /* Play safe and do not allow abnormals to be mentioned in
5780 newly created statements. See also maybe_push_res_to_seq.
5781 As an exception allow such uses if there was a use of the
5782 same SSA name on the old stmt. */
5783 for (unsigned int i = 0; i < num_ops; ++i)
5784 if (TREE_CODE (ops[i]) == SSA_NAME
5785 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5786 && !has_use_on_stmt (ops[i], stmt))
5787 return false;
5788
5789 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5790 for (unsigned int i = 0; i < 2; ++i)
5791 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5792 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5793 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5794 return false;
5795
5796 /* Don't insert new statements when INPLACE is true, even if we could
5797 reuse STMT for the final statement. */
5798 if (inplace && !gimple_seq_empty_p (*seq))
5799 return false;
5800
5801 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5802 {
5803 gcc_assert (res_op->code.is_tree_code ());
5804 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
5805 /* GIMPLE_CONDs condition may not throw. */
5806 && (!flag_exceptions
5807 || !cfun->can_throw_non_call_exceptions
5808 || !operation_could_trap_p (res_op->code,
5809 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5810 false, NULL_TREE)))
5811 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5812 else if (res_op->code == SSA_NAME)
5813 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5814 build_zero_cst (TREE_TYPE (ops[0])));
5815 else if (res_op->code == INTEGER_CST)
5816 {
5817 if (integer_zerop (ops[0]))
5818 gimple_cond_make_false (cond_stmt);
5819 else
5820 gimple_cond_make_true (cond_stmt);
5821 }
5822 else if (!inplace)
5823 {
5824 tree res = maybe_push_res_to_seq (res_op, seq);
5825 if (!res)
5826 return false;
5827 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5828 build_zero_cst (TREE_TYPE (res)));
5829 }
5830 else
5831 return false;
5832 if (dump_file && (dump_flags & TDF_DETAILS))
5833 {
5834 fprintf (dump_file, "gimple_simplified to ");
5835 if (!gimple_seq_empty_p (*seq))
5836 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5837 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5838 0, TDF_SLIM);
5839 }
5840 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5841 return true;
5842 }
5843 else if (is_gimple_assign (stmt)
5844 && res_op->code.is_tree_code ())
5845 {
5846 if (!inplace
5847 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
5848 {
5849 maybe_build_generic_op (res_op);
5850 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5851 res_op->op_or_null (0),
5852 res_op->op_or_null (1),
5853 res_op->op_or_null (2));
5854 if (dump_file && (dump_flags & TDF_DETAILS))
5855 {
5856 fprintf (dump_file, "gimple_simplified to ");
5857 if (!gimple_seq_empty_p (*seq))
5858 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5859 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5860 0, TDF_SLIM);
5861 }
5862 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5863 return true;
5864 }
5865 }
5866 else if (res_op->code.is_fn_code ()
5867 && gimple_call_combined_fn (stmt) == res_op->code)
5868 {
5869 gcc_assert (num_ops == gimple_call_num_args (stmt));
5870 for (unsigned int i = 0; i < num_ops; ++i)
5871 gimple_call_set_arg (stmt, i, ops[i]);
5872 if (dump_file && (dump_flags & TDF_DETAILS))
5873 {
5874 fprintf (dump_file, "gimple_simplified to ");
5875 if (!gimple_seq_empty_p (*seq))
5876 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5877 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5878 }
5879 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5880 return true;
5881 }
5882 else if (!inplace)
5883 {
5884 if (gimple_has_lhs (stmt))
5885 {
5886 tree lhs = gimple_get_lhs (stmt);
5887 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5888 return false;
5889 if (dump_file && (dump_flags & TDF_DETAILS))
5890 {
5891 fprintf (dump_file, "gimple_simplified to ");
5892 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5893 }
5894 gsi_replace_with_seq_vops (gsi, *seq);
5895 return true;
5896 }
5897 else
5898 gcc_unreachable ();
5899 }
5900
5901 return false;
5902 }
5903
5904 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5905
5906 static bool
5907 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5908 {
5909 bool res = false;
5910 tree *orig_t = t;
5911
5912 if (TREE_CODE (*t) == ADDR_EXPR)
5913 t = &TREE_OPERAND (*t, 0);
5914
5915 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5916 generic vector extension. The actual vector referenced is
5917 view-converted to an array type for this purpose. If the index
5918 is constant, the canonical representation in the middle-end is a
5919 BIT_FIELD_REF, so rewrite the former to the latter here. */
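
/* A sketch of the rewrite (illustrative, GNU vector extension):
   for "v4si v;" the frontends represent v[2] as

     VIEW_CONVERT_EXPR<int[4]>(v)[2]

   which, the index being constant, becomes

     BIT_FIELD_REF <v, 32, 64>

   i.e. a 32-bit extraction at bit position 2 * 32. */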
5920 if (TREE_CODE (*t) == ARRAY_REF
5921 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5922 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5923 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5924 {
5925 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5926 if (VECTOR_TYPE_P (vtype))
5927 {
5928 tree low = array_ref_low_bound (*t);
5929 if (TREE_CODE (low) == INTEGER_CST)
5930 {
5931 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5932 {
5933 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5934 wi::to_widest (low));
5935 idx = wi::mul (idx, wi::to_widest
5936 (TYPE_SIZE (TREE_TYPE (*t))));
5937 widest_int ext
5938 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5939 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5940 {
5941 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5942 TREE_TYPE (*t),
5943 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5944 TYPE_SIZE (TREE_TYPE (*t)),
5945 wide_int_to_tree (bitsizetype, idx));
5946 res = true;
5947 }
5948 }
5949 }
5950 }
5951 }
5952
5953 while (handled_component_p (*t))
5954 t = &TREE_OPERAND (*t, 0);
5955
5956 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
5957 of invariant addresses into an SSA name MEM_REF address. */
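
/* E.g. (an illustrative sketch): if the member b of "struct A a;"
   starts at byte offset 4, then

     MEM[(int *)&a.b, 8]

   is canonicalized to

     MEM[(int *)&a, 12]

   by splitting the address into its base and constant offset. */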
5958 if (TREE_CODE (*t) == MEM_REF
5959 || TREE_CODE (*t) == TARGET_MEM_REF)
5960 {
5961 tree addr = TREE_OPERAND (*t, 0);
5962 if (TREE_CODE (addr) == ADDR_EXPR
5963 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5964 || handled_component_p (TREE_OPERAND (addr, 0))))
5965 {
5966 tree base;
5967 poly_int64 coffset;
5968 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5969 &coffset);
5970 if (!base)
5971 {
5972 if (is_debug)
5973 return false;
5974 gcc_unreachable ();
5975 }
5976
5977 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5978 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5979 TREE_OPERAND (*t, 1),
5980 size_int (coffset));
5981 res = true;
5982 }
5983 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5984 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5985 }
5986
5987 /* Canonicalize back MEM_REFs to plain reference trees if the object
5988 accessed is a decl that has the same access semantics as the MEM_REF. */
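
/* E.g. (a sketch): for "int i;" the reference MEM[(int *)&i, 0] has
   the same access semantics as the decl itself, so it is turned back
   into plain "i", provided the volatility, TBAA, alignment and type
   compatibility checks below all agree. */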
5989 if (TREE_CODE (*t) == MEM_REF
5990 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5991 && integer_zerop (TREE_OPERAND (*t, 1))
5992 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5993 {
5994 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5995 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5996 if (/* Same volatile qualification. */
5997 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5998 /* Same TBAA behavior with -fstrict-aliasing. */
5999 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6000 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6001 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6002 /* Same alignment. */
6003 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6004 /* We have to be careful here not to drop a required conversion
6005 from the rhs to the lhs if *t appears on the lhs or vice-versa
6006 if it appears on the rhs. Thus require strict type
6007 compatibility. */
6008 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6009 {
6010 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6011 res = true;
6012 }
6013 }
6014
6015 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6016 && TREE_CODE (*t) == MEM_REF
6017 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6018 {
6019 tree base;
6020 poly_int64 coffset;
6021 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6022 &coffset);
6023 if (base)
6024 {
6025 gcc_assert (TREE_CODE (base) == MEM_REF);
6026 poly_int64 moffset;
6027 if (mem_ref_offset (base).to_shwi (&moffset))
6028 {
6029 coffset += moffset;
6030 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6031 {
6032 coffset += moffset;
6033 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6034 return true;
6035 }
6036 }
6037 }
6038 }
6039
6040 /* Canonicalize TARGET_MEM_REF in particular with respect to
6041 the indexes becoming constant. */
6042 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6043 {
6044 tree tem = maybe_fold_tmr (*t);
6045 if (tem)
6046 {
6047 *t = tem;
6048 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6049 recompute_tree_invariant_for_addr_expr (*orig_t);
6050 res = true;
6051 }
6052 }
6053
6054 return res;
6055 }
6056
6057 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6058 distinguishes the two cases. */
6059
6060 static bool
6061 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6062 {
6063 bool changed = false;
6064 gimple *stmt = gsi_stmt (*gsi);
6065 bool nowarning = gimple_no_warning_p (stmt);
6066 unsigned i;
6067 fold_defer_overflow_warnings ();
6068
6069 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6070 after propagation.
6071 ??? This shouldn't be done in generic folding but in the
6072 propagation helpers which also know whether an address was
6073 propagated.
6074 Also canonicalize operand order. */
6075 switch (gimple_code (stmt))
6076 {
6077 case GIMPLE_ASSIGN:
6078 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6079 {
6080 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6081 if ((REFERENCE_CLASS_P (*rhs)
6082 || TREE_CODE (*rhs) == ADDR_EXPR)
6083 && maybe_canonicalize_mem_ref_addr (rhs))
6084 changed = true;
6085 tree *lhs = gimple_assign_lhs_ptr (stmt);
6086 if (REFERENCE_CLASS_P (*lhs)
6087 && maybe_canonicalize_mem_ref_addr (lhs))
6088 changed = true;
6089 }
6090 else
6091 {
6092 /* Canonicalize operand order. */
6093 enum tree_code code = gimple_assign_rhs_code (stmt);
6094 if (TREE_CODE_CLASS (code) == tcc_comparison
6095 || commutative_tree_code (code)
6096 || commutative_ternary_tree_code (code))
6097 {
6098 tree rhs1 = gimple_assign_rhs1 (stmt);
6099 tree rhs2 = gimple_assign_rhs2 (stmt);
6100 if (tree_swap_operands_p (rhs1, rhs2))
6101 {
6102 gimple_assign_set_rhs1 (stmt, rhs2);
6103 gimple_assign_set_rhs2 (stmt, rhs1);
6104 if (TREE_CODE_CLASS (code) == tcc_comparison)
6105 gimple_assign_set_rhs_code (stmt,
6106 swap_tree_comparison (code));
6107 changed = true;
6108 }
6109 }
6110 }
6111 break;
6112 case GIMPLE_CALL:
6113 {
6114 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6115 {
6116 tree *arg = gimple_call_arg_ptr (stmt, i);
6117 if (REFERENCE_CLASS_P (*arg)
6118 && maybe_canonicalize_mem_ref_addr (arg))
6119 changed = true;
6120 }
6121 tree *lhs = gimple_call_lhs_ptr (stmt);
6122 if (*lhs
6123 && REFERENCE_CLASS_P (*lhs)
6124 && maybe_canonicalize_mem_ref_addr (lhs))
6125 changed = true;
6126 break;
6127 }
6128 case GIMPLE_ASM:
6129 {
6130 gasm *asm_stmt = as_a <gasm *> (stmt);
6131 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6132 {
6133 tree link = gimple_asm_output_op (asm_stmt, i);
6134 tree op = TREE_VALUE (link);
6135 if (REFERENCE_CLASS_P (op)
6136 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6137 changed = true;
6138 }
6139 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6140 {
6141 tree link = gimple_asm_input_op (asm_stmt, i);
6142 tree op = TREE_VALUE (link);
6143 if ((REFERENCE_CLASS_P (op)
6144 || TREE_CODE (op) == ADDR_EXPR)
6145 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6146 changed = true;
6147 }
6148 }
6149 break;
6150 case GIMPLE_DEBUG:
6151 if (gimple_debug_bind_p (stmt))
6152 {
6153 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6154 if (*val
6155 && (REFERENCE_CLASS_P (*val)
6156 || TREE_CODE (*val) == ADDR_EXPR)
6157 && maybe_canonicalize_mem_ref_addr (val, true))
6158 changed = true;
6159 }
6160 break;
6161 case GIMPLE_COND:
6162 {
6163 /* Canonicalize operand order. */
6164 tree lhs = gimple_cond_lhs (stmt);
6165 tree rhs = gimple_cond_rhs (stmt);
6166 if (tree_swap_operands_p (lhs, rhs))
6167 {
6168 gcond *gc = as_a <gcond *> (stmt);
6169 gimple_cond_set_lhs (gc, rhs);
6170 gimple_cond_set_rhs (gc, lhs);
6171 gimple_cond_set_code (gc,
6172 swap_tree_comparison (gimple_cond_code (gc)));
6173 changed = true;
6174 }
6175 }
6176 default:;
6177 }
6178
6179 /* Dispatch to pattern-based folding. */
6180 if (!inplace
6181 || is_gimple_assign (stmt)
6182 || gimple_code (stmt) == GIMPLE_COND)
6183 {
6184 gimple_seq seq = NULL;
6185 gimple_match_op res_op;
6186 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6187 valueize, valueize))
6188 {
6189 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6190 changed = true;
6191 else
6192 gimple_seq_discard (seq);
6193 }
6194 }
6195
6196 stmt = gsi_stmt (*gsi);
6197
6198 /* Fold the main computation performed by the statement. */
6199 switch (gimple_code (stmt))
6200 {
6201 case GIMPLE_ASSIGN:
6202 {
6203 /* For boolean-typed X, try to canonicalize the comparisons
6204 X == 0, X == 1, X != 0, and X != 1. */
6205 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6206 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6207 {
6208 tree lhs = gimple_assign_lhs (stmt);
6209 tree op1 = gimple_assign_rhs1 (stmt);
6210 tree op2 = gimple_assign_rhs2 (stmt);
6211 tree type = TREE_TYPE (op1);
6212
6213 /* Check whether the comparison operands have the same boolean
6214 type as the result.
6215 Check that the second operand is an integer constant with value
6216 zero or one. */
6217 if (TREE_CODE (op2) == INTEGER_CST
6218 && (integer_zerop (op2) || integer_onep (op2))
6219 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6220 {
6221 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6222 bool is_logical_not = false;
6223
6224 /* X == 0 and X != 1 are a logical-not of X;
6225 X == 1 and X != 0 are X itself. */
6226 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6227 || (cmp_code == NE_EXPR && integer_onep (op2)))
6228 is_logical_not = true;
6229
6230 if (is_logical_not == false)
6231 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6232 /* Only for X of one-bit precision is the transformation
6233 !X -> ~X valid. */
6234 else if (TYPE_PRECISION (type) == 1)
6235 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6236 /* Otherwise we use !X -> X ^ 1. */
6237 else
6238 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6239 build_int_cst (type, 1));
6240 changed = true;
6241 break;
6242 }
6243 }
6244
6245 unsigned old_num_ops = gimple_num_ops (stmt);
6246 tree lhs = gimple_assign_lhs (stmt);
6247 tree new_rhs = fold_gimple_assign (gsi);
6248 if (new_rhs
6249 && !useless_type_conversion_p (TREE_TYPE (lhs),
6250 TREE_TYPE (new_rhs)))
6251 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6252 if (new_rhs
6253 && (!inplace
6254 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6255 {
6256 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6257 changed = true;
6258 }
6259 break;
6260 }
6261
6262 case GIMPLE_CALL:
6263 changed |= gimple_fold_call (gsi, inplace);
6264 break;
6265
6266 case GIMPLE_DEBUG:
6267 if (gimple_debug_bind_p (stmt))
6268 {
6269 tree val = gimple_debug_bind_get_value (stmt);
6270 if (val
6271 && REFERENCE_CLASS_P (val))
6272 {
6273 tree tem = maybe_fold_reference (val);
6274 if (tem)
6275 {
6276 gimple_debug_bind_set_value (stmt, tem);
6277 changed = true;
6278 }
6279 }
6280 else if (val
6281 && TREE_CODE (val) == ADDR_EXPR)
6282 {
6283 tree ref = TREE_OPERAND (val, 0);
6284 tree tem = maybe_fold_reference (ref);
6285 if (tem)
6286 {
6287 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6288 gimple_debug_bind_set_value (stmt, tem);
6289 changed = true;
6290 }
6291 }
6292 }
6293 break;
6294
6295 case GIMPLE_RETURN:
6296 {
6297 greturn *ret_stmt = as_a<greturn *> (stmt);
6298 tree ret = gimple_return_retval (ret_stmt);
6299
6300 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6301 {
6302 tree val = valueize (ret);
6303 if (val && val != ret
6304 && may_propagate_copy (ret, val))
6305 {
6306 gimple_return_set_retval (ret_stmt, val);
6307 changed = true;
6308 }
6309 }
6310 }
6311 break;
6312
6313 default:;
6314 }
6315
6316 stmt = gsi_stmt (*gsi);
6317
6318 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6319 return changed;
6320 }
6321
6322 /* Valueization callback that ends up not following SSA edges. */
6323
6324 tree
6325 no_follow_ssa_edges (tree)
6326 {
6327 return NULL_TREE;
6328 }
6329
6330 /* Valueization callback that ends up following single-use SSA edges only. */
6331
6332 tree
6333 follow_single_use_edges (tree val)
6334 {
6335 if (TREE_CODE (val) == SSA_NAME
6336 && !has_single_use (val))
6337 return NULL_TREE;
6338 return val;
6339 }
6340
6341 /* Valueization callback that follows all SSA edges. */
6342
6343 tree
6344 follow_all_ssa_edges (tree val)
6345 {
6346 return val;
6347 }
6348
6349 /* Fold the statement pointed to by GSI. In some cases, this function may
6350 replace the whole statement with a new one. Returns true iff folding
6351 makes any changes.
6352 The statement pointed to by GSI should be in valid gimple form but may
6353 be in an unfolded state resulting from, for example, constant
6354 propagation, which can produce *&x = 0. */
6355
6356 bool
6357 fold_stmt (gimple_stmt_iterator *gsi)
6358 {
6359 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6360 }
6361
6362 bool
6363 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6364 {
6365 return fold_stmt_1 (gsi, false, valueize);
6366 }
6367
6368 /* Perform the minimal folding on statement *GSI. Only operations like
6369 *&x created by constant propagation are handled. The statement cannot
6370 be replaced with a new one. Return true if the statement was
6371 changed, false otherwise.
6372 The statement *GSI should be in valid gimple form but may
6373 be in an unfolded state resulting from, for example, constant
6374 propagation, which can produce *&x = 0. */
6375
6376 bool
6377 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6378 {
6379 gimple *stmt = gsi_stmt (*gsi);
6380 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6381 gcc_assert (gsi_stmt (*gsi) == stmt);
6382 return changed;
6383 }
6384
6385 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6386 if EXPR is null or we don't know how to canonicalize it.
6387 If non-null, the result always has boolean type. */
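
/* E.g. (illustrative): for an SSA name x, canonicalize_bool (x, false)
   builds x != 0 and canonicalize_bool (x, true) builds x == 0, both of
   boolean type; for a comparison such as a < b with INVERT set it
   builds the inverted comparison a >= b. */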
6388
6389 static tree
6390 canonicalize_bool (tree expr, bool invert)
6391 {
6392 if (!expr)
6393 return NULL_TREE;
6394 else if (invert)
6395 {
6396 if (integer_nonzerop (expr))
6397 return boolean_false_node;
6398 else if (integer_zerop (expr))
6399 return boolean_true_node;
6400 else if (TREE_CODE (expr) == SSA_NAME)
6401 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6402 build_int_cst (TREE_TYPE (expr), 0));
6403 else if (COMPARISON_CLASS_P (expr))
6404 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6405 boolean_type_node,
6406 TREE_OPERAND (expr, 0),
6407 TREE_OPERAND (expr, 1));
6408 else
6409 return NULL_TREE;
6410 }
6411 else
6412 {
6413 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6414 return expr;
6415 if (integer_nonzerop (expr))
6416 return boolean_true_node;
6417 else if (integer_zerop (expr))
6418 return boolean_false_node;
6419 else if (TREE_CODE (expr) == SSA_NAME)
6420 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6421 build_int_cst (TREE_TYPE (expr), 0));
6422 else if (COMPARISON_CLASS_P (expr))
6423 return fold_build2 (TREE_CODE (expr),
6424 boolean_type_node,
6425 TREE_OPERAND (expr, 0),
6426 TREE_OPERAND (expr, 1));
6427 else
6428 return NULL_TREE;
6429 }
6430 }
6431
6432 /* Check to see if a boolean expression EXPR is logically equivalent to the
6433 comparison (OP1 CODE OP2). Check for various identities involving
6434 SSA_NAMEs. */
6435
6436 static bool
6437 same_bool_comparison_p (const_tree expr, enum tree_code code,
6438 const_tree op1, const_tree op2)
6439 {
6440 gimple *s;
6441
6442 /* The obvious case. */
6443 if (TREE_CODE (expr) == code
6444 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6445 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6446 return true;
6447
6448 /* Check for comparing (name, name != 0) and the case where expr
6449 is an SSA_NAME with a definition matching the comparison. */
6450 if (TREE_CODE (expr) == SSA_NAME
6451 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6452 {
6453 if (operand_equal_p (expr, op1, 0))
6454 return ((code == NE_EXPR && integer_zerop (op2))
6455 || (code == EQ_EXPR && integer_nonzerop (op2)));
6456 s = SSA_NAME_DEF_STMT (expr);
6457 if (is_gimple_assign (s)
6458 && gimple_assign_rhs_code (s) == code
6459 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6460 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6461 return true;
6462 }
6463
6464 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6465 of name is a comparison, recurse. */
6466 if (TREE_CODE (op1) == SSA_NAME
6467 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6468 {
6469 s = SSA_NAME_DEF_STMT (op1);
6470 if (is_gimple_assign (s)
6471 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6472 {
6473 enum tree_code c = gimple_assign_rhs_code (s);
6474 if ((c == NE_EXPR && integer_zerop (op2))
6475 || (c == EQ_EXPR && integer_nonzerop (op2)))
6476 return same_bool_comparison_p (expr, c,
6477 gimple_assign_rhs1 (s),
6478 gimple_assign_rhs2 (s));
6479 if ((c == EQ_EXPR && integer_zerop (op2))
6480 || (c == NE_EXPR && integer_nonzerop (op2)))
6481 return same_bool_comparison_p (expr,
6482 invert_tree_comparison (c, false),
6483 gimple_assign_rhs1 (s),
6484 gimple_assign_rhs2 (s));
6485 }
6486 }
6487 return false;
6488 }
6489
6490 /* Check to see if two boolean expressions OP1 and OP2 are logically
6491 equivalent. */
6492
6493 static bool
6494 same_bool_result_p (const_tree op1, const_tree op2)
6495 {
6496 /* Simple cases first. */
6497 if (operand_equal_p (op1, op2, 0))
6498 return true;
6499
6500 /* Check the cases where at least one of the operands is a comparison.
6501 These are a bit smarter than operand_equal_p in that they apply some
6502 identities on SSA_NAMEs. */
6503 if (COMPARISON_CLASS_P (op2)
6504 && same_bool_comparison_p (op1, TREE_CODE (op2),
6505 TREE_OPERAND (op2, 0),
6506 TREE_OPERAND (op2, 1)))
6507 return true;
6508 if (COMPARISON_CLASS_P (op1)
6509 && same_bool_comparison_p (op2, TREE_CODE (op1),
6510 TREE_OPERAND (op1, 0),
6511 TREE_OPERAND (op1, 1)))
6512 return true;
6513
6514 /* Default case. */
6515 return false;
6516 }
6517
6518 /* Forward declarations for some mutually recursive functions. */
6519
6520 static tree
6521 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6522 enum tree_code code2, tree op2a, tree op2b);
6523 static tree
6524 and_var_with_comparison (tree type, tree var, bool invert,
6525 enum tree_code code2, tree op2a, tree op2b);
6526 static tree
6527 and_var_with_comparison_1 (tree type, gimple *stmt,
6528 enum tree_code code2, tree op2a, tree op2b);
6529 static tree
6530 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6531 enum tree_code code2, tree op2a, tree op2b);
6532 static tree
6533 or_var_with_comparison (tree, tree var, bool invert,
6534 enum tree_code code2, tree op2a, tree op2b);
6535 static tree
6536 or_var_with_comparison_1 (tree, gimple *stmt,
6537 enum tree_code code2, tree op2a, tree op2b);
6538
6539 /* Helper function for and_comparisons_1: try to simplify the AND of the
6540 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6541 If INVERT is true, invert the value of VAR before doing the AND.
6542 Return NULL_TREE if we can't simplify this to a single expression. */
6543
6544 static tree
6545 and_var_with_comparison (tree type, tree var, bool invert,
6546 enum tree_code code2, tree op2a, tree op2b)
6547 {
6548 tree t;
6549 gimple *stmt = SSA_NAME_DEF_STMT (var);
6550
6551 /* We can only deal with variables whose definitions are assignments. */
6552 if (!is_gimple_assign (stmt))
6553 return NULL_TREE;
6554
6555 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6556 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6557 Then we only have to consider the simpler non-inverted cases. */
6558 if (invert)
6559 t = or_var_with_comparison_1 (type, stmt,
6560 invert_tree_comparison (code2, false),
6561 op2a, op2b);
6562 else
6563 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6564 return canonicalize_bool (t, invert);
6565 }
6566
6567 /* Try to simplify the AND of the ssa variable defined by the assignment
6568 STMT with the comparison specified by (OP2A CODE2 OP2B).
6569 Return NULL_TREE if we can't simplify this to a single expression. */
6570
6571 static tree
6572 and_var_with_comparison_1 (tree type, gimple *stmt,
6573 enum tree_code code2, tree op2a, tree op2b)
6574 {
6575 tree var = gimple_assign_lhs (stmt);
6576 tree true_test_var = NULL_TREE;
6577 tree false_test_var = NULL_TREE;
6578 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6579
6580 /* Check for identities like (var AND (var == 0)) => false. */
6581 if (TREE_CODE (op2a) == SSA_NAME
6582 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6583 {
6584 if ((code2 == NE_EXPR && integer_zerop (op2b))
6585 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6586 {
6587 true_test_var = op2a;
6588 if (var == true_test_var)
6589 return var;
6590 }
6591 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6592 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6593 {
6594 false_test_var = op2a;
6595 if (var == false_test_var)
6596 return boolean_false_node;
6597 }
6598 }
6599
6600 /* If the definition is a comparison, recurse on it. */
6601 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6602 {
6603 tree t = and_comparisons_1 (type, innercode,
6604 gimple_assign_rhs1 (stmt),
6605 gimple_assign_rhs2 (stmt),
6606 code2,
6607 op2a,
6608 op2b);
6609 if (t)
6610 return t;
6611 }
6612
6613 /* If the definition is an AND or OR expression, we may be able to
6614 simplify by reassociating. */
6615 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6616 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6617 {
6618 tree inner1 = gimple_assign_rhs1 (stmt);
6619 tree inner2 = gimple_assign_rhs2 (stmt);
6620 gimple *s;
6621 tree t;
6622 tree partial = NULL_TREE;
6623 bool is_and = (innercode == BIT_AND_EXPR);
6624
6625 /* Check for boolean identities that don't require recursive examination
6626 of inner1/inner2:
6627 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6628 inner1 AND (inner1 OR inner2) => inner1
6629 !inner1 AND (inner1 AND inner2) => false
6630 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6631 Likewise for similar cases involving inner2. */
6632 if (inner1 == true_test_var)
6633 return (is_and ? var : inner1);
6634 else if (inner2 == true_test_var)
6635 return (is_and ? var : inner2);
6636 else if (inner1 == false_test_var)
6637 return (is_and
6638 ? boolean_false_node
6639 : and_var_with_comparison (type, inner2, false, code2, op2a,
6640 op2b));
6641 else if (inner2 == false_test_var)
6642 return (is_and
6643 ? boolean_false_node
6644 : and_var_with_comparison (type, inner1, false, code2, op2a,
6645 op2b));
6646
6647 /* Next, redistribute/reassociate the AND across the inner tests.
6648 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6649 if (TREE_CODE (inner1) == SSA_NAME
6650 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6651 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6652 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6653 gimple_assign_rhs1 (s),
6654 gimple_assign_rhs2 (s),
6655 code2, op2a, op2b)))
6656 {
6657 /* Handle the AND case, where we are reassociating:
6658 (inner1 AND inner2) AND (op2a code2 op2b)
6659 => (t AND inner2)
6660 If the partial result t is a constant, we win. Otherwise
6661 continue on to try reassociating with the other inner test. */
6662 if (is_and)
6663 {
6664 if (integer_onep (t))
6665 return inner2;
6666 else if (integer_zerop (t))
6667 return boolean_false_node;
6668 }
6669
6670 /* Handle the OR case, where we are redistributing:
6671 (inner1 OR inner2) AND (op2a code2 op2b)
6672 => (t OR (inner2 AND (op2a code2 op2b))) */
6673 else if (integer_onep (t))
6674 return boolean_true_node;
6675
6676 /* Save partial result for later. */
6677 partial = t;
6678 }
6679
6680 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6681 if (TREE_CODE (inner2) == SSA_NAME
6682 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6683 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6684 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6685 gimple_assign_rhs1 (s),
6686 gimple_assign_rhs2 (s),
6687 code2, op2a, op2b)))
6688 {
6689 /* Handle the AND case, where we are reassociating:
6690 (inner1 AND inner2) AND (op2a code2 op2b)
6691 => (inner1 AND t) */
6692 if (is_and)
6693 {
6694 if (integer_onep (t))
6695 return inner1;
6696 else if (integer_zerop (t))
6697 return boolean_false_node;
6698 /* If both are the same, we can apply the identity
6699 (x AND x) == x. */
6700 else if (partial && same_bool_result_p (t, partial))
6701 return t;
6702 }
6703
6704 /* Handle the OR case, where we are redistributing:
6705 (inner1 OR inner2) AND (op2a code2 op2b)
6706 => (t OR (inner1 AND (op2a code2 op2b)))
6707 => (t OR partial) */
6708 else
6709 {
6710 if (integer_onep (t))
6711 return boolean_true_node;
6712 else if (partial)
6713 {
6714 /* We already got a simplification for the other
6715 operand to the redistributed OR expression. The
6716 interesting case is when at least one is false.
6717 Or, if both are the same, we can apply the identity
6718 (x OR x) == x. */
6719 if (integer_zerop (partial))
6720 return t;
6721 else if (integer_zerop (t))
6722 return partial;
6723 else if (same_bool_result_p (t, partial))
6724 return t;
6725 }
6726 }
6727 }
6728 }
6729 return NULL_TREE;
6730 }
6731
6732 /* Try to simplify the AND of two comparisons defined by
6733 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6734 If this can be done without constructing an intermediate value,
6735 return the resulting tree; otherwise NULL_TREE is returned.
6736 This function is deliberately asymmetric as it recurses on SSA_DEFs
6737 in the first comparison but not the second. */
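
/* For instance (a sketch, with integer operands): when both operands
   match, (x < y) AND (x == y) combines via combine_comparisons to
   boolean_false_node, while (x <= y) AND (x >= y) combines to x == y,
   with no intermediate SSA names required. */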
6738
6739 static tree
6740 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6741 enum tree_code code2, tree op2a, tree op2b)
6742 {
6743 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6744
6745 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6746 if (operand_equal_p (op1a, op2a, 0)
6747 && operand_equal_p (op1b, op2b, 0))
6748 {
6749 /* Result will be either NULL_TREE, or a combined comparison. */
6750 tree t = combine_comparisons (UNKNOWN_LOCATION,
6751 TRUTH_ANDIF_EXPR, code1, code2,
6752 truth_type, op1a, op1b);
6753 if (t)
6754 return t;
6755 }
6756
6757 /* Likewise the swapped case of the above. */
6758 if (operand_equal_p (op1a, op2b, 0)
6759 && operand_equal_p (op1b, op2a, 0))
6760 {
6761 /* Result will be either NULL_TREE, or a combined comparison. */
6762 tree t = combine_comparisons (UNKNOWN_LOCATION,
6763 TRUTH_ANDIF_EXPR, code1,
6764 swap_tree_comparison (code2),
6765 truth_type, op1a, op1b);
6766 if (t)
6767 return t;
6768 }
6769
6770 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6771 NAME's definition is a truth value. See if there are any simplifications
6772 that can be done against the NAME's definition. */
6773 if (TREE_CODE (op1a) == SSA_NAME
6774 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6775 && (integer_zerop (op1b) || integer_onep (op1b)))
6776 {
6777 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6778 || (code1 == NE_EXPR && integer_onep (op1b)));
6779 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6780 switch (gimple_code (stmt))
6781 {
6782 case GIMPLE_ASSIGN:
6783 /* Try to simplify by copy-propagating the definition. */
6784 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6785 op2b);
6786
6787 case GIMPLE_PHI:
6788 /* If every argument to the PHI produces the same result when
6789 ANDed with the second comparison, we win.
6790 Do not do this unless the type is bool since we need a bool
6791 result here anyway. */
6792 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6793 {
6794 tree result = NULL_TREE;
6795 unsigned i;
6796 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6797 {
6798 tree arg = gimple_phi_arg_def (stmt, i);
6799
6800 /* If this PHI has itself as an argument, ignore it.
6801 If all the other args produce the same result,
6802 we're still OK. */
6803 if (arg == gimple_phi_result (stmt))
6804 continue;
6805 else if (TREE_CODE (arg) == INTEGER_CST)
6806 {
6807 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6808 {
6809 if (!result)
6810 result = boolean_false_node;
6811 else if (!integer_zerop (result))
6812 return NULL_TREE;
6813 }
6814 else if (!result)
6815 result = fold_build2 (code2, boolean_type_node,
6816 op2a, op2b);
6817 else if (!same_bool_comparison_p (result,
6818 code2, op2a, op2b))
6819 return NULL_TREE;
6820 }
6821 else if (TREE_CODE (arg) == SSA_NAME
6822 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6823 {
6824 tree temp;
6825 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6826 /* In simple cases we can look through PHI nodes,
6827 but we have to be careful with loops.
6828 See PR49073. */
6829 if (! dom_info_available_p (CDI_DOMINATORS)
6830 || gimple_bb (def_stmt) == gimple_bb (stmt)
6831 || dominated_by_p (CDI_DOMINATORS,
6832 gimple_bb (def_stmt),
6833 gimple_bb (stmt)))
6834 return NULL_TREE;
6835 temp = and_var_with_comparison (type, arg, invert, code2,
6836 op2a, op2b);
6837 if (!temp)
6838 return NULL_TREE;
6839 else if (!result)
6840 result = temp;
6841 else if (!same_bool_result_p (result, temp))
6842 return NULL_TREE;
6843 }
6844 else
6845 return NULL_TREE;
6846 }
6847 return result;
6848 }
6849
6850 default:
6851 break;
6852 }
6853 }
6854 return NULL_TREE;
6855 }
6856
6857 /* Helper function for maybe_fold_and_comparisons and
6858 maybe_fold_or_comparisons: try to simplify the AND/OR of the two
6859 comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
6860 Return NULL_TREE if we can't simplify this to a single expression.
6861 To significantly lower the cost of building SSA names / gimple
6862 stmts, we allocate them on the stack. This makes the code a bit ugly. */
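
/* A sketch of the idea: two throwaway assignments

     lhs1 = op1a code1 op1b;
     lhs2 = op2a code2 op2b;

   are materialized on the stack, never inserted into the IL, so that
   match.pd can simplify "lhs1 code lhs2" as if it were real GIMPLE;
   only the resulting GENERIC tree is handed back to the caller. */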
6863
6864 static tree
6865 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6866 enum tree_code code1,
6867 tree op1a, tree op1b,
6868 enum tree_code code2, tree op2a,
6869 tree op2b)
6870 {
6871 /* Allocate gimple stmt1 on the stack. */
6872 gassign *stmt1
6873 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6874 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6875 gimple_assign_set_rhs_code (stmt1, code1);
6876 gimple_assign_set_rhs1 (stmt1, op1a);
6877 gimple_assign_set_rhs2 (stmt1, op1b);
6878
6879 /* Allocate gimple stmt2 on the stack. */
6880 gassign *stmt2
6881 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6882 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6883 gimple_assign_set_rhs_code (stmt2, code2);
6884 gimple_assign_set_rhs1 (stmt2, op2a);
6885 gimple_assign_set_rhs2 (stmt2, op2b);
6886
6887 /* Allocate SSA name lhs1 on the stack. */
6888 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6889 memset (lhs1, 0, sizeof (tree_ssa_name));
6890 TREE_SET_CODE (lhs1, SSA_NAME);
6891 TREE_TYPE (lhs1) = type;
6892 init_ssa_name_imm_use (lhs1);
6893
6894 /* Allocate SSA name lhs2 on the stack. */
6895 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6896 memset (lhs2, 0, sizeof (tree_ssa_name));
6897 TREE_SET_CODE (lhs2, SSA_NAME);
6898 TREE_TYPE (lhs2) = type;
6899 init_ssa_name_imm_use (lhs2);
6900
6901 gimple_assign_set_lhs (stmt1, lhs1);
6902 gimple_assign_set_lhs (stmt2, lhs2);
6903
6904 gimple_match_op op (gimple_match_cond::UNCOND, code,
6905 type, gimple_assign_lhs (stmt1),
6906 gimple_assign_lhs (stmt2));
6907 if (op.resimplify (NULL, follow_all_ssa_edges))
6908 {
6909 if (gimple_simplified_result_is_gimple_val (&op))
6910 {
6911 tree res = op.ops[0];
6912 if (res == lhs1)
6913 return build2 (code1, type, op1a, op1b);
6914 else if (res == lhs2)
6915 return build2 (code2, type, op2a, op2b);
6916 else
6917 return res;
6918 }
6919 else if (op.code.is_tree_code ()
6920 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6921 {
6922 tree op0 = op.ops[0];
6923 tree op1 = op.ops[1];
6924 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6925 return NULL_TREE; /* not simple */
6926
6927 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6928 }
6929 }
6930
6931 return NULL_TREE;
6932 }
6933
6934 /* Try to simplify the AND of two comparisons, specified by
6935 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6936 If this can be simplified to a single expression (without
6937 introducing more SSA variables to hold intermediate values),
6938 return the resulting tree. Otherwise return NULL_TREE.
6939 If the result expression is non-null, it has boolean type. */
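
/* A hypothetical use (illustrative only):

     tree t = maybe_fold_and_comparisons (boolean_type_node,
                                          LE_EXPR, x, y,
                                          GE_EXPR, x, y);

   yields the single comparison x == y for integer operands; when no
   single-expression simplification exists, NULL_TREE is returned. */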
6940
6941 tree
6942 maybe_fold_and_comparisons (tree type,
6943 enum tree_code code1, tree op1a, tree op1b,
6944 enum tree_code code2, tree op2a, tree op2b)
6945 {
6946 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6947 return t;
6948
6949 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6950 return t;
6951
6952 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6953 op1a, op1b, code2, op2a,
6954 op2b))
6955 return t;
6956
6957 return NULL_TREE;
6958 }
6959
6960 /* Helper function for or_comparisons_1: try to simplify the OR of the
6961 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6962 If INVERT is true, invert the value of VAR before doing the OR.
6963 Return NULL_TREE if we can't simplify this to a single expression. */
6964
6965 static tree
6966 or_var_with_comparison (tree type, tree var, bool invert,
6967 enum tree_code code2, tree op2a, tree op2b)
6968 {
6969 tree t;
6970 gimple *stmt = SSA_NAME_DEF_STMT (var);
6971
6972 /* We can only deal with variables whose definitions are assignments. */
6973 if (!is_gimple_assign (stmt))
6974 return NULL_TREE;
6975
6976 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6977 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6978 Then we only have to consider the simpler non-inverted cases. */
6979 if (invert)
6980 t = and_var_with_comparison_1 (type, stmt,
6981 invert_tree_comparison (code2, false),
6982 op2a, op2b);
6983 else
6984 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6985 return canonicalize_bool (t, invert);
6986 }
6987
6988 /* Try to simplify the OR of the ssa variable defined by the assignment
6989 STMT with the comparison specified by (OP2A CODE2 OP2B).
6990 Return NULL_TREE if we can't simplify this to a single expression. */
6991
6992 static tree
6993 or_var_with_comparison_1 (tree type, gimple *stmt,
6994 enum tree_code code2, tree op2a, tree op2b)
6995 {
6996 tree var = gimple_assign_lhs (stmt);
6997 tree true_test_var = NULL_TREE;
6998 tree false_test_var = NULL_TREE;
6999 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7000
7001 /* Check for identities like (var OR (var != 0)) => true. */
7002 if (TREE_CODE (op2a) == SSA_NAME
7003 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7004 {
7005 if ((code2 == NE_EXPR && integer_zerop (op2b))
7006 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7007 {
7008 true_test_var = op2a;
7009 if (var == true_test_var)
7010 return var;
7011 }
7012 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7013 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7014 {
7015 false_test_var = op2a;
7016 if (var == false_test_var)
7017 return boolean_true_node;
7018 }
7019 }
7020
7021 /* If the definition is a comparison, recurse on it. */
7022 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7023 {
7024 tree t = or_comparisons_1 (type, innercode,
7025 gimple_assign_rhs1 (stmt),
7026 gimple_assign_rhs2 (stmt),
7027 code2,
7028 op2a,
7029 op2b);
7030 if (t)
7031 return t;
7032 }
7033
7034 /* If the definition is an AND or OR expression, we may be able to
7035 simplify by reassociating. */
7036 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7037 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7038 {
7039 tree inner1 = gimple_assign_rhs1 (stmt);
7040 tree inner2 = gimple_assign_rhs2 (stmt);
7041 gimple *s;
7042 tree t;
7043 tree partial = NULL_TREE;
7044 bool is_or = (innercode == BIT_IOR_EXPR);
7045
7046 /* Check for boolean identities that don't require recursive examination
7047 of inner1/inner2:
7048 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7049 inner1 OR (inner1 AND inner2) => inner1
7050 !inner1 OR (inner1 OR inner2) => true
7051 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7052 */
7053 if (inner1 == true_test_var)
7054 return (is_or ? var : inner1);
7055 else if (inner2 == true_test_var)
7056 return (is_or ? var : inner2);
7057 else if (inner1 == false_test_var)
7058 return (is_or
7059 ? boolean_true_node
7060 : or_var_with_comparison (type, inner2, false, code2, op2a,
7061 op2b));
7062 else if (inner2 == false_test_var)
7063 return (is_or
7064 ? boolean_true_node
7065 : or_var_with_comparison (type, inner1, false, code2, op2a,
7066 op2b));
7067
7068 /* Next, redistribute/reassociate the OR across the inner tests.
7069 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7070 if (TREE_CODE (inner1) == SSA_NAME
7071 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7072 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7073 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7074 gimple_assign_rhs1 (s),
7075 gimple_assign_rhs2 (s),
7076 code2, op2a, op2b)))
7077 {
7078 /* Handle the OR case, where we are reassociating:
7079 (inner1 OR inner2) OR (op2a code2 op2b)
7080 => (t OR inner2)
7081 If the partial result t is a constant, we win. Otherwise
7082 continue on to try reassociating with the other inner test. */
7083 if (is_or)
7084 {
7085 if (integer_onep (t))
7086 return boolean_true_node;
7087 else if (integer_zerop (t))
7088 return inner2;
7089 }
7090
7091 /* Handle the AND case, where we are redistributing:
7092 (inner1 AND inner2) OR (op2a code2 op2b)
7093 => (t AND (inner2 OR (op2a code op2b))) */
7094 else if (integer_zerop (t))
7095 return boolean_false_node;
7096
7097 /* Save partial result for later. */
7098 partial = t;
7099 }
7100
7101 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7102 if (TREE_CODE (inner2) == SSA_NAME
7103 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7104 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7105 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7106 gimple_assign_rhs1 (s),
7107 gimple_assign_rhs2 (s),
7108 code2, op2a, op2b)))
7109 {
7110 /* Handle the OR case, where we are reassociating:
7111 (inner1 OR inner2) OR (op2a code2 op2b)
7112 => (inner1 OR t)
7113 => (t OR partial) */
7114 if (is_or)
7115 {
7116 if (integer_zerop (t))
7117 return inner1;
7118 else if (integer_onep (t))
7119 return boolean_true_node;
7120 /* If both are the same, we can apply the identity
7121 (x OR x) == x. */
7122 else if (partial && same_bool_result_p (t, partial))
7123 return t;
7124 }
7125
7126 /* Handle the AND case, where we are redistributing:
7127 (inner1 AND inner2) OR (op2a code2 op2b)
7128 => (t AND (inner1 OR (op2a code2 op2b)))
7129 => (t AND partial) */
7130 else
7131 {
7132 if (integer_zerop (t))
7133 return boolean_false_node;
7134 else if (partial)
7135 {
7136 /* We already got a simplification for the other
7137 operand to the redistributed AND expression. The
7138 interesting case is when at least one is true.
7139 Or, if both are the same, we can apply the identity
7140 (x AND x) == x. */
7141 if (integer_onep (partial))
7142 return t;
7143 else if (integer_onep (t))
7144 return partial;
7145 else if (same_bool_result_p (t, partial))
7146 return t;
7147 }
7148 }
7149 }
7150 }
7151 return NULL_TREE;
7152 }
7153
7154 /* Try to simplify the OR of two comparisons defined by
7155 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7156 If this can be done without constructing an intermediate value,
7157 return the resulting tree; otherwise NULL_TREE is returned.
7158 This function is deliberately asymmetric as it recurses on SSA_DEFs
7159 in the first comparison but not the second. */
7160
7161 static tree
7162 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7163 enum tree_code code2, tree op2a, tree op2b)
7164 {
7165 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7166
7167 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7168 if (operand_equal_p (op1a, op2a, 0)
7169 && operand_equal_p (op1b, op2b, 0))
7170 {
7171 /* Result will be either NULL_TREE, or a combined comparison. */
7172 tree t = combine_comparisons (UNKNOWN_LOCATION,
7173 TRUTH_ORIF_EXPR, code1, code2,
7174 truth_type, op1a, op1b);
7175 if (t)
7176 return t;
7177 }
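/* For example, with integer operands X and Y, (X < Y) || (X == Y)
   combines to X <= Y and (X < Y) || (X > Y) combines to X != Y;
   combine_comparisons returns NULL_TREE when the disjunction cannot
   be expressed as a single comparison. */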
7178
7179 /* Likewise the swapped case of the above. */
7180 if (operand_equal_p (op1a, op2b, 0)
7181 && operand_equal_p (op1b, op2a, 0))
7182 {
7183 /* Result will be either NULL_TREE, or a combined comparison. */
7184 tree t = combine_comparisons (UNKNOWN_LOCATION,
7185 TRUTH_ORIF_EXPR, code1,
7186 swap_tree_comparison (code2),
7187 truth_type, op1a, op1b);
7188 if (t)
7189 return t;
7190 }
7191
7192 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7193 NAME's definition is a truth value. See if there are any simplifications
7194 that can be done against NAME's definition. */
7195 if (TREE_CODE (op1a) == SSA_NAME
7196 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7197 && (integer_zerop (op1b) || integer_onep (op1b)))
7198 {
7199 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7200 || (code1 == NE_EXPR && integer_onep (op1b)));
7201 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7202 switch (gimple_code (stmt))
7203 {
7204 case GIMPLE_ASSIGN:
7205 /* Try to simplify by copy-propagating the definition. */
7206 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7207 op2b);
7208
7209 case GIMPLE_PHI:
7210 /* If every argument to the PHI produces the same result when
7211 ORed with the second comparison, we win.
7212 Do not do this unless the type is bool since we need a bool
7213 result here anyway. */
7214 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7215 {
7216 tree result = NULL_TREE;
7217 unsigned i;
7218 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7219 {
7220 tree arg = gimple_phi_arg_def (stmt, i);
7221
7222 /* If this PHI has itself as an argument, ignore it.
7223 If all the other args produce the same result,
7224 we're still OK. */
7225 if (arg == gimple_phi_result (stmt))
7226 continue;
7227 else if (TREE_CODE (arg) == INTEGER_CST)
7228 {
7229 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7230 {
7231 if (!result)
7232 result = boolean_true_node;
7233 else if (!integer_onep (result))
7234 return NULL_TREE;
7235 }
7236 else if (!result)
7237 result = fold_build2 (code2, boolean_type_node,
7238 op2a, op2b);
7239 else if (!same_bool_comparison_p (result,
7240 code2, op2a, op2b))
7241 return NULL_TREE;
7242 }
7243 else if (TREE_CODE (arg) == SSA_NAME
7244 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7245 {
7246 tree temp;
7247 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7248 /* In simple cases we can look through PHI nodes,
7249 but we have to be careful with loops.
7250 See PR49073. */
7251 if (! dom_info_available_p (CDI_DOMINATORS)
7252 || gimple_bb (def_stmt) == gimple_bb (stmt)
7253 || dominated_by_p (CDI_DOMINATORS,
7254 gimple_bb (def_stmt),
7255 gimple_bb (stmt)))
7256 return NULL_TREE;
7257 temp = or_var_with_comparison (type, arg, invert, code2,
7258 op2a, op2b);
7259 if (!temp)
7260 return NULL_TREE;
7261 else if (!result)
7262 result = temp;
7263 else if (!same_bool_result_p (result, temp))
7264 return NULL_TREE;
7265 }
7266 else
7267 return NULL_TREE;
7268 }
7269 return result;
7270 }
7271
7272 default:
7273 break;
7274 }
7275 }
7276 return NULL_TREE;
7277 }
7278
7279 /* Try to simplify the OR of two comparisons, specified by
7280 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7281 If this can be simplified to a single expression (without requiring
7282 introducing more SSA variables to hold intermediate values),
7283 return the resulting tree. Otherwise return NULL_TREE.
7284 If the result expression is non-null, it has boolean type. */
7285
7286 tree
7287 maybe_fold_or_comparisons (tree type,
7288 enum tree_code code1, tree op1a, tree op1b,
7289 enum tree_code code2, tree op2a, tree op2b)
7290 {
7291 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7292 return t;
7293
7294 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7295 return t;
7296
7297 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7298 op1a, op1b, code2, op2a,
7299 op2b))
7300 return t;
7301
7302 return NULL_TREE;
7303 }
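/* Note: because or_comparisons_1 only follows the SSA definition of
   its first comparison, it is called with both argument orders above
   before falling back to the match.pd based folding. */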
7304
7305 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7306
7307 Either NULL_TREE, a simplified but non-constant or a constant
7308 is returned.
7309
7310 ??? This should go into a gimple-fold-inline.h file to be eventually
7311 privatized with the single valueize function used in the various TUs
7312 to avoid the indirect function call overhead. */
7313
7314 tree
7315 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7316 tree (*gvalueize) (tree))
7317 {
7318 gimple_match_op res_op;
7319 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7320 edges if there are intermediate VARYING defs. For this reason
7321 do not follow SSA edges here even though SCCVN can technically
7322 deal just fine with that. */
7323 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7324 {
7325 tree res = NULL_TREE;
7326 if (gimple_simplified_result_is_gimple_val (&res_op))
7327 res = res_op.ops[0];
7328 else if (mprts_hook)
7329 res = mprts_hook (&res_op);
7330 if (res)
7331 {
7332 if (dump_file && dump_flags & TDF_DETAILS)
7333 {
7334 fprintf (dump_file, "Match-and-simplified ");
7335 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7336 fprintf (dump_file, " to ");
7337 print_generic_expr (dump_file, res);
7338 fprintf (dump_file, "\n");
7339 }
7340 return res;
7341 }
7342 }
7343
7344 location_t loc = gimple_location (stmt);
7345 switch (gimple_code (stmt))
7346 {
7347 case GIMPLE_ASSIGN:
7348 {
7349 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7350
7351 switch (get_gimple_rhs_class (subcode))
7352 {
7353 case GIMPLE_SINGLE_RHS:
7354 {
7355 tree rhs = gimple_assign_rhs1 (stmt);
7356 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7357
7358 if (TREE_CODE (rhs) == SSA_NAME)
7359 {
7360 /* If the RHS is an SSA_NAME, return its known constant value,
7361 if any. */
7362 return (*valueize) (rhs);
7363 }
7364 /* Handle propagating invariant addresses into address
7365 operations. */
7366 else if (TREE_CODE (rhs) == ADDR_EXPR
7367 && !is_gimple_min_invariant (rhs))
7368 {
7369 poly_int64 offset = 0;
7370 tree base;
7371 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7372 &offset,
7373 valueize);
7374 if (base
7375 && (CONSTANT_CLASS_P (base)
7376 || decl_address_invariant_p (base)))
7377 return build_invariant_address (TREE_TYPE (rhs),
7378 base, offset);
7379 }
7380 else if (TREE_CODE (rhs) == CONSTRUCTOR
7381 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7382 && known_eq (CONSTRUCTOR_NELTS (rhs),
7383 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7384 {
7385 unsigned i, nelts;
7386 tree val;
7387
7388 nelts = CONSTRUCTOR_NELTS (rhs);
7389 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7390 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7391 {
7392 val = (*valueize) (val);
7393 if (TREE_CODE (val) == INTEGER_CST
7394 || TREE_CODE (val) == REAL_CST
7395 || TREE_CODE (val) == FIXED_CST)
7396 vec.quick_push (val);
7397 else
7398 return NULL_TREE;
7399 }
7400
7401 return vec.build ();
7402 }
7403 if (subcode == OBJ_TYPE_REF)
7404 {
7405 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7406 /* If callee is constant, we can fold away the wrapper. */
7407 if (is_gimple_min_invariant (val))
7408 return val;
7409 }
7410
7411 if (kind == tcc_reference)
7412 {
7413 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7414 || TREE_CODE (rhs) == REALPART_EXPR
7415 || TREE_CODE (rhs) == IMAGPART_EXPR)
7416 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7417 {
7418 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7419 return fold_unary_loc (EXPR_LOCATION (rhs),
7420 TREE_CODE (rhs),
7421 TREE_TYPE (rhs), val);
7422 }
7423 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7424 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7425 {
7426 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7427 return fold_ternary_loc (EXPR_LOCATION (rhs),
7428 TREE_CODE (rhs),
7429 TREE_TYPE (rhs), val,
7430 TREE_OPERAND (rhs, 1),
7431 TREE_OPERAND (rhs, 2));
7432 }
7433 else if (TREE_CODE (rhs) == MEM_REF
7434 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7435 {
7436 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7437 if (TREE_CODE (val) == ADDR_EXPR
7438 && is_gimple_min_invariant (val))
7439 {
7440 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7441 unshare_expr (val),
7442 TREE_OPERAND (rhs, 1));
7443 if (tem)
7444 rhs = tem;
7445 }
7446 }
7447 return fold_const_aggregate_ref_1 (rhs, valueize);
7448 }
7449 else if (kind == tcc_declaration)
7450 return get_symbol_constant_value (rhs);
7451 return rhs;
7452 }
7453
7454 case GIMPLE_UNARY_RHS:
7455 return NULL_TREE;
7456
7457 case GIMPLE_BINARY_RHS:
7458 /* Translate &x + CST into an invariant form suitable for
7459 further propagation. */
7460 if (subcode == POINTER_PLUS_EXPR)
7461 {
7462 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7463 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7464 if (TREE_CODE (op0) == ADDR_EXPR
7465 && TREE_CODE (op1) == INTEGER_CST)
7466 {
7467 tree off = fold_convert (ptr_type_node, op1);
7468 return build1_loc
7469 (loc, ADDR_EXPR, TREE_TYPE (op0),
7470 fold_build2 (MEM_REF,
7471 TREE_TYPE (TREE_TYPE (op0)),
7472 unshare_expr (op0), off));
7473 }
7474 }
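/* Illustrative example: if OP0 valueizes to &a and OP1 to 4, this
   yields the invariant &MEM[&a + 4], which later passes can
   propagate further. */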
7475 /* Canonicalize bool != 0 and bool == 0 appearing after
7476 valueization. While gimple_simplify handles this
7477 it can get confused by the ~X == 1 -> X == 0 transform
7478 which we can't reduce to an SSA name or a constant
7479 (and we have no way to tell gimple_simplify to not
7480 consider those transforms in the first place). */
7481 else if (subcode == EQ_EXPR
7482 || subcode == NE_EXPR)
7483 {
7484 tree lhs = gimple_assign_lhs (stmt);
7485 tree op0 = gimple_assign_rhs1 (stmt);
7486 if (useless_type_conversion_p (TREE_TYPE (lhs),
7487 TREE_TYPE (op0)))
7488 {
7489 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7490 op0 = (*valueize) (op0);
7491 if (TREE_CODE (op0) == INTEGER_CST)
7492 std::swap (op0, op1);
7493 if (TREE_CODE (op1) == INTEGER_CST
7494 && ((subcode == NE_EXPR && integer_zerop (op1))
7495 || (subcode == EQ_EXPR && integer_onep (op1))))
7496 return op0;
7497 }
7498 }
7499 return NULL_TREE;
7500
7501 case GIMPLE_TERNARY_RHS:
7502 {
7503 /* Handle ternary operators that can appear in GIMPLE form. */
7504 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7505 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7506 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7507 return fold_ternary_loc (loc, subcode,
7508 gimple_expr_type (stmt), op0, op1, op2);
7509 }
7510
7511 default:
7512 gcc_unreachable ();
7513 }
7514 }
7515
7516 case GIMPLE_CALL:
7517 {
7518 tree fn;
7519 gcall *call_stmt = as_a <gcall *> (stmt);
7520
7521 if (gimple_call_internal_p (stmt))
7522 {
7523 enum tree_code subcode = ERROR_MARK;
7524 switch (gimple_call_internal_fn (stmt))
7525 {
7526 case IFN_UBSAN_CHECK_ADD:
7527 subcode = PLUS_EXPR;
7528 break;
7529 case IFN_UBSAN_CHECK_SUB:
7530 subcode = MINUS_EXPR;
7531 break;
7532 case IFN_UBSAN_CHECK_MUL:
7533 subcode = MULT_EXPR;
7534 break;
7535 case IFN_BUILTIN_EXPECT:
7536 {
7537 tree arg0 = gimple_call_arg (stmt, 0);
7538 tree op0 = (*valueize) (arg0);
7539 if (TREE_CODE (op0) == INTEGER_CST)
7540 return op0;
7541 return NULL_TREE;
7542 }
7543 default:
7544 return NULL_TREE;
7545 }
7546 tree arg0 = gimple_call_arg (stmt, 0);
7547 tree arg1 = gimple_call_arg (stmt, 1);
7548 tree op0 = (*valueize) (arg0);
7549 tree op1 = (*valueize) (arg1);
7550
7551 if (TREE_CODE (op0) != INTEGER_CST
7552 || TREE_CODE (op1) != INTEGER_CST)
7553 {
7554 switch (subcode)
7555 {
7556 case MULT_EXPR:
7557 /* x * 0 = 0 * x = 0 without overflow. */
7558 if (integer_zerop (op0) || integer_zerop (op1))
7559 return build_zero_cst (TREE_TYPE (arg0));
7560 break;
7561 case MINUS_EXPR:
7562 /* y - y = 0 without overflow. */
7563 if (operand_equal_p (op0, op1, 0))
7564 return build_zero_cst (TREE_TYPE (arg0));
7565 break;
7566 default:
7567 break;
7568 }
7569 }
7570 tree res
7571 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7572 if (res
7573 && TREE_CODE (res) == INTEGER_CST
7574 && !TREE_OVERFLOW (res))
7575 return res;
7576 return NULL_TREE;
7577 }
7578
7579 fn = (*valueize) (gimple_call_fn (stmt));
7580 if (TREE_CODE (fn) == ADDR_EXPR
7581 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7582 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7583 && gimple_builtin_call_types_compatible_p (stmt,
7584 TREE_OPERAND (fn, 0)))
7585 {
7586 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7587 tree retval;
7588 unsigned i;
7589 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7590 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7591 retval = fold_builtin_call_array (loc,
7592 gimple_call_return_type (call_stmt),
7593 fn, gimple_call_num_args (stmt), args);
7594 if (retval)
7595 {
7596 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7597 STRIP_NOPS (retval);
7598 retval = fold_convert (gimple_call_return_type (call_stmt),
7599 retval);
7600 }
7601 return retval;
7602 }
7603 return NULL_TREE;
7604 }
7605
7606 default:
7607 return NULL_TREE;
7608 }
7609 }
7610
7611 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7612 Returns NULL_TREE if folding to a constant is not possible, otherwise
7613 returns a constant according to is_gimple_min_invariant. */
7614
7615 tree
7616 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7617 {
7618 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7619 if (res && is_gimple_min_invariant (res))
7620 return res;
7621 return NULL_TREE;
7622 }
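/* Illustrative use (assumed, not part of the original sources):
   with an identity valueizer
     static tree identity (tree t) { return t; }
     tree cst = gimple_fold_stmt_to_constant (stmt, identity);
   CST is non-NULL only if STMT computes an invariant outright,
   without substituting known values for its SSA operands. */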
7623
7624
7625 /* The following set of functions are supposed to fold references using
7626 their constant initializers. */
7627
7628 /* See if we can find constructor defining value of BASE.
7629 When we reach the constructor at a constant offset (such as
7630 when BASE is array[40] and we know the constructor of array),
7631 BIT_OFFSET is adjusted accordingly.
7632
7633 As a special case, return error_mark_node when constructor
7634 is not explicitly available, but it is known to be zero
7635 such as 'static const int a;'. */
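/* Illustrative example (assumed): for
     static const int a[2] = { 1, 2 };
   and BASE a[1], the CONSTRUCTOR { 1, 2 } is returned and
   *BIT_OFFSET is incremented by the bit offset of a[1],
   i.e. by 32 on a target with 32-bit int. */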
7636 static tree
7637 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7638 tree (*valueize)(tree))
7639 {
7640 poly_int64 bit_offset2, size, max_size;
7641 bool reverse;
7642
7643 if (TREE_CODE (base) == MEM_REF)
7644 {
7645 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7646 if (!boff.to_shwi (bit_offset))
7647 return NULL_TREE;
7648
7649 if (valueize
7650 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7651 base = valueize (TREE_OPERAND (base, 0));
7652 if (!base || TREE_CODE (base) != ADDR_EXPR)
7653 return NULL_TREE;
7654 base = TREE_OPERAND (base, 0);
7655 }
7656 else if (valueize
7657 && TREE_CODE (base) == SSA_NAME)
7658 base = valueize (base);
7659
7660 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7661 DECL_INITIAL. If BASE is a nested reference into another
7662 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7663 the inner reference. */
7664 switch (TREE_CODE (base))
7665 {
7666 case VAR_DECL:
7667 case CONST_DECL:
7668 {
7669 tree init = ctor_for_folding (base);
7670
7671 /* Our semantics are the exact opposite of ctor_for_folding's:
7672 NULL means unknown, while error_mark_node means 0. */
7673 if (init == error_mark_node)
7674 return NULL_TREE;
7675 if (!init)
7676 return error_mark_node;
7677 return init;
7678 }
7679
7680 case VIEW_CONVERT_EXPR:
7681 return get_base_constructor (TREE_OPERAND (base, 0),
7682 bit_offset, valueize);
7683
7684 case ARRAY_REF:
7685 case COMPONENT_REF:
7686 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7687 &reverse);
7688 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7689 return NULL_TREE;
7690 *bit_offset += bit_offset2;
7691 return get_base_constructor (base, bit_offset, valueize);
7692
7693 case CONSTRUCTOR:
7694 return base;
7695
7696 default:
7697 if (CONSTANT_CLASS_P (base))
7698 return base;
7699
7700 return NULL_TREE;
7701 }
7702 }
7703
7704 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7705 to the memory at bit OFFSET. When non-null, TYPE is the expected
7706 type of the reference; otherwise the type of the referenced element
7707 is used instead. When SIZE is zero, attempt to fold a reference to
7708 the entire element which OFFSET refers to. Increment *SUBOFF by
7709 the bit offset of the accessed element. */
7710
7711 static tree
7712 fold_array_ctor_reference (tree type, tree ctor,
7713 unsigned HOST_WIDE_INT offset,
7714 unsigned HOST_WIDE_INT size,
7715 tree from_decl,
7716 unsigned HOST_WIDE_INT *suboff)
7717 {
7718 offset_int low_bound;
7719 offset_int elt_size;
7720 offset_int access_index;
7721 tree domain_type = NULL_TREE;
7722 HOST_WIDE_INT inner_offset;
7723
7724 /* Compute low bound and elt size. */
7725 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7726 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7727 if (domain_type && TYPE_MIN_VALUE (domain_type))
7728 {
7729 /* Static constructors for variably sized objects make no sense. */
7730 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7731 return NULL_TREE;
7732 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7733 }
7734 else
7735 low_bound = 0;
7736 /* Static constructors for variably sized objects make no sense. */
7737 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7738 return NULL_TREE;
7739 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7740
7741 /* When TYPE is non-null, verify that it specifies a constant-sized
7742 access of a multiple of the array element size. Avoid division
7743 by zero below when ELT_SIZE is zero, such as with the result of
7744 an initializer for a zero-length array or an empty struct. */
7745 if (elt_size == 0
7746 || (type
7747 && (!TYPE_SIZE_UNIT (type)
7748 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7749 return NULL_TREE;
7750
7751 /* Compute the array index we look for. */
7752 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7753 elt_size);
7754 access_index += low_bound;
7755
7756 /* And offset within the access. */
7757 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
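/* E.g. with 4-byte elements and a zero low bound, a read at bit
   OFFSET 72 yields ACCESS_INDEX (72 / 8) / 4 = 2 and
   INNER_OFFSET 72 % 32 = 8. */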
7758
7759 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7760 if (size > elt_sz * BITS_PER_UNIT)
7761 {
7762 /* native_encode_expr constraints. */
7763 if (size > MAX_BITSIZE_MODE_ANY_MODE
7764 || size % BITS_PER_UNIT != 0
7765 || inner_offset % BITS_PER_UNIT != 0
7766 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7767 return NULL_TREE;
7768
7769 unsigned ctor_idx;
7770 tree val = get_array_ctor_element_at_index (ctor, access_index,
7771 &ctor_idx);
7772 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7773 return build_zero_cst (type);
7774
7775 /* native-encode adjacent ctor elements. */
7776 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7777 unsigned bufoff = 0;
7778 offset_int index = 0;
7779 offset_int max_index = access_index;
7780 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7781 if (!val)
7782 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7783 else if (!CONSTANT_CLASS_P (val))
7784 return NULL_TREE;
7785 if (!elt->index)
7786 ;
7787 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7788 {
7789 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7790 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7791 }
7792 else
7793 index = max_index = wi::to_offset (elt->index);
7794 index = wi::umax (index, access_index);
7795 do
7796 {
7797 if (bufoff + elt_sz > sizeof (buf))
7798 elt_sz = sizeof (buf) - bufoff;
7799 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7800 inner_offset / BITS_PER_UNIT);
7801 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7802 return NULL_TREE;
7803 inner_offset = 0;
7804 bufoff += len;
7805
7806 access_index += 1;
7807 if (wi::cmpu (access_index, index) == 0)
7808 val = elt->value;
7809 else if (wi::cmpu (access_index, max_index) > 0)
7810 {
7811 ctor_idx++;
7812 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7813 {
7814 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7815 ++max_index;
7816 }
7817 else
7818 {
7819 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7820 index = 0;
7821 max_index = access_index;
7822 if (!elt->index)
7823 ;
7824 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7825 {
7826 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7827 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7828 }
7829 else
7830 index = max_index = wi::to_offset (elt->index);
7831 index = wi::umax (index, access_index);
7832 if (wi::cmpu (access_index, index) == 0)
7833 val = elt->value;
7834 else
7835 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7836 }
7837 }
7838 }
7839 while (bufoff < size / BITS_PER_UNIT);
7840 *suboff += size;
7841 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7842 }
7843
7844 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7845 {
7846 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7847 {
7848 /* For the final reference to the entire accessed element
7849 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7850 may be null) in favor of the type of the element, and set
7851 SIZE to the size of the accessed element. */
7852 inner_offset = 0;
7853 type = TREE_TYPE (val);
7854 size = elt_sz * BITS_PER_UNIT;
7855 }
7856 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7857 && TREE_CODE (val) == CONSTRUCTOR
7858 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7859 /* If this isn't the last element in the CTOR and is itself a CTOR
7860 that does not cover the whole object we are requesting, give up,
7861 since we're not set up for combining from multiple CTORs. */
7862 return NULL_TREE;
7863
7864 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7865 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7866 suboff);
7867 }
7868
7869 /* Memory not explicitly mentioned in constructor is 0 (or
7870 the reference is out of range). */
7871 return type ? build_zero_cst (type) : NULL_TREE;
7872 }
7873
7874 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7875 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7876 is the expected type of the reference; otherwise the type of
7877 the referenced member is used instead. When SIZE is zero,
7878 attempt to fold a reference to the entire member which OFFSET
7879 refers to. Increment *SUBOFF by the bit offset
7880 of the accessed member. */
7881
7882 static tree
7883 fold_nonarray_ctor_reference (tree type, tree ctor,
7884 unsigned HOST_WIDE_INT offset,
7885 unsigned HOST_WIDE_INT size,
7886 tree from_decl,
7887 unsigned HOST_WIDE_INT *suboff)
7888 {
7889 unsigned HOST_WIDE_INT cnt;
7890 tree cfield, cval;
7891
7892 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7893 cval)
7894 {
7895 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7896 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7897 tree field_size = DECL_SIZE (cfield);
7898
7899 if (!field_size)
7900 {
7901 /* Determine the size of the flexible array member from
7902 the size of the initializer provided for it. */
7903 field_size = TYPE_SIZE (TREE_TYPE (cval));
7904 }
7905
7906 /* Variable sized objects in static constructors make no sense,
7907 but field_size can be NULL for flexible array members. */
7908 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7909 && TREE_CODE (byte_offset) == INTEGER_CST
7910 && (field_size != NULL_TREE
7911 ? TREE_CODE (field_size) == INTEGER_CST
7912 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7913
7914 /* Compute bit offset of the field. */
7915 offset_int bitoffset
7916 = (wi::to_offset (field_offset)
7917 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
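/* E.g. a field with DECL_FIELD_OFFSET 4 (bytes) and
   DECL_FIELD_BIT_OFFSET 16 starts at bit 4 * 8 + 16 = 48. */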
7918 /* Compute bit offset where the field ends. */
7919 offset_int bitoffset_end;
7920 if (field_size != NULL_TREE)
7921 bitoffset_end = bitoffset + wi::to_offset (field_size);
7922 else
7923 bitoffset_end = 0;
7924
7925 /* Compute the bit offset of the end of the desired access.
7926 As a special case, if the size of the desired access is
7927 zero, assume the access is to the entire field (and let
7928 the caller make any necessary adjustments based on the bit
7929 offset of the field stored in *SUBOFF). */
7930 offset_int access_end = offset_int (offset);
7931 if (size)
7932 access_end += size;
7933 else
7934 access_end = bitoffset_end;
7935
7936 /* Is there any overlap between the desired access at
7937 [OFFSET, OFFSET+SIZE) and the offset of the field within
7938 the object at [BITOFFSET, BITOFFSET_END)? */
7939 if (wi::cmps (access_end, bitoffset) > 0
7940 && (field_size == NULL_TREE
7941 || wi::lts_p (offset, bitoffset_end)))
7942 {
7943 *suboff += bitoffset.to_uhwi ();
7944
7945 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7946 {
7947 /* For the final reference to the entire accessed member
7948 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7949 be null) in favor of the type of the member, and set
7950 SIZE to the size of the accessed member. */
7951 offset = bitoffset.to_uhwi ();
7952 type = TREE_TYPE (cval);
7953 size = (bitoffset_end - bitoffset).to_uhwi ();
7954 }
7955
7956 /* We do have overlap. Now see if the field is large enough
7957 to cover the access. Give up for accesses that extend
7958 beyond the end of the object or that span multiple fields. */
7959 if (wi::cmps (access_end, bitoffset_end) > 0)
7960 return NULL_TREE;
7961 if (offset < bitoffset)
7962 return NULL_TREE;
7963
7964 offset_int inner_offset = offset_int (offset) - bitoffset;
7965 return fold_ctor_reference (type, cval,
7966 inner_offset.to_uhwi (), size,
7967 from_decl, suboff);
7968 }
7969 }
7970
7971 if (!type)
7972 return NULL_TREE;
7973
7974 return build_zero_cst (type);
7975 }
7976
7977 /* CTOR is a value initializing memory. Fold a reference of TYPE and
7978 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7979 is zero, attempt to fold a reference to the entire subobject
7980 which POLY_OFFSET refers to. This is used when folding accesses to
7981 string members of aggregates. When non-null, set *SUBOFF to
7982 the bit offset of the accessed subobject. */
7983
7984 tree
7985 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7986 const poly_uint64 &poly_size, tree from_decl,
7987 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7988 {
7989 tree ret;
7990
7991 /* We found the field with an exact match. */
7992 if (type
7993 && useless_type_conversion_p (type, TREE_TYPE (ctor))
7994 && known_eq (poly_offset, 0U))
7995 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7996
7997 /* The remaining optimizations need a constant size and offset. */
7998 unsigned HOST_WIDE_INT size, offset;
7999 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8000 return NULL_TREE;
8001
8002 /* We are at the end of the walk; see if we can view-convert the
8003 result. */
8004 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8005 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8006 && !compare_tree_int (TYPE_SIZE (type), size)
8007 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
8008 {
8009 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8010 if (ret)
8011 {
8012 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8013 if (ret)
8014 STRIP_USELESS_TYPE_CONVERSION (ret);
8015 }
8016 return ret;
8017 }
8018 /* For constants and byte-aligned/sized reads try to go through
8019 native_encode/interpret. */
8020 if (CONSTANT_CLASS_P (ctor)
8021 && BITS_PER_UNIT == 8
8022 && offset % BITS_PER_UNIT == 0
8023 && offset / BITS_PER_UNIT <= INT_MAX
8024 && size % BITS_PER_UNIT == 0
8025 && size <= MAX_BITSIZE_MODE_ANY_MODE
8026 && can_native_interpret_type_p (type))
8027 {
8028 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8029 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8030 offset / BITS_PER_UNIT);
8031 if (len > 0)
8032 return native_interpret_expr (type, buf, len);
8033 }
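/* This is effectively type punning through the constant's byte
   image; e.g. (illustrative) reading a 4-byte unsigned int at
   offset 0 from the REAL_CST 1.0f yields 0x3f800000, since the
   bytes are encoded and reinterpreted in the same target byte
   order. */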
8034 if (TREE_CODE (ctor) == CONSTRUCTOR)
8035 {
8036 unsigned HOST_WIDE_INT dummy = 0;
8037 if (!suboff)
8038 suboff = &dummy;
8039
8040 tree ret;
8041 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8042 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8043 ret = fold_array_ctor_reference (type, ctor, offset, size,
8044 from_decl, suboff);
8045 else
8046 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8047 from_decl, suboff);
8048
8049 /* Fall back to native_encode_initializer. Needs to be done
8050 only in the outermost fold_ctor_reference call (because it itself
8051 recurses into CONSTRUCTORs) and doesn't update suboff. */
8052 if (ret == NULL_TREE
8053 && suboff == &dummy
8054 && BITS_PER_UNIT == 8
8055 && offset % BITS_PER_UNIT == 0
8056 && offset / BITS_PER_UNIT <= INT_MAX
8057 && size % BITS_PER_UNIT == 0
8058 && size <= MAX_BITSIZE_MODE_ANY_MODE
8059 && can_native_interpret_type_p (type))
8060 {
8061 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8062 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8063 offset / BITS_PER_UNIT);
8064 if (len > 0)
8065 return native_interpret_expr (type, buf, len);
8066 }
8067
8068 return ret;
8069 }
8070
8071 return NULL_TREE;
8072 }
8073
8074 /* Return the tree representing the element referenced by T if T is an
8075 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8076 names using VALUEIZE. Return NULL_TREE otherwise. */
8077
8078 tree
8079 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8080 {
8081 tree ctor, idx, base;
8082 poly_int64 offset, size, max_size;
8083 tree tem;
8084 bool reverse;
8085
8086 if (TREE_THIS_VOLATILE (t))
8087 return NULL_TREE;
8088
8089 if (DECL_P (t))
8090 return get_symbol_constant_value (t);
8091
8092 tem = fold_read_from_constant_string (t);
8093 if (tem)
8094 return tem;
8095
8096 switch (TREE_CODE (t))
8097 {
8098 case ARRAY_REF:
8099 case ARRAY_RANGE_REF:
8100 /* Constant indexes are handled well by get_base_constructor.
8101 Only special case variable offsets.
8102 FIXME: This code can't handle nested references with variable indexes
8103 (they will be handled only by iteration of ccp). Perhaps we can bring
8104 get_ref_base_and_extent here and make it use a valueize callback. */
8105 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8106 && valueize
8107 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8108 && poly_int_tree_p (idx))
8109 {
8110 tree low_bound, unit_size;
8111
8112 /* If the resulting bit-offset is constant, track it. */
8113 if ((low_bound = array_ref_low_bound (t),
8114 poly_int_tree_p (low_bound))
8115 && (unit_size = array_ref_element_size (t),
8116 tree_fits_uhwi_p (unit_size)))
8117 {
8118 poly_offset_int woffset
8119 = wi::sext (wi::to_poly_offset (idx)
8120 - wi::to_poly_offset (low_bound),
8121 TYPE_PRECISION (sizetype));
8122 woffset *= tree_to_uhwi (unit_size);
8123 woffset *= BITS_PER_UNIT;
8124 if (woffset.to_shwi (&offset))
8125 {
8126 base = TREE_OPERAND (t, 0);
8127 ctor = get_base_constructor (base, &offset, valueize);
8128 /* Empty constructor. Always fold to 0. */
8129 if (ctor == error_mark_node)
8130 return build_zero_cst (TREE_TYPE (t));
8131 /* Out of bound array access. Value is undefined,
8132 but don't fold. */
8133 if (maybe_lt (offset, 0))
8134 return NULL_TREE;
8135 /* We cannot determine ctor. */
8136 if (!ctor)
8137 return NULL_TREE;
8138 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8139 tree_to_uhwi (unit_size)
8140 * BITS_PER_UNIT,
8141 base);
8142 }
8143 }
8144 }
8145 /* Fallthru. */
8146
8147 case COMPONENT_REF:
8148 case BIT_FIELD_REF:
8149 case TARGET_MEM_REF:
8150 case MEM_REF:
8151 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8152 ctor = get_base_constructor (base, &offset, valueize);
8153
8154 /* Empty constructor. Always fold to 0. */
8155 if (ctor == error_mark_node)
8156 return build_zero_cst (TREE_TYPE (t));
8157 /* We do not know precise address. */
8158 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8159 return NULL_TREE;
8160 /* We cannot determine ctor. */
8161 if (!ctor)
8162 return NULL_TREE;
8163
8164 /* Out of bound array access. Value is undefined, but don't fold. */
8165 if (maybe_lt (offset, 0))
8166 return NULL_TREE;
8167
8168 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8169 if (tem)
8170 return tem;
8171
8172 /* For bit field reads try to read the representative and
8173 adjust. */
8174 if (TREE_CODE (t) == COMPONENT_REF
8175 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8176 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8177 {
8178 HOST_WIDE_INT csize, coffset;
8179 tree field = TREE_OPERAND (t, 1);
8180 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8181 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8182 && size.is_constant (&csize)
8183 && offset.is_constant (&coffset)
8184 && (coffset % BITS_PER_UNIT != 0
8185 || csize % BITS_PER_UNIT != 0)
8186 && !reverse
8187 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8188 {
8189 poly_int64 bitoffset;
8190 poly_uint64 field_offset, repr_offset;
8191 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8192 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8193 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8194 else
8195 bitoffset = 0;
8196 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8197 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8198 HOST_WIDE_INT bitoff;
8199 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8200 - TYPE_PRECISION (TREE_TYPE (field)));
8201 if (bitoffset.is_constant (&bitoff)
8202 && bitoff >= 0
8203 && bitoff <= diff)
8204 {
8205 offset -= bitoff;
8206 size = tree_to_uhwi (DECL_SIZE (repr));
8207
8208 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8209 size, base);
8210 if (tem && TREE_CODE (tem) == INTEGER_CST)
8211 {
8212 if (!BYTES_BIG_ENDIAN)
8213 tem = wide_int_to_tree (TREE_TYPE (field),
8214 wi::lrshift (wi::to_wide (tem),
8215 bitoff));
8216 else
8217 tem = wide_int_to_tree (TREE_TYPE (field),
8218 wi::lrshift (wi::to_wide (tem),
8219 diff - bitoff));
8220 return tem;
8221 }
8222 }
8223 }
8224 }
8225 break;
8226
8227 case REALPART_EXPR:
8228 case IMAGPART_EXPR:
8229 {
8230 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8231 if (c && TREE_CODE (c) == COMPLEX_CST)
8232 return fold_build1_loc (EXPR_LOCATION (t),
8233 TREE_CODE (t), TREE_TYPE (t), c);
8234 break;
8235 }
8236
8237 default:
8238 break;
8239 }
8240
8241 return NULL_TREE;
8242 }
8243
8244 tree
8245 fold_const_aggregate_ref (tree t)
8246 {
8247 return fold_const_aggregate_ref_1 (t, NULL);
8248 }
8249
8250 /* Look up the virtual method with index TOKEN in virtual table V
8251 at OFFSET.
8252 If CAN_REFER is non-NULL, set it to false if the method
8253 is not referable or if the virtual table is ill-formed (such as rewritten
8254 by a non-C++ produced symbol); otherwise just return NULL in that case. */
8255
8256 tree
8257 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8258 tree v,
8259 unsigned HOST_WIDE_INT offset,
8260 bool *can_refer)
8261 {
8262 tree vtable = v, init, fn;
8263 unsigned HOST_WIDE_INT size;
8264 unsigned HOST_WIDE_INT elt_size, access_index;
8265 tree domain_type;
8266
8267 if (can_refer)
8268 *can_refer = true;
8269
8270 /* First of all, double-check that we have a virtual table. */
8271 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8272 {
8273 /* Pass down that we lost track of the target. */
8274 if (can_refer)
8275 *can_refer = false;
8276 return NULL_TREE;
8277 }
8278
8279 init = ctor_for_folding (v);
8280
8281 /* The virtual tables should always be born with constructors
8282 and we should always assume that they are available for
8283 folding. At the moment we do not stream them in all cases,
8284 but it should never happen that the ctor seems unreachable. */
8285 gcc_assert (init);
8286 if (init == error_mark_node)
8287 {
8288 /* Pass down that we lost track of the target. */
8289 if (can_refer)
8290 *can_refer = false;
8291 return NULL_TREE;
8292 }
8293 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8294 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8295 offset *= BITS_PER_UNIT;
8296 offset += token * size;
8297
8298 /* Look up the value in the constructor, which is assumed to be an array.
8299 This is equivalent to
8300 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8301 offset, size, NULL);
8302 but in constant time. We expect that the frontend produced a simple
8303 array without indexed initializers. */
8304
8305 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8306 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8307 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8308 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8309
8310 access_index = offset / BITS_PER_UNIT / elt_size;
8311 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
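/* E.g. (illustrative) with 8-byte vtable slots, TOKEN 3 and a
   16-byte OFFSET, ACCESS_INDEX is (16 * 8 + 3 * 64) / 8 / 8 = 5. */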
8312
8313 /* The C++ FE can now produce indexed fields, and we check if the indexes
8314 match. */
8315 if (access_index < CONSTRUCTOR_NELTS (init))
8316 {
8317 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8318 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8319 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8320 STRIP_NOPS (fn);
8321 }
8322 else
8323 fn = NULL;
8324
8325 /* For a type-inconsistent program we may end up looking up a virtual method
8326 in a virtual table that does not contain TOKEN entries. We may overrun
8327 the virtual table and pick up a constant or RTTI info pointer.
8328 In any case the call is undefined. */
8329 if (!fn
8330 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8331 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8332 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8333 else
8334 {
8335 fn = TREE_OPERAND (fn, 0);
8336
8337 /* When the cgraph node is missing and the function is not public, we cannot
8338 devirtualize. This can happen in WHOPR when the actual method
8339 ends up in another partition because we found the devirtualization
8340 possibility too late. */
8341 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8342 {
8343 if (can_refer)
8344 {
8345 *can_refer = false;
8346 return fn;
8347 }
8348 return NULL_TREE;
8349 }
8350 }
8351
8352 /* Make sure we create a cgraph node for functions we'll reference.
8353 They can be non-existent if the reference comes from an entry
8354 of an external vtable for example. */
8355 cgraph_node::get_create (fn);
8356
8357 return fn;
8358 }
8359
8360 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8361 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8362 KNOWN_BINFO carries the binfo describing the true type of
8363 OBJ_TYPE_REF_OBJECT(REF).
8364 If CAN_REFER is non-NULL, set it to false if the method
8365 is not referable or if the virtual table is ill-formed (such as rewritten
8366 by a non-C++ produced symbol); otherwise just return NULL in that case. */
8367
8368 tree
8369 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8370 bool *can_refer)
8371 {
8372 unsigned HOST_WIDE_INT offset;
8373 tree v;
8374
8375 v = BINFO_VTABLE (known_binfo);
8376 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8377 if (!v)
8378 return NULL_TREE;
8379
8380 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8381 {
8382 if (can_refer)
8383 *can_refer = false;
8384 return NULL_TREE;
8385 }
8386 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8387 }
8388
8389 /* Given a pointer value T, return a simplified version of an
8390 indirection through T, or NULL_TREE if no simplification is
8391 possible. Note that the resulting type may differ from
8392 the type pointed to, in the sense that it is still compatible
8393 from the langhooks point of view. */
8394
8395 tree
8396 gimple_fold_indirect_ref (tree t)
8397 {
8398 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8399 tree sub = t;
8400 tree subtype;
8401
8402 STRIP_NOPS (sub);
8403 subtype = TREE_TYPE (sub);
8404 if (!POINTER_TYPE_P (subtype)
8405 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8406 return NULL_TREE;
8407
8408 if (TREE_CODE (sub) == ADDR_EXPR)
8409 {
8410 tree op = TREE_OPERAND (sub, 0);
8411 tree optype = TREE_TYPE (op);
8412 /* *&p => p */
8413 if (useless_type_conversion_p (type, optype))
8414 return op;
8415
8416 /* *(foo *)&fooarray => fooarray[0] */
8417 if (TREE_CODE (optype) == ARRAY_TYPE
8418 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8419 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8420 {
8421 tree type_domain = TYPE_DOMAIN (optype);
8422 tree min_val = size_zero_node;
8423 if (type_domain && TYPE_MIN_VALUE (type_domain))
8424 min_val = TYPE_MIN_VALUE (type_domain);
8425 if (TREE_CODE (min_val) == INTEGER_CST)
8426 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8427 }
8428 /* *(foo *)&complexfoo => __real__ complexfoo */
8429 else if (TREE_CODE (optype) == COMPLEX_TYPE
8430 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8431 return fold_build1 (REALPART_EXPR, type, op);
8432 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8433 else if (TREE_CODE (optype) == VECTOR_TYPE
8434 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8435 {
8436 tree part_width = TYPE_SIZE (type);
8437 tree index = bitsize_int (0);
8438 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8439 }
8440 }
8441
8442 /* *(p + CST) -> ... */
8443 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8444 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8445 {
8446 tree addr = TREE_OPERAND (sub, 0);
8447 tree off = TREE_OPERAND (sub, 1);
8448 tree addrtype;
8449
8450 STRIP_NOPS (addr);
8451 addrtype = TREE_TYPE (addr);
8452
8453 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8454 if (TREE_CODE (addr) == ADDR_EXPR
8455 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8456 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8457 && tree_fits_uhwi_p (off))
8458 {
8459 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8460 tree part_width = TYPE_SIZE (type);
8461 unsigned HOST_WIDE_INT part_widthi
8462 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8463 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8464 tree index = bitsize_int (indexi);
8465 if (known_lt (offset / part_widthi,
8466 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8467 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8468 part_width, index);
8469 }
8470
8471 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8472 if (TREE_CODE (addr) == ADDR_EXPR
8473 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8474 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8475 {
8476 tree size = TYPE_SIZE_UNIT (type);
8477 if (tree_int_cst_equal (size, off))
8478 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8479 }
8480
8481 /* *(p + CST) -> MEM_REF <p, CST>. */
8482 if (TREE_CODE (addr) != ADDR_EXPR
8483 || DECL_P (TREE_OPERAND (addr, 0)))
8484 return fold_build2 (MEM_REF, type,
8485 addr,
8486 wide_int_to_tree (ptype, wi::to_wide (off)));
8487 }
8488
8489 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8490 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8491 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8492 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8493 {
8494 tree type_domain;
8495 tree min_val = size_zero_node;
8496 tree osub = sub;
8497 sub = gimple_fold_indirect_ref (sub);
8498 if (! sub)
8499 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8500 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8501 if (type_domain && TYPE_MIN_VALUE (type_domain))
8502 min_val = TYPE_MIN_VALUE (type_domain);
8503 if (TREE_CODE (min_val) == INTEGER_CST)
8504 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8505 }
8506
8507 return NULL_TREE;
8508 }
8509
8510 /* Return true if CODE is an operation that when operating on signed
8511 integer types involves undefined behavior on overflow and the
8512 operation can be expressed with unsigned arithmetic. */
8513
8514 bool
8515 arith_code_with_undefined_signed_overflow (tree_code code)
8516 {
8517 switch (code)
8518 {
8519 case ABS_EXPR:
8520 case PLUS_EXPR:
8521 case MINUS_EXPR:
8522 case MULT_EXPR:
8523 case NEGATE_EXPR:
8524 case POINTER_PLUS_EXPR:
8525 return true;
8526 default:
8527 return false;
8528 }
8529 }
8530
8531 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8532 operation that can be transformed to unsigned arithmetic by converting
8533 its operands, carrying out the operation in the corresponding unsigned
8534 type and converting the result back to the original type.
8535
8536 Returns a sequence of statements that replace STMT and also contain
8537 a modified form of STMT itself. */
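/* For example (illustrative), with 32-bit int operands, the assignment
     a_1 = b_2 + c_3;
   is rewritten to
     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;
     a_1 = (int) _6;
   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow. */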
8538
8539 gimple_seq
8540 rewrite_to_defined_overflow (gimple *stmt)
8541 {
8542 if (dump_file && (dump_flags & TDF_DETAILS))
8543 {
8544 fprintf (dump_file, "rewriting stmt with undefined signed "
8545 "overflow ");
8546 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8547 }
8548
8549 tree lhs = gimple_assign_lhs (stmt);
8550 tree type = unsigned_type_for (TREE_TYPE (lhs));
8551 gimple_seq stmts = NULL;
8552 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8553 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8554 else
8555 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8556 {
8557 tree op = gimple_op (stmt, i);
8558 op = gimple_convert (&stmts, type, op);
8559 gimple_set_op (stmt, i, op);
8560 }
8561 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8562 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8563 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8564 gimple_set_modified (stmt, true);
8565 gimple_seq_add_stmt (&stmts, stmt);
8566 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8567 gimple_seq_add_stmt (&stmts, cvt);
8568
8569 return stmts;
8570 }
8571
8572
8573 /* The valueization hook we use for the gimple_build API simplification.
8574 This makes us match fold_buildN behavior by only combining with
8575 statements in the sequence(s) we are currently building. */
8576
8577 static tree
8578 gimple_build_valueize (tree op)
8579 {
8580 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8581 return op;
8582 return NULL_TREE;
8583 }
8584
8585 /* Build the expression CODE OP0 of type TYPE with location LOC,
8586 simplifying it first if possible. Returns the built
8587 expression value and appends statements possibly defining it
8588 to SEQ. */
8589
8590 tree
8591 gimple_build (gimple_seq *seq, location_t loc,
8592 enum tree_code code, tree type, tree op0)
8593 {
8594 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8595 if (!res)
8596 {
8597 res = create_tmp_reg_or_ssa_name (type);
8598 gimple *stmt;
8599 if (code == REALPART_EXPR
8600 || code == IMAGPART_EXPR
8601 || code == VIEW_CONVERT_EXPR)
8602 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8603 else
8604 stmt = gimple_build_assign (res, code, op0);
8605 gimple_set_location (stmt, loc);
8606 gimple_seq_add_stmt_without_update (seq, stmt);
8607 }
8608 return res;
8609 }
8610
8611 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8612 simplifying it first if possible. Returns the built
8613 expression value and appends statements possibly defining it
8614 to SEQ. */
8615
8616 tree
8617 gimple_build (gimple_seq *seq, location_t loc,
8618 enum tree_code code, tree type, tree op0, tree op1)
8619 {
8620 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8621 if (!res)
8622 {
8623 res = create_tmp_reg_or_ssa_name (type);
8624 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8625 gimple_set_location (stmt, loc);
8626 gimple_seq_add_stmt_without_update (seq, stmt);
8627 }
8628 return res;
8629 }
8630
8631 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8632 simplifying it first if possible. Returns the built
8633 expression value and appends statements possibly defining it
8634 to SEQ. */
8635
8636 tree
8637 gimple_build (gimple_seq *seq, location_t loc,
8638 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8639 {
8640 tree res = gimple_simplify (code, type, op0, op1, op2,
8641 seq, gimple_build_valueize);
8642 if (!res)
8643 {
8644 res = create_tmp_reg_or_ssa_name (type);
8645 gimple *stmt;
8646 if (code == BIT_FIELD_REF)
8647 stmt = gimple_build_assign (res, code,
8648 build3 (code, type, op0, op1, op2));
8649 else
8650 stmt = gimple_build_assign (res, code, op0, op1, op2);
8651 gimple_set_location (stmt, loc);
8652 gimple_seq_add_stmt_without_update (seq, stmt);
8653 }
8654 return res;
8655 }
8656
8657 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8658 void) with a location LOC. Returns the built expression value (or NULL_TREE
8659 if TYPE is void) and appends statements possibly defining it to SEQ. */
8660
8661 tree
8662 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8663 {
8664 tree res = NULL_TREE;
8665 gcall *stmt;
8666 if (internal_fn_p (fn))
8667 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8668 else
8669 {
8670 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8671 stmt = gimple_build_call (decl, 0);
8672 }
8673 if (!VOID_TYPE_P (type))
8674 {
8675 res = create_tmp_reg_or_ssa_name (type);
8676 gimple_call_set_lhs (stmt, res);
8677 }
8678 gimple_set_location (stmt, loc);
8679 gimple_seq_add_stmt_without_update (seq, stmt);
8680 return res;
8681 }
8682
8683 /* Build the call FN (ARG0) with a result of type TYPE
8684 (or no result if TYPE is void) with location LOC,
8685 simplifying it first if possible. Returns the built
8686 expression value (or NULL_TREE if TYPE is void) and appends
8687 statements possibly defining it to SEQ. */
8688
8689 tree
8690 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8691 tree type, tree arg0)
8692 {
8693 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8694 if (!res)
8695 {
8696 gcall *stmt;
8697 if (internal_fn_p (fn))
8698 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8699 else
8700 {
8701 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8702 stmt = gimple_build_call (decl, 1, arg0);
8703 }
8704 if (!VOID_TYPE_P (type))
8705 {
8706 res = create_tmp_reg_or_ssa_name (type);
8707 gimple_call_set_lhs (stmt, res);
8708 }
8709 gimple_set_location (stmt, loc);
8710 gimple_seq_add_stmt_without_update (seq, stmt);
8711 }
8712 return res;
8713 }
8714
8715 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8716 (or no result if TYPE is void) with location LOC,
8717 simplifying it first if possible. Returns the built
8718 expression value (or NULL_TREE if TYPE is void) and appends
8719 statements possibly defining it to SEQ. */
8720
8721 tree
8722 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8723 tree type, tree arg0, tree arg1)
8724 {
8725 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8726 if (!res)
8727 {
8728 gcall *stmt;
8729 if (internal_fn_p (fn))
8730 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8731 else
8732 {
8733 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8734 stmt = gimple_build_call (decl, 2, arg0, arg1);
8735 }
8736 if (!VOID_TYPE_P (type))
8737 {
8738 res = create_tmp_reg_or_ssa_name (type);
8739 gimple_call_set_lhs (stmt, res);
8740 }
8741 gimple_set_location (stmt, loc);
8742 gimple_seq_add_stmt_without_update (seq, stmt);
8743 }
8744 return res;
8745 }
8746
8747 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8748 (or no result if TYPE is void) with location LOC,
8749 simplifying it first if possible. Returns the built
8750 expression value (or NULL_TREE if TYPE is void) and appends
8751 statements possibly defining it to SEQ. */
8752
8753 tree
8754 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8755 tree type, tree arg0, tree arg1, tree arg2)
8756 {
8757 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8758 seq, gimple_build_valueize);
8759 if (!res)
8760 {
8761 gcall *stmt;
8762 if (internal_fn_p (fn))
8763 stmt = gimple_build_call_internal (as_internal_fn (fn),
8764 3, arg0, arg1, arg2);
8765 else
8766 {
8767 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8768 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8769 }
8770 if (!VOID_TYPE_P (type))
8771 {
8772 res = create_tmp_reg_or_ssa_name (type);
8773 gimple_call_set_lhs (stmt, res);
8774 }
8775 gimple_set_location (stmt, loc);
8776 gimple_seq_add_stmt_without_update (seq, stmt);
8777 }
8778 return res;
8779 }
8780
8781 /* Build the conversion (TYPE) OP with a result of type TYPE
8782 with location LOC if such conversion is necessary in GIMPLE,
8783 simplifying it first.
8784 Returns the built expression value and appends
8785 statements possibly defining it to SEQ. */
8786
8787 tree
8788 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8789 {
8790 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8791 return op;
8792 return gimple_build (seq, loc, NOP_EXPR, type, op);
8793 }
8794
8795 /* Build the conversion (ptrofftype) OP with a result of a type
8796 compatible with ptrofftype with location LOC if such conversion
8797 is neccesary in GIMPLE, simplifying it first.
8798 Returns the built expression value and appends
8799 statements possibly defining it to SEQ. */
8800
8801 tree
8802 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8803 {
8804 if (ptrofftype_p (TREE_TYPE (op)))
8805 return op;
8806 return gimple_convert (seq, loc, sizetype, op);
8807 }
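
/* Usage sketch (illustrative; PTR and OFF_INT are assumed variables):
   pointer offsets must have a ptrofftype, so an int-typed offset is
   converted before building POINTER_PLUS_EXPR:

     tree off = gimple_convert_to_ptrofftype (&seq, loc, off_int);
     tree ptr2 = gimple_build (&seq, loc, POINTER_PLUS_EXPR,
                               TREE_TYPE (ptr), ptr, off);

   No conversion statement is emitted when OFF_INT already has a
   suitable type.  */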

/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
                              tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
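
/* Usage sketch (illustrative; VECTYPE and S are assumed): splatting a
   scalar across all lanes of a vector type:

     tree splat = gimple_build_vector_from_val (&seq, loc, vectype, s);

   For constant S this folds to a VECTOR_CST and appends nothing to
   SEQ.  */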

/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new statements to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
                     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
        tree type = builder->type ();
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
        vec<constructor_elt, va_gc> *v;
        vec_alloc (v, nelts);
        for (i = 0; i < nelts; ++i)
          CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

        tree res;
        if (gimple_in_ssa_p (cfun))
          res = make_ssa_name (type);
        else
          res = create_tmp_reg (type);
        gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
        gimple_set_location (stmt, loc);
        gimple_seq_add_stmt_without_update (seq, stmt);
        return res;
      }
  return builder->build ();
}
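
/* Usage sketch (illustrative, assuming an int-element vector type
   VECTYPE with four lanes): building { a_1, 0, 0, 0 } where a_1 is
   an SSA name:

     tree_vector_builder builder (vectype, 4, 1);
     builder.quick_push (a);
     for (int i = 1; i < 4; ++i)
       builder.quick_push (integer_zero_node);
     tree vec = gimple_build_vector (&seq, loc, &builder);

   Because one element is non-constant, a CONSTRUCTOR assignment is
   emitted instead of producing a VECTOR_CST.  */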

/* Emit gimple statements into SEQ that compute the value of OLD_SIZE
   rounded up to the next multiple of ALIGN, which must be a power of
   two.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
                       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
                                tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
}
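
/* For example, for OLD_SIZE 10 and ALIGN 8 the computation built above
   is (10 + 7) & -8 == 16.  The mask trick only implements rounding
   when ALIGN is a power of two.  */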

/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (code,
                                             gimple_expr_type (stmt),
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (code,
                                              gimple_expr_type (stmt),
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
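
/* For instance, for the assignment "x_1 = y_2 * y_2" with signed
   integer Y the GIMPLE_BINARY_RHS case above dispatches to
   tree_binary_nonnegative_warnv_p, which can only conclude that the
   square is non-negative by assuming signed overflow is undefined,
   and therefore sets *STRICT_OVERFLOW_P.  */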

/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
                                        gimple_call_combined_fn (stmt),
                                        arg0, arg1,
                                        strict_overflow_p, depth);
}

/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
        return false;
    }
  return true;
}
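
/* For instance, for "x_3 = PHI <5(2), y_7(4)>" every argument must be
   known non-negative; the constant 5 trivially is, so the result here
   reduces to what tree_single_nonnegative_warnv_p can prove about
   y_7.  */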

/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}

/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (code,
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (code,
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}

/* Return true if the floating-point result of PHI STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}

/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}
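
/* For instance, for the call "x_1 = __builtin_trunc (y_2)" the
   dispatch above reaches gimple_call_integer_valued_real_p, and
   integer_valued_real_call_p recognizes CFN_BUILT_IN_TRUNC as always
   yielding an integer value (or +-Inf/NaN).  */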