]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimple-fold.c
middle-end: add support for per-location warning groups.
[thirdparty/gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68 #include "varasm.h"
69
/* Kinds of string-length query performed by get_range_strlen.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
84
85 static bool
86 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
87
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. DWARF-only) entities never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A fully inlined function body may no longer be output on its own.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
190
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
194
195 tree
196 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
197 {
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
202 }
203
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, NULL_TREE if the value cannot be
   referenced from the current unit, or the original CVAL if no
   canonicalization applied.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the result is an address
     constant rather than arithmetic.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the referenced decl is not visible from this unit.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
277
278 /* If SYM is a constant variable with known value, return the value.
279 NULL_TREE is returned otherwise. */
280
281 tree
282 get_symbol_constant_value (tree sym)
283 {
284 tree val = ctor_for_folding (sym);
285 if (val != error_mark_node)
286 {
287 if (val)
288 {
289 val = canonicalize_constructor_val (unshare_expr (val), sym);
290 if (val && is_gimple_min_invariant (val))
291 return val;
292 else
293 return NULL_TREE;
294 }
295 /* Variables declared 'const' without an initializer
296 have zero as the initializer if they may not be
297 overridden at link or run time. */
298 if (!val
299 && is_gimple_reg_type (TREE_TYPE (sym)))
300 return build_zero_cst (TREE_TYPE (sym));
301 }
302
303 return NULL_TREE;
304 }
305
306
307
308 /* Subroutine of fold_stmt. We perform constant folding of the
309 memory reference tree EXPR. */
310
311 static tree
312 maybe_fold_reference (tree expr)
313 {
314 tree result = NULL_TREE;
315
316 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr) == REALPART_EXPR
318 || TREE_CODE (expr) == IMAGPART_EXPR)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
320 result = fold_unary_loc (EXPR_LOCATION (expr),
321 TREE_CODE (expr),
322 TREE_TYPE (expr),
323 TREE_OPERAND (expr, 0));
324 else if (TREE_CODE (expr) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326 result = fold_ternary_loc (EXPR_LOCATION (expr),
327 TREE_CODE (expr),
328 TREE_TYPE (expr),
329 TREE_OPERAND (expr, 0),
330 TREE_OPERAND (expr, 1),
331 TREE_OPERAND (expr, 2));
332 else
333 result = fold_const_aggregate_ref (expr);
334
335 if (result && is_gimple_min_invariant (result))
336 return result;
337
338 return NULL_TREE;
339 }
340
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary RHS must already be GIMPLE values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the reference chain; array indices must be
	       GIMPLE values and the base must be a GIMPLE identifier.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  /* Every element of a CONSTRUCTOR must be a GIMPLE value.  */
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
457
458
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual call to a unique target.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold &MEM[p + 0] back to p (possibly with a conversion).  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      /* No folding attempted here.  */
      break;

    case GIMPLE_BINARY_RHS:
      /* No folding attempted here.  */
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  /* Only accept results that are valid GIMPLE rhs forms.  */
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
584
585
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call that can
	 clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original statement's
	     VDEF; earlier stores get fresh virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
657
658 /* Helper function for update_gimple_call and
659 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
660 with GIMPLE_CALL NEW_STMT. */
661
662 static void
663 finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
664 gimple *stmt)
665 {
666 tree lhs = gimple_call_lhs (stmt);
667 gimple_call_set_lhs (new_stmt, lhs);
668 if (lhs && TREE_CODE (lhs) == SSA_NAME)
669 SSA_NAME_DEF_STMT (lhs) = new_stmt;
670 gimple_move_vops (new_stmt, stmt);
671 gimple_set_location (new_stmt, gimple_location (stmt));
672 if (gimple_block (new_stmt) == NULL_TREE)
673 gimple_set_block (new_stmt, gimple_block (stmt));
674 gsi_replace (si_p, new_stmt, false);
675 }
676
677 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
678 with number of arguments NARGS, where the arguments in GIMPLE form
679 follow NARGS argument. */
680
681 bool
682 update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
683 {
684 va_list ap;
685 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
686
687 gcc_assert (is_gimple_call (stmt));
688 va_start (ap, nargs);
689 new_stmt = gimple_build_call_valist (fn, nargs, ap);
690 finish_update_gimple_call (si_p, new_stmt, stmt);
691 va_end (ap);
692 return true;
693 }
694
695 /* Return true if EXPR is a CALL_EXPR suitable for representation
696 as a single GIMPLE_CALL statement. If the arguments require
697 further gimplification, return false. */
698
699 static bool
700 valid_gimple_call_p (tree expr)
701 {
702 unsigned i, nargs;
703
704 if (TREE_CODE (expr) != CALL_EXPR)
705 return false;
706
707 nargs = call_expr_nargs (expr);
708 for (i = 0; i < nargs; i++)
709 {
710 tree arg = CALL_EXPR_ARG (expr, i);
711 if (is_gimple_reg_type (TREE_TYPE (arg)))
712 {
713 if (!is_gimple_val (arg))
714 return false;
715 }
716 else
717 if (!is_gimple_lvalue (arg))
718 return false;
719 }
720
721 return true;
722 }
723
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result is used; gimplify EXPR purely for its side effects.  */
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Force EXPR into a GIMPLE operand and assign it to the call's lhs.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
801
802
803 /* Replace the call at *GSI with the gimple value VAL. */
804
805 void
806 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
807 {
808 gimple *stmt = gsi_stmt (*gsi);
809 tree lhs = gimple_call_lhs (stmt);
810 gimple *repl;
811 if (lhs)
812 {
813 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
814 val = fold_convert (TREE_TYPE (lhs), val);
815 repl = gimple_build_assign (lhs, val);
816 }
817 else
818 repl = gimple_build_nop ();
819 tree vdef = gimple_vdef (stmt);
820 if (vdef && TREE_CODE (vdef) == SSA_NAME)
821 {
822 unlink_stmt_vdef (stmt);
823 release_ssa_name (vdef);
824 }
825 gsi_replace (gsi, repl, false);
826 }
827
828 /* Replace the call at *GSI with the new call REPL and fold that
829 again. */
830
831 static void
832 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
833 {
834 gimple *stmt = gsi_stmt (*gsi);
835 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
836 gimple_set_location (repl, gimple_location (stmt));
837 gimple_move_vops (repl, stmt);
838 gsi_replace (gsi, repl, false);
839 fold_stmt (gsi);
840 }
841
842 /* Return true if VAR is a VAR_DECL or a component thereof. */
843
844 static bool
845 var_decl_component_p (tree var)
846 {
847 tree inner = var;
848 while (handled_component_p (inner))
849 inner = TREE_OPERAND (inner, 0);
850 return (DECL_P (inner)
851 || (TREE_CODE (inner) == MEM_REF
852 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
853 }
854
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Only SSA names of integral type can carry range information.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Sizes larger than SSIZE_MAX are treated as invalid; restrict the
     queried range to [0, SSIZE_MAX].  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  /* True only when the intersection collapses to exactly zero.  */
  return vr.zero_p ();
}
885
886 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
887 diagnose (otherwise undefined) overlapping copies without preventing
888 folding. When folded, GCC guarantees that overlapping memcpy has
889 the same semantics as memmove. Call to the library memcpy need not
890 provide the same guarantee. Return false if no simplification can
891 be made. */
892
893 static bool
894 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
895 tree dest, tree src, enum built_in_function code)
896 {
897 gimple *stmt = gsi_stmt (*gsi);
898 tree lhs = gimple_call_lhs (stmt);
899 tree len = gimple_call_arg (stmt, 2);
900 location_t loc = gimple_location (stmt);
901
902 /* If the LEN parameter is a constant zero or in range where
903 the only valid value is zero, return DEST. */
904 if (size_must_be_zero_p (len))
905 {
906 gimple *repl;
907 if (gimple_call_lhs (stmt))
908 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
909 else
910 repl = gimple_build_nop ();
911 tree vdef = gimple_vdef (stmt);
912 if (vdef && TREE_CODE (vdef) == SSA_NAME)
913 {
914 unlink_stmt_vdef (stmt);
915 release_ssa_name (vdef);
916 }
917 gsi_replace (gsi, repl, false);
918 return true;
919 }
920
921 /* If SRC and DEST are the same (and not volatile), return
922 DEST{,+LEN,+LEN-1}. */
923 if (operand_equal_p (src, dest, 0))
924 {
925 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
926 It's safe and may even be emitted by GCC itself (see bug
927 32667). */
928 unlink_stmt_vdef (stmt);
929 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
930 release_ssa_name (gimple_vdef (stmt));
931 if (!lhs)
932 {
933 gsi_replace (gsi, gimple_build_nop (), false);
934 return true;
935 }
936 goto done;
937 }
938 else
939 {
940 /* We cannot (easily) change the type of the copy if it is a storage
941 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
942 modify the storage order of objects (see storage_order_barrier_p). */
943 tree srctype
944 = POINTER_TYPE_P (TREE_TYPE (src))
945 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
946 tree desttype
947 = POINTER_TYPE_P (TREE_TYPE (dest))
948 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
949 tree destvar, srcvar, srcoff;
950 unsigned int src_align, dest_align;
951 unsigned HOST_WIDE_INT tmp_len;
952 const char *tmp_str;
953
954 /* Build accesses at offset zero with a ref-all character type. */
955 tree off0
956 = build_int_cst (build_pointer_type_for_mode (char_type_node,
957 ptr_mode, true), 0);
958
959 /* If we can perform the copy efficiently with first doing all loads
960 and then all stores inline it that way. Currently efficiently
961 means that we can load all the memory into a single integer
962 register which is what MOVE_MAX gives us. */
963 src_align = get_pointer_alignment (src);
964 dest_align = get_pointer_alignment (dest);
965 if (tree_fits_uhwi_p (len)
966 && compare_tree_int (len, MOVE_MAX) <= 0
967 /* FIXME: Don't transform copies from strings with known length.
968 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
969 from being handled, and the case was XFAILed for that reason.
970 Now that it is handled and the XFAIL removed, as soon as other
971 strlenopt tests that rely on it for passing are adjusted, this
972 hack can be removed. */
973 && !c_strlen (src, 1)
974 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
975 && memchr (tmp_str, 0, tmp_len) == NULL)
976 && !(srctype
977 && AGGREGATE_TYPE_P (srctype)
978 && TYPE_REVERSE_STORAGE_ORDER (srctype))
979 && !(desttype
980 && AGGREGATE_TYPE_P (desttype)
981 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
982 {
983 unsigned ilen = tree_to_uhwi (len);
984 if (pow2p_hwi (ilen))
985 {
986 /* Detect out-of-bounds accesses without issuing warnings.
987 Avoid folding out-of-bounds copies but to avoid false
988 positives for unreachable code defer warning until after
989 DCE has worked its magic.
990 -Wrestrict is still diagnosed. */
991 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
992 dest, src, len, len,
993 false, false))
994 if (warning != OPT_Wrestrict)
995 return false;
996
997 scalar_int_mode mode;
998 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
999 if (type
1000 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
1001 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
1002 /* If the destination pointer is not aligned we must be able
1003 to emit an unaligned store. */
1004 && (dest_align >= GET_MODE_ALIGNMENT (mode)
1005 || !targetm.slow_unaligned_access (mode, dest_align)
1006 || (optab_handler (movmisalign_optab, mode)
1007 != CODE_FOR_nothing)))
1008 {
1009 tree srctype = type;
1010 tree desttype = type;
1011 if (src_align < GET_MODE_ALIGNMENT (mode))
1012 srctype = build_aligned_type (type, src_align);
1013 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
1014 tree tem = fold_const_aggregate_ref (srcmem);
1015 if (tem)
1016 srcmem = tem;
1017 else if (src_align < GET_MODE_ALIGNMENT (mode)
1018 && targetm.slow_unaligned_access (mode, src_align)
1019 && (optab_handler (movmisalign_optab, mode)
1020 == CODE_FOR_nothing))
1021 srcmem = NULL_TREE;
1022 if (srcmem)
1023 {
1024 gimple *new_stmt;
1025 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
1026 {
1027 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
1028 srcmem
1029 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
1030 new_stmt);
1031 gimple_assign_set_lhs (new_stmt, srcmem);
1032 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1033 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1034 }
1035 if (dest_align < GET_MODE_ALIGNMENT (mode))
1036 desttype = build_aligned_type (type, dest_align);
1037 new_stmt
1038 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
1039 dest, off0),
1040 srcmem);
1041 gimple_move_vops (new_stmt, stmt);
1042 if (!lhs)
1043 {
1044 gsi_replace (gsi, new_stmt, false);
1045 return true;
1046 }
1047 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1048 goto done;
1049 }
1050 }
1051 }
1052 }
1053
1054 if (code == BUILT_IN_MEMMOVE)
1055 {
1056 /* Both DEST and SRC must be pointer types.
1057 ??? This is what old code did. Is the testing for pointer types
1058 really mandatory?
1059
1060 If either SRC is readonly or length is 1, we can use memcpy. */
1061 if (!dest_align || !src_align)
1062 return false;
1063 if (readonly_data_expr (src)
1064 || (tree_fits_uhwi_p (len)
1065 && (MIN (src_align, dest_align) / BITS_PER_UNIT
1066 >= tree_to_uhwi (len))))
1067 {
1068 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1069 if (!fn)
1070 return false;
1071 gimple_call_set_fndecl (stmt, fn);
1072 gimple_call_set_arg (stmt, 0, dest);
1073 gimple_call_set_arg (stmt, 1, src);
1074 fold_stmt (gsi);
1075 return true;
1076 }
1077
1078 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1079 if (TREE_CODE (src) == ADDR_EXPR
1080 && TREE_CODE (dest) == ADDR_EXPR)
1081 {
1082 tree src_base, dest_base, fn;
1083 poly_int64 src_offset = 0, dest_offset = 0;
1084 poly_uint64 maxsize;
1085
1086 srcvar = TREE_OPERAND (src, 0);
1087 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
1088 if (src_base == NULL)
1089 src_base = srcvar;
1090 destvar = TREE_OPERAND (dest, 0);
1091 dest_base = get_addr_base_and_unit_offset (destvar,
1092 &dest_offset);
1093 if (dest_base == NULL)
1094 dest_base = destvar;
1095 if (!poly_int_tree_p (len, &maxsize))
1096 maxsize = -1;
1097 if (SSA_VAR_P (src_base)
1098 && SSA_VAR_P (dest_base))
1099 {
1100 if (operand_equal_p (src_base, dest_base, 0)
1101 && ranges_maybe_overlap_p (src_offset, maxsize,
1102 dest_offset, maxsize))
1103 return false;
1104 }
1105 else if (TREE_CODE (src_base) == MEM_REF
1106 && TREE_CODE (dest_base) == MEM_REF)
1107 {
1108 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
1109 TREE_OPERAND (dest_base, 0), 0))
1110 return false;
1111 poly_offset_int full_src_offset
1112 = mem_ref_offset (src_base) + src_offset;
1113 poly_offset_int full_dest_offset
1114 = mem_ref_offset (dest_base) + dest_offset;
1115 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
1116 full_dest_offset, maxsize))
1117 return false;
1118 }
1119 else
1120 return false;
1121
1122 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1123 if (!fn)
1124 return false;
1125 gimple_call_set_fndecl (stmt, fn);
1126 gimple_call_set_arg (stmt, 0, dest);
1127 gimple_call_set_arg (stmt, 1, src);
1128 fold_stmt (gsi);
1129 return true;
1130 }
1131
1132 /* If the destination and source do not alias optimize into
1133 memcpy as well. */
1134 if ((is_gimple_min_invariant (dest)
1135 || TREE_CODE (dest) == SSA_NAME)
1136 && (is_gimple_min_invariant (src)
1137 || TREE_CODE (src) == SSA_NAME))
1138 {
1139 ao_ref destr, srcr;
1140 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1141 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1142 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1143 {
1144 tree fn;
1145 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1146 if (!fn)
1147 return false;
1148 gimple_call_set_fndecl (stmt, fn);
1149 gimple_call_set_arg (stmt, 0, dest);
1150 gimple_call_set_arg (stmt, 1, src);
1151 fold_stmt (gsi);
1152 return true;
1153 }
1154 }
1155
1156 return false;
1157 }
1158
1159 if (!tree_fits_shwi_p (len))
1160 return false;
1161 if (!srctype
1162 || (AGGREGATE_TYPE_P (srctype)
1163 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
1164 return false;
1165 if (!desttype
1166 || (AGGREGATE_TYPE_P (desttype)
1167 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
1168 return false;
1169 /* In the following try to find a type that is most natural to be
1170 used for the memcpy source and destination and that allows
1171 the most optimization when memcpy is turned into a plain assignment
1172 using that type. In theory we could always use a char[len] type
1173 but that only gains us that the destination and source possibly
1174 no longer will have their address taken. */
1175 if (TREE_CODE (srctype) == ARRAY_TYPE
1176 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1177 srctype = TREE_TYPE (srctype);
1178 if (TREE_CODE (desttype) == ARRAY_TYPE
1179 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1180 desttype = TREE_TYPE (desttype);
1181 if (TREE_ADDRESSABLE (srctype)
1182 || TREE_ADDRESSABLE (desttype))
1183 return false;
1184
1185 /* Make sure we are not copying using a floating-point mode or
1186 a type whose size possibly does not match its precision. */
1187 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1188 || TREE_CODE (desttype) == BOOLEAN_TYPE
1189 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1190 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1191 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1192 || TREE_CODE (srctype) == BOOLEAN_TYPE
1193 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1194 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1195 if (!srctype)
1196 srctype = desttype;
1197 if (!desttype)
1198 desttype = srctype;
1199 if (!srctype)
1200 return false;
1201
1202 src_align = get_pointer_alignment (src);
1203 dest_align = get_pointer_alignment (dest);
1204
1205 /* Choose between src and destination type for the access based
1206 on alignment, whether the access constitutes a register access
1207 and whether it may actually expose a declaration for SSA rewrite
1208 or SRA decomposition. Also try to expose a string constant, we
1209 might be able to concatenate several of them later into a single
1210 string store. */
1211 destvar = NULL_TREE;
1212 srcvar = NULL_TREE;
1213 if (TREE_CODE (dest) == ADDR_EXPR
1214 && var_decl_component_p (TREE_OPERAND (dest, 0))
1215 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1216 && dest_align >= TYPE_ALIGN (desttype)
1217 && (is_gimple_reg_type (desttype)
1218 || src_align >= TYPE_ALIGN (desttype)))
1219 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1220 else if (TREE_CODE (src) == ADDR_EXPR
1221 && var_decl_component_p (TREE_OPERAND (src, 0))
1222 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1223 && src_align >= TYPE_ALIGN (srctype)
1224 && (is_gimple_reg_type (srctype)
1225 || dest_align >= TYPE_ALIGN (srctype)))
1226 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1227 /* FIXME: Don't transform copies from strings with known original length.
1228 As soon as strlenopt tests that rely on it for passing are adjusted,
1229 this hack can be removed. */
1230 else if (gimple_call_alloca_for_var_p (stmt)
1231 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1232 && integer_zerop (srcoff)
1233 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1234 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1235 srctype = TREE_TYPE (srcvar);
1236 else
1237 return false;
1238
1239 /* Now that we chose an access type express the other side in
1240 terms of it if the target allows that with respect to alignment
1241 constraints. */
1242 if (srcvar == NULL_TREE)
1243 {
1244 if (src_align >= TYPE_ALIGN (desttype))
1245 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1246 else
1247 {
1248 if (STRICT_ALIGNMENT)
1249 return false;
1250 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1251 src_align);
1252 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1253 }
1254 }
1255 else if (destvar == NULL_TREE)
1256 {
1257 if (dest_align >= TYPE_ALIGN (srctype))
1258 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1259 else
1260 {
1261 if (STRICT_ALIGNMENT)
1262 return false;
1263 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1264 dest_align);
1265 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1266 }
1267 }
1268
1269 /* Same as above, detect out-of-bounds accesses without issuing
1270 warnings. Avoid folding out-of-bounds copies but to avoid
1271 false positives for unreachable code defer warning until
1272 after DCE has worked its magic.
1273 -Wrestrict is still diagnosed. */
1274 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1275 dest, src, len, len,
1276 false, false))
1277 if (warning != OPT_Wrestrict)
1278 return false;
1279
1280 gimple *new_stmt;
1281 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1282 {
1283 tree tem = fold_const_aggregate_ref (srcvar);
1284 if (tem)
1285 srcvar = tem;
1286 if (! is_gimple_min_invariant (srcvar))
1287 {
1288 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1289 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1290 new_stmt);
1291 gimple_assign_set_lhs (new_stmt, srcvar);
1292 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1293 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1294 }
1295 new_stmt = gimple_build_assign (destvar, srcvar);
1296 goto set_vop_and_replace;
1297 }
1298
1299 /* We get an aggregate copy. If the source is a STRING_CST, then
1300 directly use its type to perform the copy. */
1301 if (TREE_CODE (srcvar) == STRING_CST)
1302 desttype = srctype;
1303
1304 /* Or else, use an unsigned char[] type to perform the copy in order
1305 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1306 types or float modes behavior on copying. */
1307 else
1308 {
1309 desttype = build_array_type_nelts (unsigned_char_type_node,
1310 tree_to_uhwi (len));
1311 srctype = desttype;
1312 if (src_align > TYPE_ALIGN (srctype))
1313 srctype = build_aligned_type (srctype, src_align);
1314 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1315 }
1316
1317 if (dest_align > TYPE_ALIGN (desttype))
1318 desttype = build_aligned_type (desttype, dest_align);
1319 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1320 new_stmt = gimple_build_assign (destvar, srcvar);
1321
1322 set_vop_and_replace:
1323 gimple_move_vops (new_stmt, stmt);
1324 if (!lhs)
1325 {
1326 gsi_replace (gsi, new_stmt, false);
1327 return true;
1328 }
1329 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1330 }
1331
1332 done:
1333 gimple_seq stmts = NULL;
1334 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1335 len = NULL_TREE;
1336 else if (code == BUILT_IN_MEMPCPY)
1337 {
1338 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1339 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1340 TREE_TYPE (dest), dest, len);
1341 }
1342 else
1343 gcc_unreachable ();
1344
1345 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1346 gimple *repl = gimple_build_assign (lhs, dest);
1347 gsi_replace (gsi, repl, false);
1348 return true;
1349 }
1350
1351 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1352 to built-in memcmp (a, b, len). */
1353
1354 static bool
1355 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1356 {
1357 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1358
1359 if (!fn)
1360 return false;
1361
1362 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1363
1364 gimple *stmt = gsi_stmt (*gsi);
1365 tree a = gimple_call_arg (stmt, 0);
1366 tree b = gimple_call_arg (stmt, 1);
1367 tree len = gimple_call_arg (stmt, 2);
1368
1369 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1370 replace_call_with_call_and_fold (gsi, repl);
1371
1372 return true;
1373 }
1374
1375 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1376 to built-in memmove (dest, src, len). */
1377
1378 static bool
1379 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1380 {
1381 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1382
1383 if (!fn)
1384 return false;
1385
1386 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1387 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1388 len) into memmove (dest, src, len). */
1389
1390 gimple *stmt = gsi_stmt (*gsi);
1391 tree src = gimple_call_arg (stmt, 0);
1392 tree dest = gimple_call_arg (stmt, 1);
1393 tree len = gimple_call_arg (stmt, 2);
1394
1395 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1396 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1397 replace_call_with_call_and_fold (gsi, repl);
1398
1399 return true;
1400 }
1401
1402 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1403 to built-in memset (dest, 0, len). */
1404
1405 static bool
1406 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1407 {
1408 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1409
1410 if (!fn)
1411 return false;
1412
1413 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1414
1415 gimple *stmt = gsi_stmt (*gsi);
1416 tree dest = gimple_call_arg (stmt, 0);
1417 tree len = gimple_call_arg (stmt, 1);
1418
1419 gimple_seq seq = NULL;
1420 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1421 gimple_seq_add_stmt_without_update (&seq, repl);
1422 gsi_replace_with_seq_vops (gsi, seq);
1423 fold_stmt (gsi);
1424
1425 return true;
1426 }
1427
1428 /* Fold function call to builtin memset or bzero at *GSI setting the
1429 memory of size LEN to VAL. Return whether a simplification was made. */
1430
1431 static bool
1432 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1433 {
1434 gimple *stmt = gsi_stmt (*gsi);
1435 tree etype;
1436 unsigned HOST_WIDE_INT length, cval;
1437
1438 /* If the LEN parameter is zero, return DEST. */
1439 if (integer_zerop (len))
1440 {
1441 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1442 return true;
1443 }
1444
1445 if (! tree_fits_uhwi_p (len))
1446 return false;
1447
1448 if (TREE_CODE (c) != INTEGER_CST)
1449 return false;
1450
1451 tree dest = gimple_call_arg (stmt, 0);
1452 tree var = dest;
1453 if (TREE_CODE (var) != ADDR_EXPR)
1454 return false;
1455
1456 var = TREE_OPERAND (var, 0);
1457 if (TREE_THIS_VOLATILE (var))
1458 return false;
1459
1460 etype = TREE_TYPE (var);
1461 if (TREE_CODE (etype) == ARRAY_TYPE)
1462 etype = TREE_TYPE (etype);
1463
1464 if (!INTEGRAL_TYPE_P (etype)
1465 && !POINTER_TYPE_P (etype))
1466 return NULL_TREE;
1467
1468 if (! var_decl_component_p (var))
1469 return NULL_TREE;
1470
1471 length = tree_to_uhwi (len);
1472 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1473 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1474 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1475 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1476 return NULL_TREE;
1477
1478 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1479 return NULL_TREE;
1480
1481 if (!type_has_mode_precision_p (etype))
1482 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1483 TYPE_UNSIGNED (etype));
1484
1485 if (integer_zerop (c))
1486 cval = 0;
1487 else
1488 {
1489 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1490 return NULL_TREE;
1491
1492 cval = TREE_INT_CST_LOW (c);
1493 cval &= 0xff;
1494 cval |= cval << 8;
1495 cval |= cval << 16;
1496 cval |= (cval << 31) << 1;
1497 }
1498
1499 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1500 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1501 gimple_move_vops (store, stmt);
1502 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1503 if (gimple_call_lhs (stmt))
1504 {
1505 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1506 gsi_replace (gsi, asgn, false);
1507 }
1508 else
1509 {
1510 gimple_stmt_iterator gsi2 = *gsi;
1511 gsi_prev (gsi);
1512 gsi_remove (&gsi2, true);
1513 }
1514
1515 return true;
1516 }
1517
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Compute
   a string length (or, for RKIND == SRK_INT_VALUE, a maximum integer
   value) for ARG and merge it into the MINLEN, MAXLEN and MAXBOUND
   members of *PDATA.  VISITED, RKIND and ELTSIZE are as for
   get_range_strlen.  Return true if *PDATA was updated with a usable
   bound, false on failure.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: recurse on the SSA_NAME P itself.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      /* Try to compute the actual string length first.  */
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  /* No exact length: fall back to a bound derived from the size of
     the referenced object or subobject.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1827
1828 /* For an ARG referencing one or more strings, try to obtain the range
1829 of their lengths, or the size of the largest array ARG referes to if
1830 the range of lengths cannot be determined, and store all in *PDATA.
1831 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1832 the maximum constant value.
1833 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1834 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1835 length or if we are unable to determine the length, return false.
1836 VISITED is a bitmap of visited variables.
1837 RKIND determines the kind of value or range to obtain (see
1838 strlen_range_kind).
1839 Set PDATA->DECL if ARG refers to an unterminated constant array.
1840 On input, set ELTSIZE to 1 for normal single byte character strings,
1841 and either 2 or 4 for wide characer strings (the size of wchar_t).
1842 Return true if *PDATA was successfully populated and false otherwise. */
1843
1844 static bool
1845 get_range_strlen (tree arg, bitmap *visited,
1846 strlen_range_kind rkind,
1847 c_strlen_data *pdata, unsigned eltsize)
1848 {
1849
1850 if (TREE_CODE (arg) != SSA_NAME)
1851 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1852
1853 /* If ARG is registered for SSA update we cannot look at its defining
1854 statement. */
1855 if (name_registered_for_update_p (arg))
1856 return false;
1857
1858 /* If we were already here, break the infinite cycle. */
1859 if (!*visited)
1860 *visited = BITMAP_ALLOC (NULL);
1861 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1862 return true;
1863
1864 tree var = arg;
1865 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1866
1867 switch (gimple_code (def_stmt))
1868 {
1869 case GIMPLE_ASSIGN:
1870 /* The RHS of the statement defining VAR must either have a
1871 constant length or come from another SSA_NAME with a constant
1872 length. */
1873 if (gimple_assign_single_p (def_stmt)
1874 || gimple_assign_unary_nop_p (def_stmt))
1875 {
1876 tree rhs = gimple_assign_rhs1 (def_stmt);
1877 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1878 }
1879 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1880 {
1881 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1882 gimple_assign_rhs3 (def_stmt) };
1883
1884 for (unsigned int i = 0; i < 2; i++)
1885 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1886 {
1887 if (rkind != SRK_LENRANGE)
1888 return false;
1889 /* Set the upper bound to the maximum to prevent
1890 it from being adjusted in the next iteration but
1891 leave MINLEN and the more conservative MAXBOUND
1892 determined so far alone (or leave them null if
1893 they haven't been set yet). That the MINLEN is
1894 in fact zero can be determined from MAXLEN being
1895 unbounded but the discovered minimum is used for
1896 diagnostics. */
1897 pdata->maxlen = build_all_ones_cst (size_type_node);
1898 }
1899 return true;
1900 }
1901 return false;
1902
1903 case GIMPLE_PHI:
1904 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1905 must have a constant length. */
1906 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1907 {
1908 tree arg = gimple_phi_arg (def_stmt, i)->def;
1909
1910 /* If this PHI has itself as an argument, we cannot
1911 determine the string length of this argument. However,
1912 if we can find a constant string length for the other
1913 PHI args then we can still be sure that this is a
1914 constant string length. So be optimistic and just
1915 continue with the next argument. */
1916 if (arg == gimple_phi_result (def_stmt))
1917 continue;
1918
1919 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1920 {
1921 if (rkind != SRK_LENRANGE)
1922 return false;
1923 /* Set the upper bound to the maximum to prevent
1924 it from being adjusted in the next iteration but
1925 leave MINLEN and the more conservative MAXBOUND
1926 determined so far alone (or leave them null if
1927 they haven't been set yet). That the MINLEN is
1928 in fact zero can be determined from MAXLEN being
1929 unbounded but the discovered minimum is used for
1930 diagnostics. */
1931 pdata->maxlen = build_all_ones_cst (size_type_node);
1932 }
1933 }
1934 return true;
1935
1936 default:
1937 return false;
1938 }
1939 }
1940
1941 /* Try to obtain the range of the lengths of the string(s) referenced
1942 by ARG, or the size of the largest array ARG refers to if the range
1943 of lengths cannot be determined, and store all in *PDATA which must
1944 be zero-initialized on input except PDATA->MAXBOUND may be set to
1945 a non-null tree node other than INTEGER_CST to request to have it
1946 set to the length of the longest string in a PHI. ELTSIZE is
1947 the expected size of the string element in bytes: 1 for char and
1948 some power of 2 for wide characters.
1949 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1950 for optimization. Returning false means that a nonzero PDATA->MINLEN
1951 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1952 is -1 (in that case, the actual range is indeterminate, i.e.,
1953 [0, PTRDIFF_MAX - 2]. */
1954
1955 bool
1956 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1957 {
1958 bitmap visited = NULL;
1959 tree maxbound = pdata->maxbound;
1960
1961 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1962 {
1963 /* On failure extend the length range to an impossible maximum
1964 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1965 members can stay unchanged regardless. */
1966 pdata->minlen = ssize_int (0);
1967 pdata->maxlen = build_all_ones_cst (size_type_node);
1968 }
1969 else if (!pdata->minlen)
1970 pdata->minlen = ssize_int (0);
1971
1972 /* If it's unchanged from it initial non-null value, set the conservative
1973 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1974 if (maxbound && pdata->maxbound == maxbound)
1975 pdata->maxbound = build_all_ones_cst (size_type_node);
1976
1977 if (visited)
1978 BITMAP_FREE (visited);
1979
1980 return !integer_all_onesp (pdata->maxlen);
1981 }
1982
1983 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1984 For ARG of pointer types, NONSTR indicates if the caller is prepared
1985 to handle unterminated strings. For integer ARG and when RKIND ==
1986 SRK_INT_VALUE, NONSTR must be null.
1987
1988 If an unterminated array is discovered and our caller handles
1989 unterminated arrays, then bubble up the offending DECL and
1990 return the maximum size. Otherwise return NULL. */
1991
1992 static tree
1993 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1994 {
1995 /* A non-null NONSTR is meaningless when determining the maximum
1996 value of an integer ARG. */
1997 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1998 /* ARG must have an integral type when RKIND says so. */
1999 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2000
2001 bitmap visited = NULL;
2002
2003 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2004 is unbounded. */
2005 c_strlen_data lendata = { };
2006 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
2007 lendata.maxlen = NULL_TREE;
2008 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2009 lendata.maxlen = NULL_TREE;
2010
2011 if (visited)
2012 BITMAP_FREE (visited);
2013
2014 if (nonstr)
2015 {
2016 /* For callers prepared to handle unterminated arrays set
2017 *NONSTR to point to the declaration of the array and return
2018 the maximum length/size. */
2019 *nonstr = lendata.decl;
2020 return lendata.maxlen;
2021 }
2022
2023 /* Fail if the constant array isn't nul-terminated. */
2024 return lendata.decl ? NULL_TREE : lendata.maxlen;
2025 }
2026
2027
/* Fold a call to the strcpy builtin with arguments DEST and SRC at
   the statement GSI points to.  Folds the self-copy strcpy (p, p)
   to P (diagnosing it with -Wrestrict), and otherwise transforms
   the call into memcpy (DEST, SRC, strlen (SRC) + 1) when the length
   of SRC is a known constant.  Return true if the statement was
   replaced, false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy expansion below emits an extra length computation;
     avoid it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  Warn once here
	 and suppress the warning to avoid duplicates later.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2091
/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN
   at the statement GSI points to.  Folds the call away when LEN is
   zero (diagnosing it with -Wstringop-truncation), and transforms it
   into memcpy when both LEN and the length of SRC are known constants
   and the copy includes the terminating nul.  Return true if the
   statement was replaced, false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring, i.e., need
     not be nul-terminated.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorate with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
2166
/* Fold function call to builtin strchr or strrchr (selected by
   IS_STRRCHR) at the statement GSI points to.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if the statement was replaced.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* All the transformations below produce the result in the lhs;
     without one there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* Both the string and the character are known constants: compute
     the result at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* No occurrence: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Fold to STR plus the offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations apply only when searching for the
     terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2255
/* Fold function call to builtin strstr at the statement GSI points to.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').
   Return true if the statement was replaced.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* All the transformations below produce the result in the lhs;
     without one there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  /* Every fold below needs a constant needle.  */
  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* Both strings are known constants: compute the result directly.  */
  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  /* No match: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Fold to HAYSTACK plus the offset of the match.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2322
/* Simplify a call to the strcat builtin.  DST and SRC are the
   arguments to the call at the statement GSI points to.

   Folds strcat (dst, "") to DST.  Otherwise, when optimizing for
   speed and the length of SRC is a known constant, expands the call
   into a strlen (dst) followed by a memcpy of SRC (including its
   terminating nul) to DST + strlen (dst).

   Return true if the statement was replaced, false if no
   simplification was possible.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The expansion below trades size for speed; keep the plain strcat
     call when this block isn't optimized for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument: assign DST to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2414
2415 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2416 are the arguments to the call. */
2417
2418 static bool
2419 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2420 {
2421 gimple *stmt = gsi_stmt (*gsi);
2422 tree dest = gimple_call_arg (stmt, 0);
2423 tree src = gimple_call_arg (stmt, 1);
2424 tree size = gimple_call_arg (stmt, 2);
2425 tree fn;
2426 const char *p;
2427
2428
2429 p = c_getstr (src);
2430 /* If the SRC parameter is "", return DEST. */
2431 if (p && *p == '\0')
2432 {
2433 replace_call_with_value (gsi, dest);
2434 return true;
2435 }
2436
2437 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2438 return false;
2439
2440 /* If __builtin_strcat_chk is used, assume strcat is available. */
2441 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2442 if (!fn)
2443 return false;
2444
2445 gimple *repl = gimple_build_call (fn, 2, dest, src);
2446 replace_call_with_call_and_fold (gsi, repl);
2447 return true;
2448 }
2449
/* Simplify a call to the strncat builtin at the statement GSI points
   to.  Folds the call away when the bound or the source length is
   zero, diagnoses suspicious bounds with -Wstringop-overflow, and
   transforms the call into strcat when the bound is a known constant
   no smaller than the constant source length.  Return true if the
   statement was replaced.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Both the bound and the source string must be known constants
     for the comparisons below.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    /* Avoid warning again for the same statement later on.  */
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	suppress_warning (stmt, OPT_Wstringop_overflow_);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2538
2539 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2540 LEN, and SIZE. */
2541
2542 static bool
2543 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2544 {
2545 gimple *stmt = gsi_stmt (*gsi);
2546 tree dest = gimple_call_arg (stmt, 0);
2547 tree src = gimple_call_arg (stmt, 1);
2548 tree len = gimple_call_arg (stmt, 2);
2549 tree size = gimple_call_arg (stmt, 3);
2550 tree fn;
2551 const char *p;
2552
2553 p = c_getstr (src);
2554 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2555 if ((p && *p == '\0')
2556 || integer_zerop (len))
2557 {
2558 replace_call_with_value (gsi, dest);
2559 return true;
2560 }
2561
2562 if (! tree_fits_uhwi_p (size))
2563 return false;
2564
2565 if (! integer_all_onesp (size))
2566 {
2567 tree src_len = c_strlen (src, 1);
2568 if (src_len
2569 && tree_fits_uhwi_p (src_len)
2570 && tree_fits_uhwi_p (len)
2571 && ! tree_int_cst_lt (len, src_len))
2572 {
2573 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2574 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2575 if (!fn)
2576 return false;
2577
2578 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2579 replace_call_with_call_and_fold (gsi, repl);
2580 return true;
2581 }
2582 return false;
2583 }
2584
2585 /* If __builtin_strncat_chk is used, assume strncat is available. */
2586 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2587 if (!fn)
2588 return false;
2589
2590 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2591 replace_call_with_call_and_fold (gsi, repl);
2592 return true;
2593 }
2594
2595 /* Build and append gimple statements to STMTS that would load a first
2596 character of a memory location identified by STR. LOC is location
2597 of the statement. */
2598
2599 static tree
2600 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2601 {
2602 tree var;
2603
2604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2605 tree cst_uchar_ptr_node
2606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2607 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2608
2609 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2610 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2611 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2612
2613 gimple_assign_set_lhs (stmt, var);
2614 gimple_seq_add_stmt_without_update (stmts, stmt);
2615
2616 return var;
2617 }
2618
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  Computes the result at compile time when both strings
   are known constants, and otherwise simplifies comparisons against
   "" or with a bound of one into single-character loads, and bounded
   comparisons with a redundant bound into strcmp.  Return true if
   the statement was replaced.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND is the constant bound of the strn{case}cmp call if known,
     otherwise the all-ones "unknown" sentinel.  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.   LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Only fold when both arrays are nul-terminated.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Punt if the comparison could read past the end of an
	       unterminated array.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* A byte-wise equal prefix compares equal regardless of
	       case; only the equal result is usable here.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certain to be compared:
     either the bound is a known nonzero constant or the function
     is unbounded.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2835
/* Fold a call to the memchr builtin pointed to by the GSI iterator.
   Folds the call away when the length is zero, and computes the
   result at compile time when the searched object and the character
   are known constants.  Return true if the statement was replaced.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the length and the searched-for character must be known
     constants for the folds below.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      /* Search only the part of the representation that is known.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only fold to null when the entire searched range lies
	     within the object; otherwise the call is undefined and
	     is left for diagnostics elsewhere.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: fold to ARG1 plus the offset of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2905
/* Fold a call to the fputs builtin.  ARG0 (the string) and ARG1 (the
   stream) are the arguments to the call.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  When the string has a known
   constant length and the call's value is unused, folds the call away
   for a zero-length string, into fputc for a one-character string,
   and into fwrite for longer strings (unless optimizing for size).
   Return true if the statement was replaced, false if no
   simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2982
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   GSI refers to the call statement.  DEST, SRC, LEN, and SIZE are the
   arguments to the call (for __memset_chk, SRC is the fill value and
   SIZE is the object-size check argument).  FCODE is the BUILT_IN_*
   code of the builtin.  Return true if the call was simplified in
   place, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* Whether the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum
	     before the call and use it as the replacement value.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all-ones means "size unknown"; the check can never fail
     then and the unchecked function can be substituted directly.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Punt if the copy could exceed the destination size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3083
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Return true if the call was
   simplified in place, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* Whether the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* SIZE of all-ones means "size unknown": the check can never fail.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Punt unless the buffer is provably large enough (note the copy
	 writes MAXLEN + 1 bytes including the NUL, hence strict <).  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3189
3190 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3191 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3192 length passed as third argument. IGNORE is true if return value can be
3193 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3194
3195 static bool
3196 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3197 tree dest, tree src,
3198 tree len, tree size,
3199 enum built_in_function fcode)
3200 {
3201 gimple *stmt = gsi_stmt (*gsi);
3202 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3203 tree fn;
3204
3205 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3206 {
3207 /* If return value of __stpncpy_chk is ignored,
3208 optimize into __strncpy_chk. */
3209 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3210 if (fn)
3211 {
3212 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3213 replace_call_with_call_and_fold (gsi, repl);
3214 return true;
3215 }
3216 }
3217
3218 if (! tree_fits_uhwi_p (size))
3219 return false;
3220
3221 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3222 if (! integer_all_onesp (size))
3223 {
3224 if (! tree_fits_uhwi_p (len))
3225 {
3226 /* If LEN is not constant, try MAXLEN too.
3227 For MAXLEN only allow optimizing into non-_ocs function
3228 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3229 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3230 return false;
3231 }
3232 else
3233 maxlen = len;
3234
3235 if (tree_int_cst_lt (size, maxlen))
3236 return false;
3237 }
3238
3239 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3240 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3241 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3242 if (!fn)
3243 return false;
3244
3245 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3246 replace_call_with_call_and_fold (gsi, repl);
3247 return true;
3248 }
3249
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made, true if the call was
   replaced.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
			    exact);
      /* Mark the statement so the warning isn't issued again later.  */
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 to copy the terminating NUL as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy inherits the original call's virtual operands.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3330
/* Fold a call to __{,v}snprintf_chk.  GSI refers to the call statement;
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   Return true if the call was simplified into plain {,v}snprintf in
   place, false if a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means "size unknown"; the check can never fail.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs (drop FLAG and SIZE).  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3411
/* Fold a call to __{,v}sprintf_chk.  GSI refers to the call statement;
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
   Return true if the call was simplified into plain {,v}sprintf in
   place, false if a normal call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means "size unknown"; otherwise require the
     output length to be provably smaller than SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs (drop FLAG and SIZE).  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3507
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the warning-suppression bits to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which for
	     a %-free format is simply strlen (fmt).  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  /* The return value is strlen (orig); punt if that isn't known.  */
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the warning-suppression bits to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3637
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant so truncation can be
     ruled out below.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the would-be output length, here strlen (fmt).  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3774
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified in place, false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, transform to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A %-free format with trailing arguments is suspicious; leave it
	 alone (except for the va_list variants, where ARG is the ap).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3875
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified in place, false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle "%s" and %-free formats uniformly: determine the string
     actually printed, then pick putchar or puts based on its length.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
4027
4028
4029
/* Fold a call to __builtin_strlen at *GSI.  If the length of the
   argument can be determined to be a single constant, replace the
   call with that constant and return true.  Otherwise, record the
   computed range of possible lengths on the call's LHS (if any)
   and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  /* Bounds on the possible results of strlen (ARG).  */
  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable length information; fall back to the maximal
	 conservative range [0, max_object_size () - 2].  Note that
	 this range is never a singleton, so the MINLEN == MAXLEN
	 fold below is only reached when LENDATA.MINLEN is a valid
	 INTEGER_CST from the branch above.  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4080
4081 /* Fold a call to __builtin_acc_on_device. */
4082
4083 static bool
4084 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4085 {
4086 /* Defer folding until we know which compiler we're in. */
4087 if (symtab->state != EXPANSION)
4088 return false;
4089
4090 unsigned val_host = GOMP_DEVICE_HOST;
4091 unsigned val_dev = GOMP_DEVICE_NONE;
4092
4093 #ifdef ACCEL_COMPILER
4094 val_host = GOMP_DEVICE_NOT_HOST;
4095 val_dev = ACCEL_COMPILER_acc_device;
4096 #endif
4097
4098 location_t loc = gimple_location (gsi_stmt (*gsi));
4099
4100 tree host_eq = make_ssa_name (boolean_type_node);
4101 gimple *host_ass = gimple_build_assign
4102 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4103 gimple_set_location (host_ass, loc);
4104 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4105
4106 tree dev_eq = make_ssa_name (boolean_type_node);
4107 gimple *dev_ass = gimple_build_assign
4108 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4109 gimple_set_location (dev_ass, loc);
4110 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4111
4112 tree result = make_ssa_name (boolean_type_node);
4113 gimple *result_ass = gimple_build_assign
4114 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4115 gimple_set_location (result_ass, loc);
4116 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4117
4118 replace_call_with_value (gsi, result);
4119
4120 return true;
4121 }
4122
4123 /* Fold realloc (0, n) -> malloc (n). */
4124
4125 static bool
4126 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4127 {
4128 gimple *stmt = gsi_stmt (*gsi);
4129 tree arg = gimple_call_arg (stmt, 0);
4130 tree size = gimple_call_arg (stmt, 1);
4131
4132 if (operand_equal_p (arg, null_pointer_node, 0))
4133 {
4134 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4135 if (fn_malloc)
4136 {
4137 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4138 replace_call_with_call_and_fold (gsi, repl);
4139 return true;
4140 }
4141 }
4142 return false;
4143 }
4144
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Location of the __builtin_clear_padding call; used for emitted
     statements and any diagnostics.  */
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Pointer to the start of the object whose padding is being cleared.  */
  tree base;
  /* Pointer type used for the offset operand of emitted MEM_REFs.  */
  tree alias_type;
  /* Iterator before which any new statements are inserted.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4180
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.

   Three modes of operation, selected by BUF fields:
   - clear_in_mask set: update the BUF->union_ptr mask in memory,
     emitting nothing (for clear_type_padding_in_mask);
   - union_ptr set (without clear_in_mask): AND the pattern into the
     union scratch mask, emitting nothing;
   - otherwise: emit GIMPLE stores before BUF->gsi that zero the
     recorded padding bits of the object at BUF->base + BUF->off.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  /* END is how many leading bytes of buf->buf will be consumed now.
     For a partial flush, keep at least clear_padding_unit bytes (and
     stay on a clear_padding_unit boundary) so that later data can
     still be merged with the tail.  */
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  /* Run length of all-ones bytes carried over from the previous
     flush; extended as further all-ones bytes are seen.  */
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  /* Whole padding bytes are cleared in one go once a
		     non-padding byte terminates the run.  */
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      /* Flush any trailing run and reset the buffer state.  */
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      /* Shift the unconsumed tail to the front of buf->buf.  */
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Code-emitting mode: walk the buffer a word at a time and emit the
     cheapest stores/RMW sequences that zero exactly the set bits.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      /* Byte positions (relative to I) of the first/last non-zero and
	 zero bytes within the current word; WORDSIZE/0 mean "none
	 seen yet".  ALL_ONES: every non-zero byte seen is ~0 and they
	 are contiguous.  BYTES_ONLY: every byte is either 0 or ~0
	 (no partial bytes, i.e. no bit-fields involved).  */
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* Never access bytes at or past buf->base + buf->sz; retry
	     this position with a halved word size.  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      /* First byte of this word not yet covered by an emitted store.  */
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      /* The word starts with more all-ones bytes; absorb them
		 into the pending run and drop them from this word.  */
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  /* Emit one store zeroing the accumulated run: a plain char
	     store for a single byte, or an empty-CONSTRUCTOR store of
	     a char array for longer runs.  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  /* Find the end of this maximal run of ~0 bytes and
		     emit one store covering it.  */
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    /* Trailing all-ones bytes: defer them as a pending run so
	       they can merge with the next word.  */
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bit-fields involved: pick the smallest naturally-aligned
	 power-of-two element size whose single element covers all the
	 non-zero bytes, and either store zero (if the element is all
	 padding) or emit a load/AND-with-inverted-mask/store RMW.  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  /* Emit the store for the final pending run of all-ones
	     bytes.  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      /* Keep buf->off a multiple of UNITS_PER_WORD by leaving the
	 sub-word remainder (zeroed) in the buffer.  */
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
4485
/* Append PADDING_BYTES padding bytes (all-ones bytes, i.e. bytes that
   need clearing) to BUF->buf, flushing as necessary when the buffer
   would overflow.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  /* First try to make room with a partial flush.  */
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Still doesn't fit: fill the buffer with ~0, flush, and account
	 for the rest of the padding directly through buf->off /
	 buf->padding_bytes, keeping only the sub-word remainder in
	 buf->buf so buf->off stays word-aligned.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      /* Fits: just record the padding bytes as ~0 in the buffer.  */
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4519
4520 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4521
/* Clear padding bits of union type TYPE of size SZ bytes.  Each member
   is processed into an initially all-ones mask which is ANDed together
   (by clear_padding_flush in union mode), so only bytes/bits that are
   padding in every member remain marked as padding.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Nested union: reuse BUF itself, remembering where this union
	 starts (word-aligned) so the position can be rewound for each
	 member.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      /* Outermost union: set up a separate buffer whose union_ptr
	 points at an all-ones scratch mask of SZ bytes.  */
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      /* Use the free tail of buf->buf as the mask when it fits,
	 otherwise heap-allocate it.  */
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    /* Incomplete member type: only an incomplete (flexible)
	       array is expected; its padding is not well defined.  */
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask)
	      error_at (buf->loc, "flexible array member %qD does not have "
			"well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	/* Rewind to the start of the union for each member.  */
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
	/* Bytes from the member's end to the union's end count as
	   padding for this member.  */
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      /* Nested union: reposition BUF just past this union, keeping
	 buf->off word-aligned.  */
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    /* The mask was computed in place in buf->buf's tail; just account
       for it.  */
    buf->size += sz;
  else
    {
      /* Copy the heap-allocated mask back into BUF in chunks, flushing
	 as needed, then release it.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4612
4613 /* The only known floating point formats with padding bits are the
4614 IEEE extended ones. */
4615
4616 static bool
4617 clear_padding_real_needs_padding_p (tree type)
4618 {
4619 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4620 return (fmt->b == 2
4621 && fmt->signbit_ro == fmt->signbit_rw
4622 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4623 }
4624
4625 /* Return true if TYPE might contain any padding bits. */
4626
4627 static bool
4628 clear_padding_type_may_have_padding_p (tree type)
4629 {
4630 switch (TREE_CODE (type))
4631 {
4632 case RECORD_TYPE:
4633 case UNION_TYPE:
4634 return true;
4635 case ARRAY_TYPE:
4636 case COMPLEX_TYPE:
4637 case VECTOR_TYPE:
4638 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4639 case REAL_TYPE:
4640 return clear_padding_real_needs_padding_p (type);
4641 default:
4642 return false;
4643 }
4644 }
4645
4646 /* Emit a runtime loop:
4647 for (; buf.base != end; buf.base += sz)
4648 __builtin_clear_padding (buf.base); */
4649
4650 static void
4651 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4652 {
4653 tree l1 = create_artificial_label (buf->loc);
4654 tree l2 = create_artificial_label (buf->loc);
4655 tree l3 = create_artificial_label (buf->loc);
4656 gimple *g = gimple_build_goto (l2);
4657 gimple_set_location (g, buf->loc);
4658 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4659 g = gimple_build_label (l1);
4660 gimple_set_location (g, buf->loc);
4661 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4662 clear_padding_type (buf, type, buf->sz);
4663 clear_padding_flush (buf, true);
4664 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4665 size_int (buf->sz));
4666 gimple_set_location (g, buf->loc);
4667 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4668 g = gimple_build_label (l2);
4669 gimple_set_location (g, buf->loc);
4670 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4671 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4672 gimple_set_location (g, buf->loc);
4673 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4674 g = gimple_build_label (l3);
4675 gimple_set_location (g, buf->loc);
4676 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4677 }
4678
/* Clear padding bits for TYPE of size SZ bytes.  Called recursively
   from gimple_fold_builtin_clear_padding.  Records, in BUF->buf, a ~0
   bit for every padding bit and a 0 bit for every value bit of TYPE,
   flushing through clear_padding_flush as the buffer fills.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      /* Walk the fields in layout order, treating gaps between fields
	 (and after the last one) as padding.  CUR_POS tracks how many
	 bytes of the record have been accounted for so far.  */
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		/* First mark all bytes the bit-field touches as padding,
		   then clear the value bits below.  */
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points in buf->buf at the first byte the bit-field
		   occupies.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			/* Clear a leading partial byte, whole middle
			   bytes, and a trailing partial byte.  */
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* Incomplete member type: only an incomplete (flexible)
		   array is expected; its padding is not well defined.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask)
		  error_at (buf->loc, "flexible array member %qD does not "
			    "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		/* Ordinary field: the gap before it is padding; recurse
		   into its type for the field's own bytes.  */
		HOST_WIDE_INT pos = int_byte_position (field);
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field), fldsz);
		cur_pos += fldsz;
	      }
	  }
      /* Tail padding after the last field.  */
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  /* Loop over a temporary pointer from the array's start to
	     END, clearing one element per iteration.  */
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end);
	  /* Restore the saved context, repositioned past the array
	     with buf->off kept word-aligned.  */
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small array: just unroll over the elements.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  /* Bytes that survive the round-trip unchanged from ~0 are
	     value bits; flip so padding bits end up set.  */
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* Real part followed by imaginary part.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case NULLPTR_TYPE:
      /* All bytes of a nullptr_t object are treated as padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalar types have no padding bits.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4881
4882 /* Clear padding bits of TYPE in MASK. */
4883
4884 void
4885 clear_type_padding_in_mask (tree type, unsigned char *mask)
4886 {
4887 clear_padding_struct buf;
4888 buf.loc = UNKNOWN_LOCATION;
4889 buf.clear_in_mask = true;
4890 buf.base = NULL_TREE;
4891 buf.alias_type = NULL_TREE;
4892 buf.gsi = NULL;
4893 buf.align = 0;
4894 buf.off = 0;
4895 buf.padding_bytes = 0;
4896 buf.sz = int_size_in_bytes (type);
4897 buf.size = 0;
4898 buf.union_ptr = mask;
4899 clear_padding_type (&buf, type, buf.sz);
4900 clear_padding_flush (&buf, true);
4901 }
4902
/* Fold __builtin_clear_padding builtin.  Expands the call at *GSI into
   a sequence of statements that zero all padding bits of the object
   the first argument points to, then removes the call.  Always returns
   true.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  /* The second argument encodes the type to operate on: it is the
     pointed-to type of the argument's pointer type.  */
  tree typearg = gimple_call_arg (stmt, 1);
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  /* Remember the statement preceding the call so we can tell later
     whether anything was inserted.  */
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  /* Use the larger of the pointer's known alignment and the type's
     minimum alignment.  */
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    /* No padding anywhere in TYPE: nothing to emit.  */
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop clearing one element at a time, from
	     PTR up to PTR + SZ.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  /* Copy the pointer into an SSA name usable as a MEM_REF
	     base operand.  */
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz);
      clear_padding_flush (&buf, true);
    }

  /* Remove the call.  If nothing was inserted before it, replace it
     with a GIMPLE_NOP so the caller's iterator stays valid; otherwise
     remove it and leave *GSI at the last inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
4996
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the dedicated folder for the builtins we know how to
     handle specially; each helper returns true iff it changed *GSI.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the historical BSD spellings of strchr/strrchr,
       so they share one folder; the bool selects forward vs. reverse.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The *printf folders take the format string and the first variadic
       argument (if any); the N checks guard the gimple_call_arg accesses
       since these builtins are variadic.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The _CHK variants carry an extra flag argument before the format,
	 hence the shifted argument indices.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5175
5176 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5177 function calls to constants, where possible. */
5178
5179 static tree
5180 fold_internal_goacc_dim (const gimple *call)
5181 {
5182 int axis = oacc_get_ifn_dim_arg (call);
5183 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5184 tree result = NULL_TREE;
5185 tree type = TREE_TYPE (gimple_call_lhs (call));
5186
5187 switch (gimple_call_internal_fn (call))
5188 {
5189 case IFN_GOACC_DIM_POS:
5190 /* If the size is 1, we know the answer. */
5191 if (size == 1)
5192 result = build_int_cst (type, 0);
5193 break;
5194 case IFN_GOACC_DIM_SIZE:
5195 /* If the size is not dynamic, we know the answer. */
5196 if (size)
5197 result = build_int_cst (type, size);
5198 break;
5199 default:
5200 break;
5201 }
5202
5203 return result;
5204 }
5205
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Cheap structural checks first; the argument-count test also guards
     the gimple_call_arg accesses further below.  Require virtual
     def/use operands so the transformation has vops to move.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  /* Only the size-suffixed __atomic_compare_exchange_N builtins
     are candidates.  */
  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of a local variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak flag must be a literal 0 or 1 so it can be encoded into
     the internal-fn flag operand by the folder.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the type of the builtin's third parameter, i.e. the
     uintN_t the hardware operation works on.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must provide a compare-and-swap pattern for this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5271
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the builtin's third parameter type, the uintN_t operated
     on; CTYPE pairs the old value with the success flag.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* Strip the ADDR_EXPR: optimize_atomic_compare_exchange_p verified
     arg 1 is &var.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED into an SSA name, so it can be
     passed by value to the internal fn.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember the first inserted statement; *GSI is reset to it on exit.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode the weak flag and the operand byte size into one operand:
     w * 256 + N, matching the comment above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw internally, the follow-up
     statements must go on the fallthru edge, not after the call.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  (the value seen in memory).  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  /* If OLDLHS was set the edge insertion already happened above.  */
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
5359
5360 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5361 doesn't fit into TYPE. The test for overflow should be regardless of
5362 -fwrapv, and even for unsigned types. */
5363
5364 bool
5365 arith_overflowed_p (enum tree_code code, const_tree type,
5366 const_tree arg0, const_tree arg1)
5367 {
5368 widest2_int warg0 = widest2_int_cst (arg0);
5369 widest2_int warg1 = widest2_int_cst (arg1);
5370 widest2_int wres;
5371 switch (code)
5372 {
5373 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5374 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5375 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5376 default: gcc_unreachable ();
5377 }
5378 signop sign = TYPE_SIGN (type);
5379 if (sign == UNSIGNED && wi::neg_p (wres))
5380 return true;
5381 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5382 }
5383
5384 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5385 for the memory it references, otherwise return null. VECTYPE is the
5386 type of the memory vector. */
5387
5388 static tree
5389 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5390 {
5391 tree ptr = gimple_call_arg (call, 0);
5392 tree alias_align = gimple_call_arg (call, 1);
5393 tree mask = gimple_call_arg (call, 2);
5394 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5395 return NULL_TREE;
5396
5397 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5398 if (TYPE_ALIGN (vectype) != align)
5399 vectype = build_aligned_type (vectype, align);
5400 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5401 return fold_build2 (MEM_REF, vectype, ptr, offset);
5402 }
5403
5404 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5405
5406 static bool
5407 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5408 {
5409 tree lhs = gimple_call_lhs (call);
5410 if (!lhs)
5411 return false;
5412
5413 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5414 {
5415 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5416 gimple_set_location (new_stmt, gimple_location (call));
5417 gimple_move_vops (new_stmt, call);
5418 gsi_replace (gsi, new_stmt, false);
5419 return true;
5420 }
5421 return false;
5422 }
5423
5424 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5425
5426 static bool
5427 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5428 {
5429 tree rhs = gimple_call_arg (call, 3);
5430 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5431 {
5432 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5433 gimple_set_location (new_stmt, gimple_location (call));
5434 gimple_move_vops (new_stmt, call);
5435 gsi_replace (gsi, new_stmt, false);
5436 return true;
5437 }
5438 return false;
5439 }
5440
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Already a known target: strip the OBJ_TYPE_REF wrapper.
	     Warn in the dump file when the target contradicts the
	     type-inheritance analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						       (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  /* Devirtualize when the complete set of targets is known and
	     has at most one member; zero targets means the call is
	     unreachable.  */
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA web valid: give the old lhs a
			     default definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target: the path is dead, replace the
		     call with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  /* The remaining folders may insert/replace statements, which INPLACE
     forbids.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* SUBCODE/CPLX_RESULT describe arithmetic internal fns handled by
	 the common code after the switch; RESULT, when set, is the
	 folded replacement value.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* All-ones object size means "unknown"; otherwise the check
	       is dead when the offset provably fits.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the *_OVERFLOW fns fold against the element type of
		 the complex result; without a lhs there is nothing to do.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y;  */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y;  */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y;  */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* Overflow: *_OVERFLOW records it in the imaginary
		     part; the UBSAN checks must stay as calls.  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package value and overflow flag into the complex lhs.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
5750
5751
5752 /* Return true whether NAME has a use on STMT. */
5753
5754 static bool
5755 has_use_on_stmt (tree name, gimple *stmt)
5756 {
5757 imm_use_iterator iter;
5758 use_operand_p use_p;
5759 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5760 if (USE_STMT (use_p) == stmt)
5761 return true;
5762 return false;
5763 }
5764
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* The same for SSA names nested inside a comparison operand.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Constant condition: make the branch unconditional.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result into a temporary and compare that
	     against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* With INPLACE we may only shrink the rhs, never grow it.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same function, possibly simplified arguments: update in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Emit the simplification as new statements defining the
	     original lhs and splice them in, preserving vops.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
5905
5906 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5907
5908 static bool
5909 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5910 {
5911 bool res = false;
5912 tree *orig_t = t;
5913
5914 if (TREE_CODE (*t) == ADDR_EXPR)
5915 t = &TREE_OPERAND (*t, 0);
5916
5917 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5918 generic vector extension. The actual vector referenced is
5919 view-converted to an array type for this purpose. If the index
5920 is constant the canonical representation in the middle-end is a
5921 BIT_FIELD_REF so re-write the former to the latter here. */
5922 if (TREE_CODE (*t) == ARRAY_REF
5923 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5924 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5925 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5926 {
5927 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5928 if (VECTOR_TYPE_P (vtype))
5929 {
5930 tree low = array_ref_low_bound (*t);
5931 if (TREE_CODE (low) == INTEGER_CST)
5932 {
5933 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5934 {
5935 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5936 wi::to_widest (low));
5937 idx = wi::mul (idx, wi::to_widest
5938 (TYPE_SIZE (TREE_TYPE (*t))));
5939 widest_int ext
5940 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5941 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5942 {
5943 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5944 TREE_TYPE (*t),
5945 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5946 TYPE_SIZE (TREE_TYPE (*t)),
5947 wide_int_to_tree (bitsizetype, idx));
5948 res = true;
5949 }
5950 }
5951 }
5952 }
5953 }
5954
5955 while (handled_component_p (*t))
5956 t = &TREE_OPERAND (*t, 0);
5957
5958 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5959 of invariant addresses into a SSA name MEM_REF address. */
5960 if (TREE_CODE (*t) == MEM_REF
5961 || TREE_CODE (*t) == TARGET_MEM_REF)
5962 {
5963 tree addr = TREE_OPERAND (*t, 0);
5964 if (TREE_CODE (addr) == ADDR_EXPR
5965 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5966 || handled_component_p (TREE_OPERAND (addr, 0))))
5967 {
5968 tree base;
5969 poly_int64 coffset;
5970 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5971 &coffset);
5972 if (!base)
5973 {
5974 if (is_debug)
5975 return false;
5976 gcc_unreachable ();
5977 }
5978
5979 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5980 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5981 TREE_OPERAND (*t, 1),
5982 size_int (coffset));
5983 res = true;
5984 }
5985 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5986 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5987 }
5988
5989 /* Canonicalize back MEM_REFs to plain reference trees if the object
5990 accessed is a decl that has the same access semantics as the MEM_REF. */
5991 if (TREE_CODE (*t) == MEM_REF
5992 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5993 && integer_zerop (TREE_OPERAND (*t, 1))
5994 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5995 {
5996 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5997 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5998 if (/* Same volatile qualification. */
5999 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6000 /* Same TBAA behavior with -fstrict-aliasing. */
6001 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6002 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6003 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6004 /* Same alignment. */
6005 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6006 /* We have to look out here to not drop a required conversion
6007 from the rhs to the lhs if *t appears on the lhs or vice-versa
6008 if it appears on the rhs. Thus require strict type
6009 compatibility. */
6010 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6011 {
6012 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6013 res = true;
6014 }
6015 }
6016
6017 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6018 && TREE_CODE (*t) == MEM_REF
6019 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6020 {
6021 tree base;
6022 poly_int64 coffset;
6023 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6024 &coffset);
6025 if (base)
6026 {
6027 gcc_assert (TREE_CODE (base) == MEM_REF);
6028 poly_int64 moffset;
6029 if (mem_ref_offset (base).to_shwi (&moffset))
6030 {
6031 coffset += moffset;
6032 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6033 {
6034 coffset += moffset;
6035 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6036 return true;
6037 }
6038 }
6039 }
6040 }
6041
6042 /* Canonicalize TARGET_MEM_REF in particular with respect to
6043 the indexes becoming constant. */
6044 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6045 {
6046 tree tem = maybe_fold_tmr (*t);
6047 if (tem)
6048 {
6049 *t = tem;
6050 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6051 recompute_tree_invariant_for_addr_expr (*orig_t);
6052 res = true;
6053 }
6054 }
6055
6056 return res;
6057 }
6058
6059 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6060 distinguishes both cases. */
6061
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether -Wstrict-overflow is suppressed for this statement
     so deferred overflow warnings can be dropped at the end.  */
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  /* Single-RHS assigns can carry memory references on either
	     side; canonicalize both.  */
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in all arguments and the LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  /* Pass is_debug = true: for debug binds a failed
	     canonicalization must not ICE but simply give up.  */
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  Inplace folding can only use
     simplifications that do not require emitting new statements, which
     gimple_simplify guarantees when given a NULL sequence.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement: the simplification above may have replaced
     it in the iterator.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the statement: the new RHS
	   must need no more operand slots than the old one had.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      /* Fold a memory reference bound as a debug value.  */
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and re-wrap it,
		 preserving the original pointer type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Replace an SSA return value by its known value, if any.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  /* Re-fetch again: the folders above may have replaced the statement.  */
  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
6323
/* Valueization callback that ends up not following SSA edges.  */
6325
6326 tree
6327 no_follow_ssa_edges (tree)
6328 {
6329 return NULL_TREE;
6330 }
6331
6332 /* Valueization callback that ends up following single-use SSA edges only. */
6333
6334 tree
6335 follow_single_use_edges (tree val)
6336 {
6337 if (TREE_CODE (val) == SSA_NAME
6338 && !has_single_use (val))
6339 return NULL_TREE;
6340 return val;
6341 }
6342
6343 /* Valueization callback that follows all SSA edges. */
6344
6345 tree
6346 follow_all_ssa_edges (tree val)
6347 {
6348 return val;
6349 }
6350
6351 /* Fold the statement pointed to by GSI. In some cases, this function may
6352 replace the whole statement with a new one. Returns true iff folding
6353 makes any changes.
6354 The statement pointed to by GSI should be in valid gimple form but may
6355 be in unfolded state as resulting from for example constant propagation
6356 which can produce *&x = 0. */
6357
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Fold without following any SSA edges, i.e. only the statement
     itself is inspected; see fold_stmt_1.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
6363
/* Like fold_stmt above, but let the caller supply VALUEIZE to look up
   values of SSA names used by the statement during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
6369
6370 /* Perform the minimal folding on statement *GSI. Only operations like
6371 *&x created by constant propagation are handled. The statement cannot
6372 be replaced with a new one. Return true if the statement was
6373 changed, false otherwise.
6374 The statement *GSI should be in valid gimple form but may
6375 be in unfolded state as resulting from for example constant propagation
6376 which can produce *&x = 0. */
6377
6378 bool
6379 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6380 {
6381 gimple *stmt = gsi_stmt (*gsi);
6382 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6383 gcc_assert (gsi_stmt (*gsi) == stmt);
6384 return changed;
6385 }
6386
6387 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6388 if EXPR is null or we don't know how.
6389 If non-null, the result always has boolean type. */
6390
6391 static tree
6392 canonicalize_bool (tree expr, bool invert)
6393 {
6394 if (!expr)
6395 return NULL_TREE;
6396 else if (invert)
6397 {
6398 if (integer_nonzerop (expr))
6399 return boolean_false_node;
6400 else if (integer_zerop (expr))
6401 return boolean_true_node;
6402 else if (TREE_CODE (expr) == SSA_NAME)
6403 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6404 build_int_cst (TREE_TYPE (expr), 0));
6405 else if (COMPARISON_CLASS_P (expr))
6406 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6407 boolean_type_node,
6408 TREE_OPERAND (expr, 0),
6409 TREE_OPERAND (expr, 1));
6410 else
6411 return NULL_TREE;
6412 }
6413 else
6414 {
6415 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6416 return expr;
6417 if (integer_nonzerop (expr))
6418 return boolean_true_node;
6419 else if (integer_zerop (expr))
6420 return boolean_false_node;
6421 else if (TREE_CODE (expr) == SSA_NAME)
6422 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6423 build_int_cst (TREE_TYPE (expr), 0));
6424 else if (COMPARISON_CLASS_P (expr))
6425 return fold_build2 (TREE_CODE (expr),
6426 boolean_type_node,
6427 TREE_OPERAND (expr, 0),
6428 TREE_OPERAND (expr, 1));
6429 else
6430 return NULL_TREE;
6431 }
6432 }
6433
6434 /* Check to see if a boolean expression EXPR is logically equivalent to the
6435 comparison (OP1 CODE OP2). Check for various identities involving
6436 SSA_NAMEs. */
6437
static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case: EXPR is literally (OP1 CODE OP2).  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME equals (NAME != 0) and (NAME == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise compare against EXPR's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == nonzero) has the same truth value
	     as name's defining comparison itself.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) / (name != nonzero) is the inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
6491
6492 /* Check to see if two boolean expressions OP1 and OP2 are logically
6493 equivalent. */
6494
6495 static bool
6496 same_bool_result_p (const_tree op1, const_tree op2)
6497 {
6498 /* Simple cases first. */
6499 if (operand_equal_p (op1, op2, 0))
6500 return true;
6501
6502 /* Check the cases where at least one of the operands is a comparison.
6503 These are a bit smarter than operand_equal_p in that they apply some
6504 identifies on SSA_NAMEs. */
6505 if (COMPARISON_CLASS_P (op2)
6506 && same_bool_comparison_p (op1, TREE_CODE (op2),
6507 TREE_OPERAND (op2, 0),
6508 TREE_OPERAND (op2, 1)))
6509 return true;
6510 if (COMPARISON_CLASS_P (op1)
6511 && same_bool_comparison_p (op2, TREE_CODE (op1),
6512 TREE_OPERAND (op1, 0),
6513 TREE_OPERAND (op1, 1)))
6514 return true;
6515
6516 /* Default case. */
6517 return false;
6518 }
6519
6520 /* Forward declarations for some mutually recursive functions. */
6521
6522 static tree
6523 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6524 enum tree_code code2, tree op2a, tree op2b);
6525 static tree
6526 and_var_with_comparison (tree type, tree var, bool invert,
6527 enum tree_code code2, tree op2a, tree op2b);
6528 static tree
6529 and_var_with_comparison_1 (tree type, gimple *stmt,
6530 enum tree_code code2, tree op2a, tree op2b);
6531 static tree
6532 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6533 enum tree_code code2, tree op2a, tree op2b);
6534 static tree
6535 or_var_with_comparison (tree, tree var, bool invert,
6536 enum tree_code code2, tree op2a, tree op2b);
6537 static tree
6538 or_var_with_comparison_1 (tree, gimple *stmt,
6539 enum tree_code code2, tree op2a, tree op2b);
6540
6541 /* Helper function for and_comparisons_1: try to simplify the AND of the
6542 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6543 If INVERT is true, invert the value of the VAR before doing the AND.
6544 Return NULL_EXPR if we can't simplify this to a single expression. */
6545
6546 static tree
6547 and_var_with_comparison (tree type, tree var, bool invert,
6548 enum tree_code code2, tree op2a, tree op2b)
6549 {
6550 tree t;
6551 gimple *stmt = SSA_NAME_DEF_STMT (var);
6552
6553 /* We can only deal with variables whose definitions are assignments. */
6554 if (!is_gimple_assign (stmt))
6555 return NULL_TREE;
6556
6557 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6558 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6559 Then we only have to consider the simpler non-inverted cases. */
6560 if (invert)
6561 t = or_var_with_comparison_1 (type, stmt,
6562 invert_tree_comparison (code2, false),
6563 op2a, op2b);
6564 else
6565 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6566 return canonicalize_bool (t, invert);
6567 }
6568
6569 /* Try to simplify the AND of the ssa variable defined by the assignment
6570 STMT with the comparison specified by (OP2A CODE2 OP2B).
6571 Return NULL_EXPR if we can't simplify this to a single expression. */
6572
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR / FALSE_TEST_VAR record a name N when the second
     comparison is equivalent to N, respectively !N.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) / (op2a == nonzero) is just op2a.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) / (op2a != nonzero) is !op2a.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds a simplification of (inner1 AND (op2a code2 op2b))
	 to combine with the second operand's result, if any.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
6733
6734 /* Try to simplify the AND of two comparisons defined by
6735 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6736 If this can be done without constructing an intermediate value,
6737 return the resulting tree; otherwise NULL_TREE is returned.
6738 This function is deliberately asymmetric as it recurses on SSA_DEFs
6739 in the first comparison but not the second. */
6740
static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT: the first comparison denotes !NAME rather than NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      /* RESULT accumulates the common simplification; NULL until
		 the first argument has been processed.  */
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This argument makes the AND false.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through to give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6858
6859 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6860 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6861 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6862 simplify this to a single expression. As we are going to lower the cost
6863 of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */
6865
static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  These fake names exist only
     so the match.pd machinery can follow the def edges to STMT1/STMT2;
     they are never inserted into the IL.  */
  tree lhs1 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree)XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  /* Ask match.pd to simplify (lhs1 CODE lhs2), following the fake
     SSA edges to the two comparisons above.  */
  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  /* The stack-allocated names must not escape: rebuild the
	     comparison they stand for instead.  */
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  /* Likewise, a result still referencing the fake names is not
	     a usable simplification.  */
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
6935
6936 /* Try to simplify the AND of two comparisons, specified by
6937 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6938 If this can be simplified to a single expression (without requiring
6939 introducing more SSA variables to hold intermediate values),
6940 return the resulting tree. Otherwise return NULL_TREE.
6941 If the result expression is non-null, it has boolean type. */
6942
6943 tree
6944 maybe_fold_and_comparisons (tree type,
6945 enum tree_code code1, tree op1a, tree op1b,
6946 enum tree_code code2, tree op2a, tree op2b)
6947 {
6948 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6949 return t;
6950
6951 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6952 return t;
6953
6954 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6955 op1a, op1b, code2, op2a,
6956 op2b))
6957 return t;
6958
6959 return NULL_TREE;
6960 }
6961
6962 /* Helper function for or_comparisons_1: try to simplify the OR of the
6963 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6964 If INVERT is true, invert the value of VAR before doing the OR.
6965 Return NULL_EXPR if we can't simplify this to a single expression. */
6966
6967 static tree
6968 or_var_with_comparison (tree type, tree var, bool invert,
6969 enum tree_code code2, tree op2a, tree op2b)
6970 {
6971 tree t;
6972 gimple *stmt = SSA_NAME_DEF_STMT (var);
6973
6974 /* We can only deal with variables whose definitions are assignments. */
6975 if (!is_gimple_assign (stmt))
6976 return NULL_TREE;
6977
6978 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6979 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6980 Then we only have to consider the simpler non-inverted cases. */
6981 if (invert)
6982 t = and_var_with_comparison_1 (type, stmt,
6983 invert_tree_comparison (code2, false),
6984 op2a, op2b);
6985 else
6986 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6987 return canonicalize_bool (t, invert);
6988 }
6989
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison tests OP2A itself for truth.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The second comparison tests OP2A for falsehood;
	     (var OR !var) is always true.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Simplification of one redistributed operand, saved so it can be
	 combined with the simplification of the other one below.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
7155
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) / (NAME != 1), i.e. when the first
	 comparison tests NAME for falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This argument makes the OR true on its own;
			     every other argument must agree.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
7280
7281 /* Try to simplify the OR of two comparisons, specified by
7282 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7283 If this can be simplified to a single expression (without requiring
7284 introducing more SSA variables to hold intermediate values),
7285 return the resulting tree. Otherwise return NULL_TREE.
7286 If the result expression is non-null, it has boolean type. */
7287
7288 tree
7289 maybe_fold_or_comparisons (tree type,
7290 enum tree_code code1, tree op1a, tree op1b,
7291 enum tree_code code2, tree op2a, tree op2b)
7292 {
7293 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7294 return t;
7295
7296 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7297 return t;
7298
7299 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7300 op1a, op1b, code2, op2a,
7301 op2b))
7302 return t;
7303
7304 return NULL_TREE;
7305 }
7306
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to code-specific folding of the statement's operands.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-initialized vector CONSTRUCTOR folds to a
		 VECTOR_CST when all valueized elements are constants.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace the SSA base with its constant address so the
			 generic aggregate folder below can look through it.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put the constant (if any) second.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    /* Map the internal function to the tree code it checks, so the
	       arithmetic can be folded below.  */
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		/* Even with non-constant operands some algebraic identities
		   are known not to overflow.  */
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Only fold when the operation is known not to overflow;
	       otherwise the UBSAN check must remain.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
7612
7613 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7614 Returns NULL_TREE if folding to a constant is not possible, otherwise
7615 returns a constant according to is_gimple_min_invariant. */
7616
7617 tree
7618 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7619 {
7620 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7621 if (res && is_gimple_min_invariant (res))
7622 return res;
7623 return NULL_TREE;
7624 }
7625
7626
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's byte offset into *BIT_OFFSET, giving up on
	 overflow.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A VIEW_CONVERT_EXPR does not change the bit offset.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Give up on variable-sized accesses.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
7705
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.     When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* The access spans multiple array elements; native-encode the
	 covered elements into a byte buffer and re-interpret it as TYPE.  */
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      /* Determine the index range [INDEX, MAX_INDEX] covered by ELT;
	 a RANGE_EXPR index covers several consecutive elements.  */
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  /* Advance to the next array element, stepping to the next
	     constructor element once the current one's range is done;
	     gaps in the constructor encode as zeros.  */
	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
7875
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.   When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to; in this case.  Increment *SUBOFF by the bit offset
   of the accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Scan the constructor's fields for one overlapping the access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the matching field's initializer.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  /* Memory not covered by any initialized field reads as zero.  */
  return build_zero_cst (type);
}
7978
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.
   Returns the folded constant, or NULL_TREE on failure.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Use a local dummy so the helpers below can unconditionally
	 update *SUBOFF; also lets us detect the outermost call.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
8075
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold through a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset = (idx - low_bound) * element size in bits,
		 computed in sizetype precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      /* Bit offset of FIELD within its representative REPR.  */
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  /* Re-fold the (wider) representative and shift the
		     field's bits down into place.  */
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
8245
/* Like fold_const_aggregate_ref_1, but fold a reference T into a
   constant aggregate without SSA name valueization.  Return the
   constant, or NULL_TREE if T could not be folded.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
8251
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the slot for TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
8361
8362 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8363 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8364 KNOWN_BINFO carries the binfo describing the true type of
8365 OBJ_TYPE_REF_OBJECT(REF).
8366 Set CAN_REFER if non-NULL to false if method
8367 is not referable or if the virtual table is ill-formed (such as rewriten
8368 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8369
8370 tree
8371 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8372 bool *can_refer)
8373 {
8374 unsigned HOST_WIDE_INT offset;
8375 tree v;
8376
8377 v = BINFO_VTABLE (known_binfo);
8378 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8379 if (!v)
8380 return NULL_TREE;
8381
8382 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8383 {
8384 if (can_refer)
8385 *can_refer = false;
8386 return NULL_TREE;
8387 }
8388 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8389 }
8390
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on non-pointers and on may-alias-all reference types.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access is within the vector's bounds.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Try to simplify the inner dereference first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
8511
8512 /* Return true if CODE is an operation that when operating on signed
8513 integer types involves undefined behavior on overflow and the
8514 operation can be expressed with unsigned arithmetic. */
8515
8516 bool
8517 arith_code_with_undefined_signed_overflow (tree_code code)
8518 {
8519 switch (code)
8520 {
8521 case ABS_EXPR:
8522 case PLUS_EXPR:
8523 case MINUS_EXPR:
8524 case MULT_EXPR:
8525 case NEGATE_EXPR:
8526 case POINTER_PLUS_EXPR:
8527 return true;
8528 default:
8529 return false;
8530 }
8531 }
8532
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR maps directly to ABSU_EXPR; all other codes need their
     operands converted to the unsigned type first.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a fresh unsigned LHS; the original LHS is set from it
     by the conversion statement added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed) type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
8573
8574
8575 /* The valueization hook we use for the gimple_build API simplification.
8576 This makes us match fold_buildN behavior by only combining with
8577 statements in the sequence(s) we are currently building. */
8578
8579 static tree
8580 gimple_build_valueize (tree op)
8581 {
8582 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8583 return op;
8584 return NULL_TREE;
8585 }
8586
8587 /* Build the expression CODE OP0 of type TYPE with location LOC,
8588 simplifying it first if possible. Returns the built
8589 expression value and appends statements possibly defining it
8590 to SEQ. */
8591
8592 tree
8593 gimple_build (gimple_seq *seq, location_t loc,
8594 enum tree_code code, tree type, tree op0)
8595 {
8596 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8597 if (!res)
8598 {
8599 res = create_tmp_reg_or_ssa_name (type);
8600 gimple *stmt;
8601 if (code == REALPART_EXPR
8602 || code == IMAGPART_EXPR
8603 || code == VIEW_CONVERT_EXPR)
8604 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8605 else
8606 stmt = gimple_build_assign (res, code, op0);
8607 gimple_set_location (stmt, loc);
8608 gimple_seq_add_stmt_without_update (seq, stmt);
8609 }
8610 return res;
8611 }
8612
8613 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8614 simplifying it first if possible. Returns the built
8615 expression value and appends statements possibly defining it
8616 to SEQ. */
8617
8618 tree
8619 gimple_build (gimple_seq *seq, location_t loc,
8620 enum tree_code code, tree type, tree op0, tree op1)
8621 {
8622 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8623 if (!res)
8624 {
8625 res = create_tmp_reg_or_ssa_name (type);
8626 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8627 gimple_set_location (stmt, loc);
8628 gimple_seq_add_stmt_without_update (seq, stmt);
8629 }
8630 return res;
8631 }
8632
8633 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8634 simplifying it first if possible. Returns the built
8635 expression value and appends statements possibly defining it
8636 to SEQ. */
8637
8638 tree
8639 gimple_build (gimple_seq *seq, location_t loc,
8640 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8641 {
8642 tree res = gimple_simplify (code, type, op0, op1, op2,
8643 seq, gimple_build_valueize);
8644 if (!res)
8645 {
8646 res = create_tmp_reg_or_ssa_name (type);
8647 gimple *stmt;
8648 if (code == BIT_FIELD_REF)
8649 stmt = gimple_build_assign (res, code,
8650 build3 (code, type, op0, op1, op2));
8651 else
8652 stmt = gimple_build_assign (res, code, op0, op1, op2);
8653 gimple_set_location (stmt, loc);
8654 gimple_seq_add_stmt_without_update (seq, stmt);
8655 }
8656 return res;
8657 }
8658
8659 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8660 void) with a location LOC. Returns the built expression value (or NULL_TREE
8661 if TYPE is void) and appends statements possibly defining it to SEQ. */
8662
8663 tree
8664 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8665 {
8666 tree res = NULL_TREE;
8667 gcall *stmt;
8668 if (internal_fn_p (fn))
8669 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8670 else
8671 {
8672 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8673 stmt = gimple_build_call (decl, 0);
8674 }
8675 if (!VOID_TYPE_P (type))
8676 {
8677 res = create_tmp_reg_or_ssa_name (type);
8678 gimple_call_set_lhs (stmt, res);
8679 }
8680 gimple_set_location (stmt, loc);
8681 gimple_seq_add_stmt_without_update (seq, stmt);
8682 return res;
8683 }
8684
8685 /* Build the call FN (ARG0) with a result of type TYPE
8686 (or no result if TYPE is void) with location LOC,
8687 simplifying it first if possible. Returns the built
8688 expression value (or NULL_TREE if TYPE is void) and appends
8689 statements possibly defining it to SEQ. */
8690
8691 tree
8692 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8693 tree type, tree arg0)
8694 {
8695 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8696 if (!res)
8697 {
8698 gcall *stmt;
8699 if (internal_fn_p (fn))
8700 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8701 else
8702 {
8703 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8704 stmt = gimple_build_call (decl, 1, arg0);
8705 }
8706 if (!VOID_TYPE_P (type))
8707 {
8708 res = create_tmp_reg_or_ssa_name (type);
8709 gimple_call_set_lhs (stmt, res);
8710 }
8711 gimple_set_location (stmt, loc);
8712 gimple_seq_add_stmt_without_update (seq, stmt);
8713 }
8714 return res;
8715 }
8716
8717 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8718 (or no result if TYPE is void) with location LOC,
8719 simplifying it first if possible. Returns the built
8720 expression value (or NULL_TREE if TYPE is void) and appends
8721 statements possibly defining it to SEQ. */
8722
8723 tree
8724 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8725 tree type, tree arg0, tree arg1)
8726 {
8727 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8728 if (!res)
8729 {
8730 gcall *stmt;
8731 if (internal_fn_p (fn))
8732 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8733 else
8734 {
8735 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8736 stmt = gimple_build_call (decl, 2, arg0, arg1);
8737 }
8738 if (!VOID_TYPE_P (type))
8739 {
8740 res = create_tmp_reg_or_ssa_name (type);
8741 gimple_call_set_lhs (stmt, res);
8742 }
8743 gimple_set_location (stmt, loc);
8744 gimple_seq_add_stmt_without_update (seq, stmt);
8745 }
8746 return res;
8747 }
8748
8749 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8750 (or no result if TYPE is void) with location LOC,
8751 simplifying it first if possible. Returns the built
8752 expression value (or NULL_TREE if TYPE is void) and appends
8753 statements possibly defining it to SEQ. */
8754
8755 tree
8756 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8757 tree type, tree arg0, tree arg1, tree arg2)
8758 {
8759 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8760 seq, gimple_build_valueize);
8761 if (!res)
8762 {
8763 gcall *stmt;
8764 if (internal_fn_p (fn))
8765 stmt = gimple_build_call_internal (as_internal_fn (fn),
8766 3, arg0, arg1, arg2);
8767 else
8768 {
8769 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8770 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8771 }
8772 if (!VOID_TYPE_P (type))
8773 {
8774 res = create_tmp_reg_or_ssa_name (type);
8775 gimple_call_set_lhs (stmt, res);
8776 }
8777 gimple_set_location (stmt, loc);
8778 gimple_seq_add_stmt_without_update (seq, stmt);
8779 }
8780 return res;
8781 }
8782
8783 /* Build the conversion (TYPE) OP with a result of type TYPE
8784 with location LOC if such conversion is neccesary in GIMPLE,
8785 simplifying it first.
8786 Returns the built expression value and appends
8787 statements possibly defining it to SEQ. */
8788
8789 tree
8790 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8791 {
8792 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8793 return op;
8794 return gimple_build (seq, loc, NOP_EXPR, type, op);
8795 }
8796
8797 /* Build the conversion (ptrofftype) OP with a result of a type
8798 compatible with ptrofftype with location LOC if such conversion
8799 is neccesary in GIMPLE, simplifying it first.
8800 Returns the built expression value and appends
8801 statements possibly defining it to SEQ. */
8802
8803 tree
8804 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8805 {
8806 if (ptrofftype_p (TREE_TYPE (op)))
8807 return op;
8808 return gimple_convert (seq, loc, sizetype, op);
8809 }
8810
8811 /* Build a vector of type TYPE in which each element has the value OP.
8812 Return a gimple value for the result, appending any new statements
8813 to SEQ. */
8814
8815 tree
8816 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8817 tree op)
8818 {
8819 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8820 && !CONSTANT_CLASS_P (op))
8821 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8822
8823 tree res, vec = build_vector_from_val (type, op);
8824 if (is_gimple_val (vec))
8825 return vec;
8826 if (gimple_in_ssa_p (cfun))
8827 res = make_ssa_name (type);
8828 else
8829 res = create_tmp_reg (type);
8830 gimple *stmt = gimple_build_assign (res, vec);
8831 gimple_set_location (stmt, loc);
8832 gimple_seq_add_stmt_without_update (seq, stmt);
8833 return res;
8834 }
8835
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan the encoded elements; on finding any non-constant one, fall
     back to emitting a CONSTRUCTOR assignment of all NELTS elements.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note: reuses the outer loop index I for the full element walk.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant: build a VECTOR_CST directly.  */
  return builder->build ();
}
8873
8874 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8875 and generate a value guaranteed to be rounded upwards to ALIGN.
8876
8877 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8878
8879 tree
8880 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8881 tree old_size, unsigned HOST_WIDE_INT align)
8882 {
8883 unsigned HOST_WIDE_INT tg_mask = align - 1;
8884 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8885 gcc_assert (INTEGRAL_TYPE_P (type));
8886 tree tree_mask = build_int_cst (type, tg_mask);
8887 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8888 tree_mask);
8889
8890 tree mask = build_int_cst (type, -align);
8891 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8892 }
8893
8894 /* Return true if the result of assignment STMT is known to be non-negative.
8895 If the return value is based on the assumption that signed overflow is
8896 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8897 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8898
8899 static bool
8900 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8901 int depth)
8902 {
8903 enum tree_code code = gimple_assign_rhs_code (stmt);
8904 switch (get_gimple_rhs_class (code))
8905 {
8906 case GIMPLE_UNARY_RHS:
8907 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8908 gimple_expr_type (stmt),
8909 gimple_assign_rhs1 (stmt),
8910 strict_overflow_p, depth);
8911 case GIMPLE_BINARY_RHS:
8912 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8913 gimple_expr_type (stmt),
8914 gimple_assign_rhs1 (stmt),
8915 gimple_assign_rhs2 (stmt),
8916 strict_overflow_p, depth);
8917 case GIMPLE_TERNARY_RHS:
8918 return false;
8919 case GIMPLE_SINGLE_RHS:
8920 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8921 strict_overflow_p, depth);
8922 case GIMPLE_INVALID_RHS:
8923 break;
8924 }
8925 gcc_unreachable ();
8926 }
8927
8928 /* Return true if return value of call STMT is known to be non-negative.
8929 If the return value is based on the assumption that signed overflow is
8930 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8931 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8932
8933 static bool
8934 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8935 int depth)
8936 {
8937 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8938 gimple_call_arg (stmt, 0) : NULL_TREE;
8939 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8940 gimple_call_arg (stmt, 1) : NULL_TREE;
8941
8942 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
8943 gimple_call_combined_fn (stmt),
8944 arg0,
8945 arg1,
8946 strict_overflow_p, depth);
8947 }
8948
8949 /* Return true if return value of call STMT is known to be non-negative.
8950 If the return value is based on the assumption that signed overflow is
8951 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8952 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8953
8954 static bool
8955 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8956 int depth)
8957 {
8958 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8959 {
8960 tree arg = gimple_phi_arg_def (stmt, i);
8961 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8962 return false;
8963 }
8964 return true;
8965 }
8966
8967 /* Return true if STMT is known to compute a non-negative value.
8968 If the return value is based on the assumption that signed overflow is
8969 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8970 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8971
8972 bool
8973 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8974 int depth)
8975 {
8976 switch (gimple_code (stmt))
8977 {
8978 case GIMPLE_ASSIGN:
8979 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8980 depth);
8981 case GIMPLE_CALL:
8982 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8983 depth);
8984 case GIMPLE_PHI:
8985 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8986 depth);
8987 default:
8988 return false;
8989 }
8990 }
8991
8992 /* Return true if the floating-point value computed by assignment STMT
8993 is known to have an integer value. We also allow +Inf, -Inf and NaN
8994 to be considered integer values. Return false for signaling NaN.
8995
8996 DEPTH is the current nesting depth of the query. */
8997
8998 static bool
8999 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9000 {
9001 enum tree_code code = gimple_assign_rhs_code (stmt);
9002 switch (get_gimple_rhs_class (code))
9003 {
9004 case GIMPLE_UNARY_RHS:
9005 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9006 gimple_assign_rhs1 (stmt), depth);
9007 case GIMPLE_BINARY_RHS:
9008 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9009 gimple_assign_rhs1 (stmt),
9010 gimple_assign_rhs2 (stmt), depth);
9011 case GIMPLE_TERNARY_RHS:
9012 return false;
9013 case GIMPLE_SINGLE_RHS:
9014 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9015 case GIMPLE_INVALID_RHS:
9016 break;
9017 }
9018 gcc_unreachable ();
9019 }
9020
9021 /* Return true if the floating-point value computed by call STMT is known
9022 to have an integer value. We also allow +Inf, -Inf and NaN to be
9023 considered integer values. Return false for signaling NaN.
9024
9025 DEPTH is the current nesting depth of the query. */
9026
9027 static bool
9028 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9029 {
9030 tree arg0 = (gimple_call_num_args (stmt) > 0
9031 ? gimple_call_arg (stmt, 0)
9032 : NULL_TREE);
9033 tree arg1 = (gimple_call_num_args (stmt) > 1
9034 ? gimple_call_arg (stmt, 1)
9035 : NULL_TREE);
9036 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9037 arg0, arg1, depth);
9038 }
9039
9040 /* Return true if the floating-point result of phi STMT is known to have
9041 an integer value. We also allow +Inf, -Inf and NaN to be considered
9042 integer values. Return false for signaling NaN.
9043
9044 DEPTH is the current nesting depth of the query. */
9045
9046 static bool
9047 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9048 {
9049 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9050 {
9051 tree arg = gimple_phi_arg_def (stmt, i);
9052 if (!integer_valued_real_single_p (arg, depth + 1))
9053 return false;
9054 }
9055 return true;
9056 }
9057
9058 /* Return true if the floating-point value computed by STMT is known
9059 to have an integer value. We also allow +Inf, -Inf and NaN to be
9060 considered integer values. Return false for signaling NaN.
9061
9062 DEPTH is the current nesting depth of the query. */
9063
9064 bool
9065 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9066 {
9067 switch (gimple_code (stmt))
9068 {
9069 case GIMPLE_ASSIGN:
9070 return gimple_assign_integer_valued_real_p (stmt, depth);
9071 case GIMPLE_CALL:
9072 return gimple_call_integer_valued_real_p (stmt, depth);
9073 case GIMPLE_PHI:
9074 return gimple_phi_integer_valued_real_p (stmt, depth);
9075 default:
9076 return false;
9077 }
9078 }