/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

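/* Illustrative note (not part of the original sources): for

     char buf[8];   // contents unknown at compile time

   SRK_STRLEN fails (no exact length is known), while SRK_LENRANGE
   yields the range [0, 7], the upper bound being sizeof buf minus 1
   for the terminating nul.  For a known initializer such as
   char s[] = "abc", SRK_STRLEN yields exactly 3.  SRK_INT_VALUE
   applies to integer arguments (e.g. the bound passed to strnlen),
   not to strings.  */
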
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
        return false;
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
        cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

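/* Illustrative note (not part of the original sources): given

     static int a[4];

   an initializer such as (&a p+ 4), i.e. a POINTER_PLUS_EXPR of the
   array's address and the constant 4, is rewritten above into
   &MEM_REF (&a, 4) so that is_gimple_min_invariant accepts it.  */
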
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

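/* Illustrative note (not part of the original sources): for

     static const int answer = 42;

   a load of 'answer' folds to the INTEGER_CST 42 here; when the
   initializer cannot be used for folding, ctor_for_folding returns
   error_mark_node and NULL_TREE is returned instead.  */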


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

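/* Illustrative note (not part of the original sources): given a complex
   constant c = 1.0 + 2.0i, a REALPART_EXPR of it folds to the REAL_CST
   1.0 via fold_unary_loc above, and a BIT_FIELD_REF selecting one lane
   of a VECTOR_CST folds to that element via fold_ternary_loc.  */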

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

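/* Illustrative note (not part of the original sources): when the type
   inheritance graph shows a virtual call has exactly one possible
   target, the OBJ_TYPE_REF address on the rhs is replaced above by the
   target function's address, enabling a direct call; with zero
   possible targets (an unreachable call) a null constant is
   substituted instead, since __builtin_unreachable cannot have its
   address taken.  */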

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

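/* Illustrative note (not part of the original sources): if the replaced
   statement was

     # .MEM_3 = VDEF <.MEM_1>
     memset (&a, 0, 8);

   and the replacement sequence contains two stores, the two walks above
   produce

     # .MEM_5 = VDEF <.MEM_1>    first store
     # .MEM_3 = VDEF <.MEM_5>    second store

   i.e. the first VUSE and the last VDEF match the original statement,
   and only the intermediate link (.MEM_5) is a fresh virtual SSA
   name.  */
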
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
                           wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

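/* Illustrative note (not part of the original sources): with 64-bit
   size_t, the valid range above is [0, 2^63 - 1].  If range info
   proves SIZE is (size_t) n for n in [-4, 0], i.e. the set
   {0} U [2^64 - 4, 2^64 - 1], the intersection leaves only zero, so
   callers may fold e.g. a memcpy with that size to its destination
   argument.  */
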
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
         order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
         modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
        = POINTER_TYPE_P (TREE_TYPE (src))
          ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
        = POINTER_TYPE_P (TREE_TYPE (dest))
          ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
        = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* FIXME: Don't transform copies from strings with known length.
             Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
             from being handled, and the case was XFAILed for that reason.
             Now that it is handled and the XFAIL removed, as soon as other
             strlenopt tests that rely on it for passing are adjusted, this
             hack can be removed.  */
          && !c_strlen (src, 1)
          && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL)
          && !(srctype
               && AGGREGATE_TYPE_P (srctype)
               && TYPE_REVERSE_STORAGE_ORDER (srctype))
          && !(desttype
               && AGGREGATE_TYPE_P (desttype)
               && TYPE_REVERSE_STORAGE_ORDER (desttype)))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect out-of-bounds accesses without issuing warnings.
                 Avoid folding out-of-bounds copies but to avoid false
                 positives for unreachable code defer warning until after
                 DCE has worked its magic.
                 -Wrestrict is still diagnosed.  */
              if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
                                                         dest, src, len, len,
                                                         false, false))
                if (warning != OPT_Wrestrict)
                  return false;

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_move_vops (new_stmt, stmt);
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (code == BUILT_IN_MEMMOVE)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!srctype
          || (AGGREGATE_TYPE_P (srctype)
              && TYPE_REVERSE_STORAGE_ORDER (srctype)))
        return false;
      if (!desttype
          || (AGGREGATE_TYPE_P (desttype)
              && TYPE_REVERSE_STORAGE_ORDER (desttype)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
         on alignment, whether the access constitutes a register access
         and whether it may actually expose a declaration for SSA rewrite
         or SRA decomposition.  Also try to expose a string constant; we
         might be able to concatenate several of them later into a single
         string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
          && dest_align >= TYPE_ALIGN (desttype)
          && (is_gimple_reg_type (desttype)
              || src_align >= TYPE_ALIGN (desttype)))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
               && var_decl_component_p (TREE_OPERAND (src, 0))
               && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
               && src_align >= TYPE_ALIGN (srctype)
               && (is_gimple_reg_type (srctype)
                   || dest_align >= TYPE_ALIGN (srctype)))
        srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
         As soon as strlenopt tests that rely on it for passing are adjusted,
         this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
               && (srcvar = string_constant (src, &srcoff, NULL, NULL))
               && integer_zerop (srcoff)
               && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
               && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
        srctype = TREE_TYPE (srcvar);
      else
        return false;

      /* Now that we chose an access type express the other side in
         terms of it if the target allows that with respect to alignment
         constraints.  */
      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Same as above, detect out-of-bounds accesses without issuing
         warnings.  Avoid folding out-of-bounds copies but to avoid
         false positives for unreachable code defer warning until
         after DCE has worked its magic.
         -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
                                                 dest, src, len, len,
                                                 false, false))
        if (warning != OPT_Wrestrict)
          return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  If the source is a STRING_CST, then
         directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
        desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
         to preserve padding and to avoid any issues with TREE_ADDRESSABLE
         types or float modes behavior on copying.  */
      else
        {
          desttype = build_array_type_nelts (unsigned_char_type_node,
                                             tree_to_uhwi (len));
          srctype = desttype;
          if (src_align > TYPE_ALIGN (srctype))
            srctype = build_aligned_type (srctype, src_align);
          srcvar = fold_build2 (MEM_REF, srctype, src, off0);
        }

      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

    set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

 done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

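/* Illustrative note (not part of the original sources): for

     int a, b;
     memcpy (&a, &b, sizeof (int));

   the copy fits in one integer register, so it is inlined above as the
   single load/store "a = b;".  For mempcpy the "done:" path
   additionally replaces the call's result with DEST p+ LEN.  */
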
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the byte value C.  Return whether a
   simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
          != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
                                            TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

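/* Illustrative note (not part of the original sources): for

     unsigned int u;
     memset (&u, 0xab, sizeof u);

   the byte is replicated into every byte of the word, 0xab ->
   0xabababab (the last shift is split as (cval << 31) << 1 so it
   stays well defined even when HOST_WIDE_INT is only 32 bits wide),
   and the call becomes the single store u = 0xabababab.  */
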
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
                       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
        {
          tree aop0 = TREE_OPERAND (op, 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
                                     pdata, eltsize);
        }
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
               && rkind == SRK_LENRANGE)
        {
          /* Fail if an array is the last member of a struct object
             since it could be treated as a (fake) flexible array
             member.  */
          tree idx = TREE_OPERAND (op, 1);

          arg = TREE_OPERAND (op, 0);
          tree optype = TREE_TYPE (arg);
          if (tree dom = TYPE_DOMAIN (optype))
            if (tree bound = TYPE_MAX_VALUE (dom))
              if (TREE_CODE (bound) == INTEGER_CST
                  && TREE_CODE (idx) == INTEGER_CST
                  && tree_int_cst_lt (bound, idx))
                return false;
        }
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
          || tree_int_cst_sgn (val) < 0)
        return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
        {
          /* ARG refers to an unterminated const character array
             DATA.DECL with size DATA.LEN.  */
          val = lendata.minlen;
          pdata->decl = lendata.decl;
        }
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
        return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
                                 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Avoid arrays of pointers.  */
          tree eltype = TREE_TYPE (optype);
          if (TREE_CODE (optype) != ARRAY_TYPE
              || !INTEGRAL_TYPE_P (eltype))
            return false;

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val
              || TREE_CODE (val) != INTEGER_CST
              || integer_zerop (val))
            return false;

          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          tight_bound = true;
        }
      else if (TREE_CODE (arg) == COMPONENT_REF
               && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                   == ARRAY_TYPE))
        {
          /* Use the type of the member array to determine the upper
             bound on the length of the array.  This may be overly
             optimistic if the array itself isn't NUL-terminated and
             the caller relies on the subsequent member to contain
             the NUL but that would only be considered valid if
             the array were the last member of a struct.  */

          tree fld = TREE_OPERAND (arg, 1);

          tree optype = TREE_TYPE (fld);

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val
              || TREE_CODE (val) != INTEGER_CST
              || integer_zerop (val))
            return false;
          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          /* The array size determined above is an optimistic bound
             on the length.  If the array isn't nul-terminated the
             length computed by the library function would be greater.
             Even though using strlen to cross the subobject boundary
             is undefined, avoid drawing conclusions from the member
             type about the length here.  */
          tight_bound = true;
        }
      else if (TREE_CODE (arg) == MEM_REF
               && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
               && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
        {
          /* Handle a MEM_REF into a DECL accessing an array of integers,
             being conservative about references to extern structures with
             flexible array members that can be initialized to arbitrary
             numbers of elements as an extension (static structs are okay).
             FIXME: Make this less conservative -- see
             component_ref_size in tree.c.  */
          tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
              && (decl_binds_to_current_def_p (ref)
                  || !array_at_struct_end_p (arg)))
            {
              /* Fail if the offset is out of bounds.  Such accesses
                 should be diagnosed at some point.  */
              val = DECL_SIZE_UNIT (ref);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;

              poly_offset_int psiz = wi::to_offset (val);
              poly_offset_int poff = mem_ref_offset (arg);
              if (known_le (psiz, poff))
                return false;

              pdata->minlen = ssize_int (0);

              /* Subtract the offset and one for the terminating nul.  */
              psiz -= poff;
              psiz -= 1;
              val = wide_int_to_tree (TREE_TYPE (val), psiz);
              /* Since VAL reflects the size of a declared object
                 rather than the type of the access it is not a tight
                 bound.  */
            }
        }
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
        {
          /* Avoid handling pointers to arrays.  GCC might misuse
             a pointer to an array of one bound to point to an array
             object of a greater bound.  */
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (argtype) == ARRAY_TYPE)
            {
              val = TYPE_SIZE_UNIT (argtype);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;
              val = wide_int_to_tree (TREE_TYPE (val),
                                      wi::sub (wi::to_wide (val), 1));

              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              pdata->minlen = ssize_int (0);
            }
        }
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
          && TREE_CODE (pdata->minlen) == INTEGER_CST
          && TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
         if necessary and proceed to adjust the more conservative
         bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
        {
          if (tree_int_cst_lt (pdata->maxbound, val))
            pdata->maxbound = val;
        }
      else
        pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
         on the length of the string based on the referenced object's
         or subobject's type.  Determine the conservative upper bound
         based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
        {
          poly_int64 offset;
          tree base = get_addr_base_and_unit_offset (arg, &offset);
          if (!base)
            {
              /* When the call above fails due to a non-constant offset
                 assume the offset is zero and use the size of the whole
                 enclosing object instead.  */
              base = get_base_address (arg);
              offset = 0;
            }
          /* If the base object is a pointer no upper bound on the length
             can be determined.  Otherwise the maximum length is equal to
             the size of the enclosing object minus the offset of
             the referenced subobject minus 1 (for the terminating nul).  */
          tree type = TREE_TYPE (base);
          if (TREE_CODE (type) == POINTER_TYPE
              || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
              || !(val = DECL_SIZE_UNIT (base)))
            val = build_all_ones_cst (size_type_node);
          else
            {
              val = DECL_SIZE_UNIT (base);
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 size_int (offset + 1));
            }
        }
      else
        return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
         and fail otherwise.  */
      if (rkind != SRK_STRLEN)
        {
          if (TREE_CODE (pdata->maxlen) != INTEGER_CST
              || TREE_CODE (val) != INTEGER_CST)
            return false;

          if (tree_int_cst_lt (pdata->maxlen, val))
            pdata->maxlen = val;
          return true;
        }
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
        {
          /* Fail if the length of this ARG is different from that
             previously determined from another ARG.  */
          return false;
        }
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}

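/* Illustrative note (not part of the original sources): for

     struct S { char a[4]; char b[28]; } s;

   a strlen (s.a) query with SRK_LENRANGE obtains the optimistic
   (tight) bound 3 from the member type, and the tight_bound handling
   then widens the conservative maximum to sizeof (s) - 1, i.e. 31,
   since the (undefined) read could continue past s.a into s.b.  */
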
1631 /* For an ARG referencing one or more strings, try to obtain the range
1632 of their lengths, or the size of the largest array ARG referes to if
1633 the range of lengths cannot be determined, and store all in *PDATA.
1634 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1635 the maximum constant value.
1636 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1637 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1638 length or if we are unable to determine the length, return false.
1639 VISITED is a bitmap of visited variables.
1640 RKIND determines the kind of value or range to obtain (see
1641 strlen_range_kind).
1642 Set PDATA->DECL if ARG refers to an unterminated constant array.
1643 On input, set ELTSIZE to 1 for normal single byte character strings,
1644 and either 2 or 4 for wide characer strings (the size of wchar_t).
1645 Return true if *PDATA was successfully populated and false otherwise. */
1646
1647 static bool
1648 get_range_strlen (tree arg, bitmap *visited,
1649 strlen_range_kind rkind,
1650 c_strlen_data *pdata, unsigned eltsize)
1651 {
1652
1653 if (TREE_CODE (arg) != SSA_NAME)
1654 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1655
1656 /* If ARG is registered for SSA update we cannot look at its defining
1657 statement. */
1658 if (name_registered_for_update_p (arg))
1659 return false;
1660
1661 /* If we were already here, break the infinite cycle. */
1662 if (!*visited)
1663 *visited = BITMAP_ALLOC (NULL);
1664 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1665 return true;
1666
1667 tree var = arg;
1668 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1669
1670 switch (gimple_code (def_stmt))
1671 {
1672 case GIMPLE_ASSIGN:
1673 /* The RHS of the statement defining VAR must either have a
1674 constant length or come from another SSA_NAME with a constant
1675 length. */
1676 if (gimple_assign_single_p (def_stmt)
1677 || gimple_assign_unary_nop_p (def_stmt))
1678 {
1679 tree rhs = gimple_assign_rhs1 (def_stmt);
1680 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1681 }
1682 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1683 {
1684 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1685 gimple_assign_rhs3 (def_stmt) };
1686
1687 for (unsigned int i = 0; i < 2; i++)
1688 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1689 {
1690 if (rkind != SRK_LENRANGE)
1691 return false;
1692 /* Set the upper bound to the maximum to prevent
1693 it from being adjusted in the next iteration but
1694 leave MINLEN and the more conservative MAXBOUND
1695 determined so far alone (or leave them null if
1696 they haven't been set yet). That the MINLEN is
1697 in fact zero can be determined from MAXLEN being
1698 unbounded but the discovered minimum is used for
1699 diagnostics. */
1700 pdata->maxlen = build_all_ones_cst (size_type_node);
1701 }
1702 return true;
1703 }
1704 return false;
1705
1706 case GIMPLE_PHI:
1707 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1708 must have a constant length. */
1709 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1710 {
1711 tree arg = gimple_phi_arg (def_stmt, i)->def;
1712
1713 /* If this PHI has itself as an argument, we cannot
1714 determine the string length of this argument. However,
1715 if we can find a constant string length for the other
1716 PHI args then we can still be sure that this is a
1717 constant string length. So be optimistic and just
1718 continue with the next argument. */
1719 if (arg == gimple_phi_result (def_stmt))
1720 continue;
1721
1722 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1723 {
1724 if (rkind != SRK_LENRANGE)
1725 return false;
1726 /* Set the upper bound to the maximum to prevent
1727 it from being adjusted in the next iteration but
1728 leave MINLEN and the more conservative MAXBOUND
1729 determined so far alone (or leave them null if
1730 they haven't been set yet). That the MINLEN is
1731 in fact zero can be determined from MAXLEN being
1732 unbounded but the discovered minimum is used for
1733 diagnostics. */
1734 pdata->maxlen = build_all_ones_cst (size_type_node);
1735 }
1736 }
1737 return true;
1738
1739 default:
1740 return false;
1741 }
1742 }
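
/* Editorial illustration (not part of the original source): for
   SRK_LENRANGE the recursion above merges string lengths across
   COND_EXPR and PHI operands.  Given

     const char *p = flag ? "ab" : "wxyz";

   the two arms contribute lengths 2 and 4, so *PDATA describes the
   range [2, 4]; when an operand is unknown, PDATA->MAXLEN is instead
   set to the all-ones constant to mark the upper bound as unbounded.  */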
1743
1744 /* Try to obtain the range of the lengths of the string(s) referenced
1745 by ARG, or the size of the largest array ARG refers to if the range
1746 of lengths cannot be determined, and store the result in *PDATA,
1747 which must be zero-initialized on input except that PDATA->MAXBOUND
1748 may be set to a non-null tree node other than INTEGER_CST to request
1749 that it be set to the length of the longest string in a PHI.  ELTSIZE
1750 is the expected size of the string element in bytes: 1 for char and
1751 some power of 2 for wide characters.
1752 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1753 for optimization.  Returning false means that a nonzero PDATA->MINLEN
1754 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1755 is -1 (in that case, the actual range is indeterminate, i.e.,
1756 [0, PTRDIFF_MAX - 2]).  */
1757
1758 bool
1759 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1760 {
1761 bitmap visited = NULL;
1762 tree maxbound = pdata->maxbound;
1763
1764 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1765 {
1766 /* On failure extend the length range to an impossible maximum
1767 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1768 members can stay unchanged regardless. */
1769 pdata->minlen = ssize_int (0);
1770 pdata->maxlen = build_all_ones_cst (size_type_node);
1771 }
1772 else if (!pdata->minlen)
1773 pdata->minlen = ssize_int (0);
1774
1775 /* If it's unchanged from its initial non-null value, set the conservative
1776 MAXBOUND to SIZE_MAX.  Otherwise leave it as it is (possibly null). */
1777 if (maxbound && pdata->maxbound == maxbound)
1778 pdata->maxbound = build_all_ones_cst (size_type_node);
1779
1780 if (visited)
1781 BITMAP_FREE (visited);
1782
1783 return !integer_all_onesp (pdata->maxlen);
1784 }
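
/* A minimal usage sketch (editorial addition, not in the original
   source; the function name below is hypothetical).  */
#if 0
static void
example_get_range_strlen (tree arg)
{
  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1))
    {
      /* LENDATA.MINLEN and LENDATA.MAXLEN now bound the length of
         the string(s) ARG may point to.  */
    }
  else
    {
      /* LENDATA.MAXLEN is the all-ones constant; the range is only
         suitable for diagnostics, not for optimization.  */
    }
}
#endif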
1785
1786 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1787 For ARG of pointer types, NONSTR indicates if the caller is prepared
1788 to handle unterminated strings. For integer ARG and when RKIND ==
1789 SRK_INT_VALUE, NONSTR must be null.
1790
1791 If an unterminated array is discovered and our caller handles
1792 unterminated arrays, then bubble up the offending DECL and
1793 return the maximum size. Otherwise return NULL. */
1794
1795 static tree
1796 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1797 {
1798 /* A non-null NONSTR is meaningless when determining the maximum
1799 value of an integer ARG. */
1800 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1801 /* ARG must have an integral type when RKIND says so. */
1802 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1803
1804 bitmap visited = NULL;
1805
1806 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1807 is unbounded. */
1808 c_strlen_data lendata = { };
1809 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1810 lendata.maxlen = NULL_TREE;
1811 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1812 lendata.maxlen = NULL_TREE;
1813
1814 if (visited)
1815 BITMAP_FREE (visited);
1816
1817 if (nonstr)
1818 {
1819 /* For callers prepared to handle unterminated arrays set
1820 *NONSTR to point to the declaration of the array and return
1821 the maximum length/size. */
1822 *nonstr = lendata.decl;
1823 return lendata.maxlen;
1824 }
1825
1826 /* Fail if the constant array isn't nul-terminated. */
1827 return lendata.decl ? NULL_TREE : lendata.maxlen;
1828 }
1829
1830
1831 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1832 Return false if no simplification can be made, true if the call
1833 was replaced.  */
1834
1835 static bool
1836 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1837 tree dest, tree src)
1838 {
1839 gimple *stmt = gsi_stmt (*gsi);
1840 location_t loc = gimple_location (stmt);
1841 tree fn;
1842
1843 /* If SRC and DEST are the same (and not volatile), return DEST. */
1844 if (operand_equal_p (src, dest, 0))
1845 {
1846 /* Issue -Wrestrict unless the pointers are null (those do
1847 not point to objects and so do not indicate an overlap;
1848 such calls could be the result of sanitization and jump
1849 threading). */
1850 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1851 {
1852 tree func = gimple_call_fndecl (stmt);
1853
1854 warning_at (loc, OPT_Wrestrict,
1855 "%qD source argument is the same as destination",
1856 func);
1857 }
1858
1859 replace_call_with_value (gsi, dest);
1860 return true;
1861 }
1862
1863 if (optimize_function_for_size_p (cfun))
1864 return false;
1865
1866 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1867 if (!fn)
1868 return false;
1869
1870 /* Set to non-null if SRC refers to an unterminated array.  */
1871 tree nonstr = NULL;
1872 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1873
1874 if (nonstr)
1875 {
1876 /* Avoid folding calls with unterminated arrays. */
1877 if (!gimple_no_warning_p (stmt))
1878 warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
1879 gimple_set_no_warning (stmt, true);
1880 return false;
1881 }
1882
1883 if (!len)
1884 return false;
1885
1886 len = fold_convert_loc (loc, size_type_node, len);
1887 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1888 len = force_gimple_operand_gsi (gsi, len, true,
1889 NULL_TREE, true, GSI_SAME_STMT);
1890 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1893 }
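
/* Editorial illustration (not part of the original source): with a
   constant source the fold above rewrites strcpy as a fixed-size
   memcpy.  A hypothetical before/after sketch:  */
#if 0
#include <string.h>
static void
example_strcpy (char buf[8])
{
  strcpy (buf, "hello");
}
static void
example_strcpy_folded (char buf[8])
{
  memcpy (buf, "hello", 6);	/* strlen ("hello") + 1 bytes, so the
				   terminating nul is copied too.  */
}
#endif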
1894
1895 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1896 Return false if no simplification can be made, true if the call
1897 was replaced.  */
1898
1899 static bool
1900 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1901 tree dest, tree src, tree len)
1902 {
1903 gimple *stmt = gsi_stmt (*gsi);
1904 location_t loc = gimple_location (stmt);
1905 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1906
1907 /* If the LEN parameter is zero, return DEST. */
1908 if (integer_zerop (len))
1909 {
1910 /* Avoid warning if the destination refers to an array/pointer
1911 decorated with attribute nonstring.  */
1912 if (!nonstring)
1913 {
1914 tree fndecl = gimple_call_fndecl (stmt);
1915
1916 /* Warn about the lack of nul termination: the result is not
1917 a (nul-terminated) string. */
1918 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1919 if (slen && !integer_zerop (slen))
1920 warning_at (loc, OPT_Wstringop_truncation,
1921 "%G%qD destination unchanged after copying no bytes "
1922 "from a string of length %E",
1923 stmt, fndecl, slen);
1924 else
1925 warning_at (loc, OPT_Wstringop_truncation,
1926 "%G%qD destination unchanged after copying no bytes",
1927 stmt, fndecl);
1928 }
1929
1930 replace_call_with_value (gsi, dest);
1931 return true;
1932 }
1933
1934 /* We can't compare slen with len as constants below if len is not a
1935 constant. */
1936 if (TREE_CODE (len) != INTEGER_CST)
1937 return false;
1938
1939 /* Now, we must be passed a constant src ptr parameter. */
1940 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1941 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1942 return false;
1943
1944 /* The size of the source string including the terminating nul. */
1945 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1946
1947 /* We do not support simplification of this case, though we do
1948 support it when expanding trees into RTL. */
1949 /* FIXME: generate a call to __builtin_memset. */
1950 if (tree_int_cst_lt (ssize, len))
1951 return false;
1952
1953 /* Diagnose truncation that leaves the copy unterminated. */
1954 maybe_diag_stxncpy_trunc (*gsi, src, len);
1955
1956 /* OK transform into builtin memcpy. */
1957 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1958 if (!fn)
1959 return false;
1960
1961 len = fold_convert_loc (loc, size_type_node, len);
1962 len = force_gimple_operand_gsi (gsi, len, true,
1963 NULL_TREE, true, GSI_SAME_STMT);
1964 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1965 replace_call_with_call_and_fold (gsi, repl);
1966
1967 return true;
1968 }
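
/* Editorial illustration (not part of the original source): when the
   constant bound covers the whole source string and its nul, strncpy
   is rewritten as memcpy.  A hypothetical before/after sketch:  */
#if 0
#include <string.h>
static void
example_strncpy (char buf[8])
{
  strncpy (buf, "abc", 4);
}
static void
example_strncpy_folded (char buf[8])
{
  memcpy (buf, "abc", 4);	/* Bound == strlen ("abc") + 1, so no
				   zero padding is needed.  */
}
#endif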
1969
1970 /* Fold function call to builtin strchr or strrchr.
1971 If both arguments are constant, evaluate and fold the result,
1972 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1973 In general strlen is significantly faster than strchr
1974 due to being a simpler operation. */
1975 static bool
1976 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1977 {
1978 gimple *stmt = gsi_stmt (*gsi);
1979 tree str = gimple_call_arg (stmt, 0);
1980 tree c = gimple_call_arg (stmt, 1);
1981 location_t loc = gimple_location (stmt);
1982 const char *p;
1983 char ch;
1984
1985 if (!gimple_call_lhs (stmt))
1986 return false;
1987
1988 /* Avoid folding if the first argument is not a nul-terminated array.
1989 Defer warning until later. */
1990 if (!check_nul_terminated_array (NULL_TREE, str))
1991 return false;
1992
1993 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1994 {
1995 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1996
1997 if (p1 == NULL)
1998 {
1999 replace_call_with_value (gsi, integer_zero_node);
2000 return true;
2001 }
2002
2003 tree len = build_int_cst (size_type_node, p1 - p);
2004 gimple_seq stmts = NULL;
2005 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2006 POINTER_PLUS_EXPR, str, len);
2007 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2008 gsi_replace_with_seq_vops (gsi, stmts);
2009 return true;
2010 }
2011
2012 if (!integer_zerop (c))
2013 return false;
2014
2015 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2016 if (is_strrchr && optimize_function_for_size_p (cfun))
2017 {
2018 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2019
2020 if (strchr_fn)
2021 {
2022 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2023 replace_call_with_call_and_fold (gsi, repl);
2024 return true;
2025 }
2026
2027 return false;
2028 }
2029
2030 tree len;
2031 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2032
2033 if (!strlen_fn)
2034 return false;
2035
2036 /* Create newstr = strlen (str). */
2037 gimple_seq stmts = NULL;
2038 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2039 gimple_set_location (new_stmt, loc);
2040 len = create_tmp_reg_or_ssa_name (size_type_node);
2041 gimple_call_set_lhs (new_stmt, len);
2042 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2043
2044 /* Create (str p+ strlen (str)). */
2045 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2046 POINTER_PLUS_EXPR, str, len);
2047 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2048 gsi_replace_with_seq_vops (gsi, stmts);
2049 /* gsi now points at the assignment to the lhs, get a
2050 stmt iterator to the strlen call.
2051 ??? We can't use gsi_for_stmt as that doesn't work when the
2052 CFG isn't built yet. */
2053 gimple_stmt_iterator gsi2 = *gsi;
2054 gsi_prev (&gsi2);
2055 fold_stmt (&gsi2);
2056 return true;
2057 }
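
/* Editorial illustration (not part of the original source): examples
   of the folds above.

     strchr ("hello", 'l')  =>  "hello" + 2      (constant fold)
     str(r)chr (s, 0)       =>  s + strlen (s)   (strlen is cheaper)
     strrchr (s, 0)         =>  strchr (s, 0)    (when optimizing
                                                  for size)  */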
2058
2059 /* Fold function call to builtin strstr.
2060 If both arguments are constant, evaluate and fold the result,
2061 additionally fold strstr (x, "") into x and strstr (x, "c")
2062 into strchr (x, 'c'). */
2063 static bool
2064 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2065 {
2066 gimple *stmt = gsi_stmt (*gsi);
2067 if (!gimple_call_lhs (stmt))
2068 return false;
2069
2070 tree haystack = gimple_call_arg (stmt, 0);
2071 tree needle = gimple_call_arg (stmt, 1);
2072
2073 /* Avoid folding if either argument is not a nul-terminated array.
2074 Defer warning until later. */
2075 if (!check_nul_terminated_array (NULL_TREE, haystack)
2076 || !check_nul_terminated_array (NULL_TREE, needle))
2077 return false;
2078
2079 const char *q = c_getstr (needle);
2080 if (q == NULL)
2081 return false;
2082
2083 if (const char *p = c_getstr (haystack))
2084 {
2085 const char *r = strstr (p, q);
2086
2087 if (r == NULL)
2088 {
2089 replace_call_with_value (gsi, integer_zero_node);
2090 return true;
2091 }
2092
2093 tree len = build_int_cst (size_type_node, r - p);
2094 gimple_seq stmts = NULL;
2095 gimple *new_stmt
2096 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2097 haystack, len);
2098 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2099 gsi_replace_with_seq_vops (gsi, stmts);
2100 return true;
2101 }
2102
2103 /* For strstr (x, "") return x. */
2104 if (q[0] == '\0')
2105 {
2106 replace_call_with_value (gsi, haystack);
2107 return true;
2108 }
2109
2110 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2111 if (q[1] == '\0')
2112 {
2113 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2114 if (strchr_fn)
2115 {
2116 tree c = build_int_cst (integer_type_node, q[0]);
2117 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2118 replace_call_with_call_and_fold (gsi, repl);
2119 return true;
2120 }
2121 }
2122
2123 return false;
2124 }
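
/* Editorial illustration (not part of the original source): examples
   of the three folds above.

     strstr ("haystack", "sta")  =>  "haystack" + 3
     strstr (x, "")              =>  x
     strstr (x, "c")             =>  strchr (x, 'c')  */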
2125
2126 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2127 to the call.
2128
2129 Return false if no simplification was possible.  Otherwise replace
2130 the call with an equivalent statement sequence (a strlen/memcpy
2131 pair when the source length is known) and return true.  */
2143
2144 static bool
2145 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2146 {
2147 gimple *stmt = gsi_stmt (*gsi);
2148 location_t loc = gimple_location (stmt);
2149
2150 const char *p = c_getstr (src);
2151
2152 /* If the string length is zero, return the dst parameter. */
2153 if (p && *p == '\0')
2154 {
2155 replace_call_with_value (gsi, dst);
2156 return true;
2157 }
2158
2159 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2160 return false;
2161
2162 /* See if we can store by pieces into (dst + strlen(dst)). */
2163 tree newdst;
2164 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2165 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2166
2167 if (!strlen_fn || !memcpy_fn)
2168 return false;
2169
2170 /* If the length of the source string isn't computable don't
2171 split strcat into strlen and memcpy. */
2172 tree len = get_maxval_strlen (src, SRK_STRLEN);
2173 if (! len)
2174 return false;
2175
2176 /* Create strlen (dst). */
2177 gimple_seq stmts = NULL, stmts2;
2178 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2179 gimple_set_location (repl, loc);
2180 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2181 gimple_call_set_lhs (repl, newdst);
2182 gimple_seq_add_stmt_without_update (&stmts, repl);
2183
2184 /* Create (dst p+ strlen (dst)). */
2185 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2186 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2187 gimple_seq_add_seq_without_update (&stmts, stmts2);
2188
2189 len = fold_convert_loc (loc, size_type_node, len);
2190 len = size_binop_loc (loc, PLUS_EXPR, len,
2191 build_int_cst (size_type_node, 1));
2192 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2193 gimple_seq_add_seq_without_update (&stmts, stmts2);
2194
2195 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2196 gimple_seq_add_stmt_without_update (&stmts, repl);
2197 if (gimple_call_lhs (stmt))
2198 {
2199 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2200 gimple_seq_add_stmt_without_update (&stmts, repl);
2201 gsi_replace_with_seq_vops (gsi, stmts);
2202 /* gsi now points at the assignment to the lhs, get a
2203 stmt iterator to the memcpy call.
2204 ??? We can't use gsi_for_stmt as that doesn't work when the
2205 CFG isn't built yet. */
2206 gimple_stmt_iterator gsi2 = *gsi;
2207 gsi_prev (&gsi2);
2208 fold_stmt (&gsi2);
2209 }
2210 else
2211 {
2212 gsi_replace_with_seq_vops (gsi, stmts);
2213 fold_stmt (gsi);
2214 }
2215 return true;
2216 }
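
/* Editorial illustration (not part of the original source): with a
   known source length the fold above appends by pieces.  A
   hypothetical before/after sketch:  */
#if 0
#include <string.h>
static void
example_strcat (char *dst)
{
  strcat (dst, "abc");
}
static void
example_strcat_folded (char *dst)
{
  memcpy (dst + strlen (dst), "abc", 4);  /* Copy the string and its
					     nul past the end of the
					     existing string.  */
}
#endif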
2217
2218 /* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
2219 are the arguments to the call.  Return true if simplified.  */
2220
2221 static bool
2222 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2223 {
2224 gimple *stmt = gsi_stmt (*gsi);
2225 tree dest = gimple_call_arg (stmt, 0);
2226 tree src = gimple_call_arg (stmt, 1);
2227 tree size = gimple_call_arg (stmt, 2);
2228 tree fn;
2229 const char *p;
2230
2232 p = c_getstr (src);
2233 /* If the SRC parameter is "", return DEST. */
2234 if (p && *p == '\0')
2235 {
2236 replace_call_with_value (gsi, dest);
2237 return true;
2238 }
2239
2240 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2241 return false;
2242
2243 /* If __builtin_strcat_chk is used, assume strcat is available. */
2244 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2245 if (!fn)
2246 return false;
2247
2248 gimple *repl = gimple_build_call (fn, 2, dest, src);
2249 replace_call_with_call_and_fold (gsi, repl);
2250 return true;
2251 }
2252
2253 /* Simplify a call to the strncat builtin. */
2254
2255 static bool
2256 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2257 {
2258 gimple *stmt = gsi_stmt (*gsi);
2259 tree dst = gimple_call_arg (stmt, 0);
2260 tree src = gimple_call_arg (stmt, 1);
2261 tree len = gimple_call_arg (stmt, 2);
2262
2263 const char *p = c_getstr (src);
2264
2265 /* If the requested length is zero, or the src parameter string
2266 length is zero, return the dst parameter. */
2267 if (integer_zerop (len) || (p && *p == '\0'))
2268 {
2269 replace_call_with_value (gsi, dst);
2270 return true;
2271 }
2272
2273 if (TREE_CODE (len) != INTEGER_CST || !p)
2274 return false;
2275
2276 unsigned srclen = strlen (p);
2277
2278 int cmpsrc = compare_tree_int (len, srclen);
2279
2280 /* Return early if the requested len is less than the string length.
2281 Warnings will be issued elsewhere later. */
2282 if (cmpsrc < 0)
2283 return false;
2284
2285 unsigned HOST_WIDE_INT dstsize;
2286
2287 bool nowarn = gimple_no_warning_p (stmt);
2288
2289 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2290 {
2291 int cmpdst = compare_tree_int (len, dstsize);
2292
2293 if (cmpdst >= 0)
2294 {
2295 tree fndecl = gimple_call_fndecl (stmt);
2296
2297 /* Strncat copies (at most) LEN bytes and always appends
2298 the terminating NUL so the specified bound should never
2299 be equal to (or greater than) the size of the destination.
2300 If it is, the copy could overflow. */
2301 location_t loc = gimple_location (stmt);
2302 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2303 cmpdst == 0
2304 ? G_("%G%qD specified bound %E equals "
2305 "destination size")
2306 : G_("%G%qD specified bound %E exceeds "
2307 "destination size %wu"),
2308 stmt, fndecl, len, dstsize);
2309 if (nowarn)
2310 gimple_set_no_warning (stmt, true);
2311 }
2312 }
2313
2314 if (!nowarn && cmpsrc == 0)
2315 {
2316 tree fndecl = gimple_call_fndecl (stmt);
2317 location_t loc = gimple_location (stmt);
2318
2319 /* To avoid possible overflow the specified bound should also
2320 not be equal to the length of the source, even when the size
2321 of the destination is unknown (it's not an uncommon mistake
2322 to pass the length of the source as the bound to strncat).  */
2323 if (warning_at (loc, OPT_Wstringop_overflow_,
2324 "%G%qD specified bound %E equals source length",
2325 stmt, fndecl, len))
2326 gimple_set_no_warning (stmt, true);
2327 }
2328
2329 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2330
2331 /* If the replacement _DECL isn't initialized, don't do the
2332 transformation. */
2333 if (!fn)
2334 return false;
2335
2336 /* Otherwise, emit a call to strcat. */
2337 gcall *repl = gimple_build_call (fn, 2, dst, src);
2338 replace_call_with_call_and_fold (gsi, repl);
2339 return true;
2340 }
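
/* Editorial illustration (not part of the original source): once the
   bound is known not to truncate, strncat degenerates to strcat.

     strncat (dst, "abc", 16)  =>  strcat (dst, "abc")

   since at most strlen ("abc") bytes plus the terminating nul are
   appended either way.  */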
2341
2342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2343 LEN, and SIZE. */
2344
2345 static bool
2346 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2347 {
2348 gimple *stmt = gsi_stmt (*gsi);
2349 tree dest = gimple_call_arg (stmt, 0);
2350 tree src = gimple_call_arg (stmt, 1);
2351 tree len = gimple_call_arg (stmt, 2);
2352 tree size = gimple_call_arg (stmt, 3);
2353 tree fn;
2354 const char *p;
2355
2356 p = c_getstr (src);
2357 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2358 if ((p && *p == '\0')
2359 || integer_zerop (len))
2360 {
2361 replace_call_with_value (gsi, dest);
2362 return true;
2363 }
2364
2365 if (! tree_fits_uhwi_p (size))
2366 return false;
2367
2368 if (! integer_all_onesp (size))
2369 {
2370 tree src_len = c_strlen (src, 1);
2371 if (src_len
2372 && tree_fits_uhwi_p (src_len)
2373 && tree_fits_uhwi_p (len)
2374 && ! tree_int_cst_lt (len, src_len))
2375 {
2376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2377 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2378 if (!fn)
2379 return false;
2380
2381 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2382 replace_call_with_call_and_fold (gsi, repl);
2383 return true;
2384 }
2385 return false;
2386 }
2387
2388 /* If __builtin_strncat_chk is used, assume strncat is available. */
2389 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2390 if (!fn)
2391 return false;
2392
2393 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2396 }
2397
2398 /* Build and append gimple statements to STMTS that load the first
2399 character of the memory location identified by STR.  LOC is the
2400 location of the statement.  */
2401
2402 static tree
2403 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2404 {
2405 tree var;
2406
2407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2408 tree cst_uchar_ptr_node
2409 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2410 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2411
2412 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2413 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2414 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2415
2416 gimple_assign_set_lhs (stmt, var);
2417 gimple_seq_add_stmt_without_update (stmts, stmt);
2418
2419 return var;
2420 }
2421
2422 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator.  */
2423
2424 static bool
2425 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2426 {
2427 gimple *stmt = gsi_stmt (*gsi);
2428 tree callee = gimple_call_fndecl (stmt);
2429 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2430
2431 tree type = integer_type_node;
2432 tree str1 = gimple_call_arg (stmt, 0);
2433 tree str2 = gimple_call_arg (stmt, 1);
2434 tree lhs = gimple_call_lhs (stmt);
2435
2436 tree bound_node = NULL_TREE;
2437 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2438
2439 /* Handle strncmp and strncasecmp functions. */
2440 if (gimple_call_num_args (stmt) == 3)
2441 {
2442 bound_node = gimple_call_arg (stmt, 2);
2443 if (tree_fits_uhwi_p (bound_node))
2444 bound = tree_to_uhwi (bound_node);
2445 }
2446
2447 /* If the BOUND parameter is zero, return zero. */
2448 if (bound == 0)
2449 {
2450 replace_call_with_value (gsi, integer_zero_node);
2451 return true;
2452 }
2453
2454 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2455 if (operand_equal_p (str1, str2, 0))
2456 {
2457 replace_call_with_value (gsi, integer_zero_node);
2458 return true;
2459 }
2460
2461 /* Initially set to the number of characters, including the terminating
2462 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2463 the array Sx is not terminated by a nul.
2464 For nul-terminated strings LENx is then adjusted to the string
2465 length so that LENx == NULPOSx holds.  */
2466 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2467 const char *p1 = getbyterep (str1, &len1);
2468 const char *p2 = getbyterep (str2, &len2);
2469
2470 /* The position of the terminating nul character if one exists, otherwise
2471 a value greater than LENx. */
2472 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2473
2474 if (p1)
2475 {
2476 size_t n = strnlen (p1, len1);
2477 if (n < len1)
2478 len1 = nulpos1 = n;
2479 }
2480
2481 if (p2)
2482 {
2483 size_t n = strnlen (p2, len2);
2484 if (n < len2)
2485 len2 = nulpos2 = n;
2486 }
2487
2488 /* For known strings, return an immediate value. */
2489 if (p1 && p2)
2490 {
2491 int r = 0;
2492 bool known_result = false;
2493
2494 switch (fcode)
2495 {
2496 case BUILT_IN_STRCMP:
2497 case BUILT_IN_STRCMP_EQ:
2498 if (len1 != nulpos1 || len2 != nulpos2)
2499 break;
2500
2501 r = strcmp (p1, p2);
2502 known_result = true;
2503 break;
2504
2505 case BUILT_IN_STRNCMP:
2506 case BUILT_IN_STRNCMP_EQ:
2507 {
2508 if (bound == HOST_WIDE_INT_M1U)
2509 break;
2510
2511 /* Reduce the bound to be no more than the length
2512 of the shorter of the two strings, or the sizes
2513 of the unterminated arrays. */
2514 unsigned HOST_WIDE_INT n = bound;
2515
2516 if (len1 == nulpos1 && len1 < n)
2517 n = len1 + 1;
2518 if (len2 == nulpos2 && len2 < n)
2519 n = len2 + 1;
2520
2521 if (MIN (nulpos1, nulpos2) + 1 < n)
2522 break;
2523
2524 r = strncmp (p1, p2, n);
2525 known_result = true;
2526 break;
2527 }
2528 /* The only case we can handle is when the strings are equal (result
2529 0), which is already handled by the operand_equal_p case above.  */
2530 case BUILT_IN_STRCASECMP:
2531 break;
2532 case BUILT_IN_STRNCASECMP:
2533 {
2534 if (bound == HOST_WIDE_INT_M1U)
2535 break;
2536 r = strncmp (p1, p2, bound);
2537 if (r == 0)
2538 known_result = true;
2539 break;
2540 }
2541 default:
2542 gcc_unreachable ();
2543 }
2544
2545 if (known_result)
2546 {
2547 replace_call_with_value (gsi, build_cmp_result (type, r));
2548 return true;
2549 }
2550 }
2551
2552 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2553 || fcode == BUILT_IN_STRCMP
2554 || fcode == BUILT_IN_STRCMP_EQ
2555 || fcode == BUILT_IN_STRCASECMP;
2556
2557 location_t loc = gimple_location (stmt);
2558
2559 /* If the second arg is "", return *(const unsigned char*)arg1. */
2560 if (p2 && *p2 == '\0' && nonzero_bound)
2561 {
2562 gimple_seq stmts = NULL;
2563 tree var = gimple_load_first_char (loc, str1, &stmts);
2564 if (lhs)
2565 {
2566 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2567 gimple_seq_add_stmt_without_update (&stmts, stmt);
2568 }
2569
2570 gsi_replace_with_seq_vops (gsi, stmts);
2571 return true;
2572 }
2573
2574 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2575 if (p1 && *p1 == '\0' && nonzero_bound)
2576 {
2577 gimple_seq stmts = NULL;
2578 tree var = gimple_load_first_char (loc, str2, &stmts);
2579
2580 if (lhs)
2581 {
2582 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2583 stmt = gimple_build_assign (c, NOP_EXPR, var);
2584 gimple_seq_add_stmt_without_update (&stmts, stmt);
2585
2586 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2587 gimple_seq_add_stmt_without_update (&stmts, stmt);
2588 }
2589
2590 gsi_replace_with_seq_vops (gsi, stmts);
2591 return true;
2592 }
2593
2594 /* If BOUND is one, return an expression corresponding to
2595 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2596 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2597 {
2598 gimple_seq stmts = NULL;
2599 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2600 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2601
2602 if (lhs)
2603 {
2604 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2605 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2606 gimple_seq_add_stmt_without_update (&stmts, convert1);
2607
2608 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2609 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2610 gimple_seq_add_stmt_without_update (&stmts, convert2);
2611
2612 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2613 gimple_seq_add_stmt_without_update (&stmts, stmt);
2614 }
2615
2616 gsi_replace_with_seq_vops (gsi, stmts);
2617 return true;
2618 }
2619
2620 /* If BOUND is greater than the length of one constant string,
2621 and the other argument is also a nul-terminated string, replace
2622 strncmp with strcmp. */
2623 if (fcode == BUILT_IN_STRNCMP
2624 && bound > 0 && bound < HOST_WIDE_INT_M1U
2625 && ((p2 && len2 < bound && len2 == nulpos2)
2626 || (p1 && len1 < bound && len1 == nulpos1)))
2627 {
2628 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2629 if (!fn)
2630 return false;
2631 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2632 replace_call_with_call_and_fold (gsi, repl);
2633 return true;
2634 }
2635
2636 return false;
2637 }
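
/* Editorial illustration (not part of the original source): examples
   of the folds above.

     strcmp (s, s)          =>  0
     strncmp (s1, s2, 0)    =>  0
     strcmp ("ab", "ba")    =>  negative constant
     strcmp (s, "")         =>  (int) *(const unsigned char *) s
     strncmp (s1, s2, 1)    =>  *(const unsigned char *) s1
                                - *(const unsigned char *) s2
     strncmp (s, "ab", 9)   =>  strcmp (s, "ab")  */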
2638
2639 /* Fold a call to the memchr builtin pointed to by the GSI iterator.  */
2640
2641 static bool
2642 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2643 {
2644 gimple *stmt = gsi_stmt (*gsi);
2645 tree lhs = gimple_call_lhs (stmt);
2646 tree arg1 = gimple_call_arg (stmt, 0);
2647 tree arg2 = gimple_call_arg (stmt, 1);
2648 tree len = gimple_call_arg (stmt, 2);
2649
2650 /* If the LEN parameter is zero, return zero. */
2651 if (integer_zerop (len))
2652 {
2653 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2654 return true;
2655 }
2656
2657 char c;
2658 if (TREE_CODE (arg2) != INTEGER_CST
2659 || !tree_fits_uhwi_p (len)
2660 || !target_char_cst_p (arg2, &c))
2661 return false;
2662
2663 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2664 unsigned HOST_WIDE_INT string_length;
2665 const char *p1 = getbyterep (arg1, &string_length);
2666
2667 if (p1)
2668 {
2669 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2670 if (r == NULL)
2671 {
2672 tree mem_size, offset_node;
2673 byte_representation (arg1, &offset_node, &mem_size, NULL);
2674 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2675 ? 0 : tree_to_uhwi (offset_node);
2676 /* MEM_SIZE is the size of the array the string literal
2677 is stored in. */
2678 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2679 gcc_checking_assert (string_length <= string_size);
2680 if (length <= string_size)
2681 {
2682 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2683 return true;
2684 }
2685 }
2686 else
2687 {
2688 unsigned HOST_WIDE_INT offset = r - p1;
2689 gimple_seq stmts = NULL;
2690 if (lhs != NULL_TREE)
2691 {
2692 tree offset_cst = build_int_cst (sizetype, offset);
2693 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2694 arg1, offset_cst);
2695 gimple_seq_add_stmt_without_update (&stmts, stmt);
2696 }
2697 else
2698 gimple_seq_add_stmt_without_update (&stmts,
2699 gimple_build_nop ());
2700
2701 gsi_replace_with_seq_vops (gsi, stmts);
2702 return true;
2703 }
2704 }
2705
2706 return false;
2707 }
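
/* Editorial illustration (not part of the original source): with a
   constant first argument the search is done at compile time.

     memchr ("hello", 'l', 5)  =>  "hello" + 2
     memchr ("hello", 'z', 5)  =>  null pointer
     memchr (p, c, 0)          =>  null pointer  */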
2708
2709 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
2710 to the call.  UNLOCKED is true if this is actually a call to
2711 fputs_unlocked.  Return false if no simplification was possible,
2712 true if the call was replaced.  */
2715
2716 static bool
2717 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2718 tree arg0, tree arg1,
2719 bool unlocked)
2720 {
2721 gimple *stmt = gsi_stmt (*gsi);
2722
2723 /* If we're using an unlocked function, assume the other unlocked
2724 functions exist explicitly. */
2725 tree const fn_fputc = (unlocked
2726 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2727 : builtin_decl_implicit (BUILT_IN_FPUTC));
2728 tree const fn_fwrite = (unlocked
2729 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2730 : builtin_decl_implicit (BUILT_IN_FWRITE));
2731
2732 /* If the return value is used, don't do the transformation. */
2733 if (gimple_call_lhs (stmt))
2734 return false;
2735
2736 /* Get the length of the string passed to fputs. If the length
2737 can't be determined, punt. */
2738 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2739 if (!len
2740 || TREE_CODE (len) != INTEGER_CST)
2741 return false;
2742
2743 switch (compare_tree_int (len, 1))
2744 {
2745 case -1: /* length is 0, delete the call entirely.  */
2746 replace_call_with_value (gsi, integer_zero_node);
2747 return true;
2748
2749 case 0: /* length is 1, call fputc. */
2750 {
2751 const char *p = c_getstr (arg0);
2752 if (p != NULL)
2753 {
2754 if (!fn_fputc)
2755 return false;
2756
2757 gimple *repl = gimple_build_call (fn_fputc, 2,
2758 build_int_cst
2759 (integer_type_node, p[0]), arg1);
2760 replace_call_with_call_and_fold (gsi, repl);
2761 return true;
2762 }
2763 }
2764 /* FALLTHROUGH */
2765 case 1: /* length is greater than 1, call fwrite. */
2766 {
2767 /* If optimizing for size, keep fputs.  */
2768 if (optimize_function_for_size_p (cfun))
2769 return false;
2770 /* New argument list transforming fputs(string, stream) to
2771 fwrite(string, 1, len, stream). */
2772 if (!fn_fwrite)
2773 return false;
2774
2775 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2776 size_one_node, len, arg1);
2777 replace_call_with_call_and_fold (gsi, repl);
2778 return true;
2779 }
2780 default:
2781 gcc_unreachable ();
2782 }
2783 return false;
2784 }
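
/* Editorial illustration (not part of the original source): the fold
   above selects by the known length of ARG0 and applies only when
   the fputs result is unused.

     fputs ("", f)    =>  call deleted
     fputs ("x", f)   =>  fputc ('x', f)
     fputs ("xy", f)  =>  fwrite ("xy", 1, 2, f)

   the fwrite form only when not optimizing for size.  */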
2785
2786 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2787 DEST, SRC, LEN, and SIZE are the arguments to the call.
2788 FCODE is the BUILT_IN_* code of the builtin.  Return false if no
2789 simplification was possible, true if the call was replaced.  */
2791
2792 static bool
2793 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2794 tree dest, tree src, tree len, tree size,
2795 enum built_in_function fcode)
2796 {
2797 gimple *stmt = gsi_stmt (*gsi);
2798 location_t loc = gimple_location (stmt);
2799 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2800 tree fn;
2801
2802 /* If SRC and DEST are the same (and not volatile), return DEST
2803 (resp. DEST+LEN for __mempcpy_chk). */
2804 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2805 {
2806 if (fcode != BUILT_IN_MEMPCPY_CHK)
2807 {
2808 replace_call_with_value (gsi, dest);
2809 return true;
2810 }
2811 else
2812 {
2813 gimple_seq stmts = NULL;
2814 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2815 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2816 TREE_TYPE (dest), dest, len);
2817 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2818 replace_call_with_value (gsi, temp);
2819 return true;
2820 }
2821 }
2822
2823 if (! tree_fits_uhwi_p (size))
2824 return false;
2825
2826 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2827 if (! integer_all_onesp (size))
2828 {
2829 if (! tree_fits_uhwi_p (len))
2830 {
2831 /* If LEN is not constant, try MAXLEN too.
2832 For MAXLEN only allow optimizing into non-_ocs function
2833 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2834 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2835 {
2836 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2837 {
2838 /* (void) __mempcpy_chk () can be optimized into
2839 (void) __memcpy_chk (). */
2840 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2841 if (!fn)
2842 return false;
2843
2844 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2845 replace_call_with_call_and_fold (gsi, repl);
2846 return true;
2847 }
2848 return false;
2849 }
2850 }
2851 else
2852 maxlen = len;
2853
2854 if (tree_int_cst_lt (size, maxlen))
2855 return false;
2856 }
2857
2858 fn = NULL_TREE;
2859 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2860 mem{cpy,pcpy,move,set} is available. */
2861 switch (fcode)
2862 {
2863 case BUILT_IN_MEMCPY_CHK:
2864 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2865 break;
2866 case BUILT_IN_MEMPCPY_CHK:
2867 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2868 break;
2869 case BUILT_IN_MEMMOVE_CHK:
2870 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2871 break;
2872 case BUILT_IN_MEMSET_CHK:
2873 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2874 break;
2875 default:
2876 break;
2877 }
2878
2879 if (!fn)
2880 return false;
2881
2882 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2883 replace_call_with_call_and_fold (gsi, repl);
2884 return true;
2885 }
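
/* Editorial illustration (not part of the original source): once LEN
   is known to fit in the object size, the checking call is folded to
   the plain builtin.  A hypothetical before/after sketch:  */
#if 0
#include <string.h>
static void
example_memcpy_chk (char *d, const char *s)
{
  __builtin___memcpy_chk (d, s, 16, 32);
}
static void
example_memcpy_chk_folded (char *d, const char *s)
{
  memcpy (d, s, 16);	/* 16 <= 32, so no runtime check is needed;
			   likewise for mempcpy, memmove, memset.  */
}
#endif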
2886
2887 /* Fold a call to the __st[rp]cpy_chk builtin.
2888 DEST, SRC, and SIZE are the arguments to the call.
2889 FCODE is the BUILT_IN_* code of the builtin.  Return false if no
2890 simplification was possible, true if the call was replaced.  */
2892
2893 static bool
2894 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2895 tree dest,
2896 tree src, tree size,
2897 enum built_in_function fcode)
2898 {
2899 gimple *stmt = gsi_stmt (*gsi);
2900 location_t loc = gimple_location (stmt);
2901 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2902 tree len, fn;
2903
2904 /* If SRC and DEST are the same (and not volatile), return DEST. */
2905 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2906 {
2907 /* Issue -Wrestrict unless the pointers are null (those do
2908 not point to objects and so do not indicate an overlap;
2909 such calls could be the result of sanitization and jump
2910 threading). */
2911 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2912 {
2913 tree func = gimple_call_fndecl (stmt);
2914
2915 warning_at (loc, OPT_Wrestrict,
2916 "%qD source argument is the same as destination",
2917 func);
2918 }
2919
2920 replace_call_with_value (gsi, dest);
2921 return true;
2922 }
2923
2924 if (! tree_fits_uhwi_p (size))
2925 return false;
2926
2927 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2928 if (! integer_all_onesp (size))
2929 {
2930 len = c_strlen (src, 1);
2931 if (! len || ! tree_fits_uhwi_p (len))
2932 {
2933 /* If LEN is not constant, try MAXLEN too.
2934 For MAXLEN only allow optimizing into non-_ocs function
2935 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2936 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2937 {
2938 if (fcode == BUILT_IN_STPCPY_CHK)
2939 {
2940 if (! ignore)
2941 return false;
2942
2943 /* If return value of __stpcpy_chk is ignored,
2944 optimize into __strcpy_chk. */
2945 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2946 if (!fn)
2947 return false;
2948
2949 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2950 replace_call_with_call_and_fold (gsi, repl);
2951 return true;
2952 }
2953
2954 if (! len || TREE_SIDE_EFFECTS (len))
2955 return false;
2956
2957 /* If c_strlen returned something, but not a constant,
2958 transform __strcpy_chk into __memcpy_chk. */
2959 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2960 if (!fn)
2961 return false;
2962
2963 gimple_seq stmts = NULL;
2964 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2965 len = gimple_convert (&stmts, loc, size_type_node, len);
2966 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2967 build_int_cst (size_type_node, 1));
2968 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2969 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2970 replace_call_with_call_and_fold (gsi, repl);
2971 return true;
2972 }
2973 }
2974 else
2975 maxlen = len;
2976
2977 if (! tree_int_cst_lt (maxlen, size))
2978 return false;
2979 }
2980
2981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2982 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2983 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2984 if (!fn)
2985 return false;
2986
2987 gimple *repl = gimple_build_call (fn, 2, dest, src);
2988 replace_call_with_call_and_fold (gsi, repl);
2989 return true;
2990 }
2991
2992 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2993 are the arguments to the call.  FCODE is the BUILT_IN_* code of the
2994 builtin.  Return false if no simplification was possible, true if
2995 the call was replaced.  */
2996
2997 static bool
2998 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2999 tree dest, tree src,
3000 tree len, tree size,
3001 enum built_in_function fcode)
3002 {
3003 gimple *stmt = gsi_stmt (*gsi);
3004 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3005 tree fn;
3006
3007 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3008 {
3009 /* If return value of __stpncpy_chk is ignored,
3010 optimize into __strncpy_chk. */
3011 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3012 if (fn)
3013 {
3014 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3015 replace_call_with_call_and_fold (gsi, repl);
3016 return true;
3017 }
3018 }
3019
3020 if (! tree_fits_uhwi_p (size))
3021 return false;
3022
3023 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3024 if (! integer_all_onesp (size))
3025 {
3026 if (! tree_fits_uhwi_p (len))
3027 {
3028 /* If LEN is not constant, try MAXLEN too.
3029 For MAXLEN only allow optimizing into non-_ocs function
3030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3031 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3032 return false;
3033 }
3034 else
3035 maxlen = len;
3036
3037 if (tree_int_cst_lt (size, maxlen))
3038 return false;
3039 }
3040
3041 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3042 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3043 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3044 if (!fn)
3045 return false;
3046
3047 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3048 replace_call_with_call_and_fold (gsi, repl);
3049 return true;
3050 }
3051
3052 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3053 Return false if no simplification can be made, true otherwise.  */
3054
3055 static bool
3056 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3057 {
3058 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3059 location_t loc = gimple_location (stmt);
3060 tree dest = gimple_call_arg (stmt, 0);
3061 tree src = gimple_call_arg (stmt, 1);
3062 tree fn, lenp1;
3063
3064 /* If the result is unused, replace stpcpy with strcpy. */
3065 if (gimple_call_lhs (stmt) == NULL_TREE)
3066 {
3067 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3068 if (!fn)
3069 return false;
3070 gimple_call_set_fndecl (stmt, fn);
3071 fold_stmt (gsi);
3072 return true;
3073 }
3074
3075 /* Set to non-null if SRC refers to an unterminated array.  */
3076 c_strlen_data data = { };
3077 /* The size of the unterminated array if SRC refers to one.  */
3078 tree size;
3079 /* True if the size is exact/constant, false if it's the lower bound
3080 of a range. */
3081 bool exact;
3082 tree len = c_strlen (src, 1, &data, 1);
3083 if (!len
3084 || TREE_CODE (len) != INTEGER_CST)
3085 {
3086 data.decl = unterminated_array (src, &size, &exact);
3087 if (!data.decl)
3088 return false;
3089 }
3090
3091 if (data.decl)
3092 {
3093 /* Avoid folding calls with unterminated arrays. */
3094 if (!gimple_no_warning_p (stmt))
3095 warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
3096 exact);
3097 gimple_set_no_warning (stmt, true);
3098 return false;
3099 }
3100
3101 if (optimize_function_for_size_p (cfun)
3102 /* If length is zero it's small enough. */
3103 && !integer_zerop (len))
3104 return false;
3105
3106 /* If the source has a known length replace stpcpy with memcpy. */
3107 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3108 if (!fn)
3109 return false;
3110
3111 gimple_seq stmts = NULL;
3112 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3113 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3114 tem, build_int_cst (size_type_node, 1));
3115 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3116 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3117 gimple_move_vops (repl, stmt);
3118 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3119 /* Replace the result with dest + len. */
3120 stmts = NULL;
3121 tem = gimple_convert (&stmts, loc, sizetype, len);
3122 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3123 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3124 POINTER_PLUS_EXPR, dest, tem);
3125 gsi_replace (gsi, ret, false);
3126 /* Finally fold the memcpy call. */
3127 gimple_stmt_iterator gsi2 = *gsi;
3128 gsi_prev (&gsi2);
3129 fold_stmt (&gsi2);
3130 return true;
3131 }
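
/* Editorial illustration (not part of the original source): with a
   known source length the fold above splits stpcpy into memcpy plus
   pointer arithmetic.  A hypothetical before/after sketch:  */
#if 0
#include <string.h>
static char *
example_stpcpy (char *d)
{
  return stpcpy (d, "abc");
}
static char *
example_stpcpy_folded (char *d)
{
  memcpy (d, "abc", 4);		/* Length + 1 bytes, nul included.  */
  return d + 3;			/* stpcpy returns the address of the
				   nul it wrote.  */
}
#endif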
3132
3133 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by the GSI
3134 iterator.  FCODE is either BUILT_IN_SNPRINTF_CHK or
3135 BUILT_IN_VSNPRINTF_CHK.  Return false if a normal call should be
3136 emitted rather than transforming the call, true otherwise.  */
3138
3139 static bool
3140 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3141 enum built_in_function fcode)
3142 {
3143 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3144 tree dest, size, len, fn, fmt, flag;
3145 const char *fmt_str;
3146
3147 /* Verify the required arguments in the original call. */
3148 if (gimple_call_num_args (stmt) < 5)
3149 return false;
3150
3151 dest = gimple_call_arg (stmt, 0);
3152 len = gimple_call_arg (stmt, 1);
3153 flag = gimple_call_arg (stmt, 2);
3154 size = gimple_call_arg (stmt, 3);
3155 fmt = gimple_call_arg (stmt, 4);
3156
3157 if (! tree_fits_uhwi_p (size))
3158 return false;
3159
3160 if (! integer_all_onesp (size))
3161 {
3162 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3163 if (! tree_fits_uhwi_p (len))
3164 {
3165 /* If LEN is not constant, try MAXLEN too.
3166 For MAXLEN only allow optimizing into non-_ocs function
3167 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3168 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3169 return false;
3170 }
3171 else
3172 maxlen = len;
3173
3174 if (tree_int_cst_lt (size, maxlen))
3175 return false;
3176 }
3177
3178 if (!init_target_chars ())
3179 return false;
3180
3181 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3182 or if format doesn't contain % chars or is "%s". */
3183 if (! integer_zerop (flag))
3184 {
3185 fmt_str = c_getstr (fmt);
3186 if (fmt_str == NULL)
3187 return false;
3188 if (strchr (fmt_str, target_percent) != NULL
3189 && strcmp (fmt_str, target_percent_s))
3190 return false;
3191 }
3192
3193 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3194 available. */
3195 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3196 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3197 if (!fn)
3198 return false;
3199
3200 /* Replace the called function and the first 5 arguments by 3,
3201 retaining the trailing varargs.  */
3202 gimple_call_set_fndecl (stmt, fn);
3203 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3204 gimple_call_set_arg (stmt, 0, dest);
3205 gimple_call_set_arg (stmt, 1, len);
3206 gimple_call_set_arg (stmt, 2, fmt);
3207 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3208 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3209 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3210 fold_stmt (gsi);
3211 return true;
3212 }
3213
3214 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by the GSI
3215 iterator.  FCODE is either BUILT_IN_SPRINTF_CHK or
3216 BUILT_IN_VSPRINTF_CHK.  Return false if a normal call should be
3217 emitted rather than transforming the call, true otherwise.  */
3218
3219 static bool
3220 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3221 enum built_in_function fcode)
3222 {
3223 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3224 tree dest, size, len, fn, fmt, flag;
3225 const char *fmt_str;
3226 unsigned nargs = gimple_call_num_args (stmt);
3227
3228 /* Verify the required arguments in the original call. */
3229 if (nargs < 4)
3230 return false;
3231 dest = gimple_call_arg (stmt, 0);
3232 flag = gimple_call_arg (stmt, 1);
3233 size = gimple_call_arg (stmt, 2);
3234 fmt = gimple_call_arg (stmt, 3);
3235
3236 if (! tree_fits_uhwi_p (size))
3237 return false;
3238
3239 len = NULL_TREE;
3240
3241 if (!init_target_chars ())
3242 return false;
3243
3244 /* Check whether the format is a literal string constant. */
3245 fmt_str = c_getstr (fmt);
3246 if (fmt_str != NULL)
3247 {
3248 /* If the format doesn't contain % args or %%, we know the size. */
3249 if (strchr (fmt_str, target_percent) == 0)
3250 {
3251 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3252 len = build_int_cstu (size_type_node, strlen (fmt_str));
3253 }
3254 /* If the format is "%s" and first ... argument is a string literal,
3255 we know the size too. */
3256 else if (fcode == BUILT_IN_SPRINTF_CHK
3257 && strcmp (fmt_str, target_percent_s) == 0)
3258 {
3259 tree arg;
3260
3261 if (nargs == 5)
3262 {
3263 arg = gimple_call_arg (stmt, 4);
3264 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3265 {
3266 len = c_strlen (arg, 1);
3267 if (! len || ! tree_fits_uhwi_p (len))
3268 len = NULL_TREE;
3269 }
3270 }
3271 }
3272 }
3273
3274 if (! integer_all_onesp (size))
3275 {
3276 if (! len || ! tree_int_cst_lt (len, size))
3277 return false;
3278 }
3279
3280 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3281 or if format doesn't contain % chars or is "%s". */
3282 if (! integer_zerop (flag))
3283 {
3284 if (fmt_str == NULL)
3285 return false;
3286 if (strchr (fmt_str, target_percent) != NULL
3287 && strcmp (fmt_str, target_percent_s))
3288 return false;
3289 }
3290
3291 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3292 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3293 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3294 if (!fn)
3295 return false;
3296
3297 /* Replace the called function and the first 4 arguments by 2,
3298 retaining the trailing varargs.  */
3299 gimple_call_set_fndecl (stmt, fn);
3300 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3301 gimple_call_set_arg (stmt, 0, dest);
3302 gimple_call_set_arg (stmt, 1, fmt);
3303 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3304 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3305 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3306 fold_stmt (gsi);
3307 return true;
3308 }
3309
3310 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3311 ORIG may be null if this is a 2-argument call. We don't attempt to
3312 simplify calls with more than 3 arguments.
3313
3314 Return true if simplification was possible, otherwise false. */
3315
3316 bool
3317 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3318 {
3319 gimple *stmt = gsi_stmt (*gsi);
3320 tree dest = gimple_call_arg (stmt, 0);
3321 tree fmt = gimple_call_arg (stmt, 1);
3322 tree orig = NULL_TREE;
3323 const char *fmt_str = NULL;
3324
3325 /* Verify the required arguments in the original call. We deal with two
3326 types of sprintf() calls: 'sprintf (dest, fmt)' and
3327 'sprintf (dest, "%s", orig)'. */
3328 if (gimple_call_num_args (stmt) > 3)
3329 return false;
3330
3331 if (gimple_call_num_args (stmt) == 3)
3332 orig = gimple_call_arg (stmt, 2);
3333
3334 /* Check whether the format is a literal string constant. */
3335 fmt_str = c_getstr (fmt);
3336 if (fmt_str == NULL)
3337 return false;
3338
3339 if (!init_target_chars ())
3340 return false;
3341
3342 /* If the format doesn't contain % args or %%, use strcpy. */
3343 if (strchr (fmt_str, target_percent) == NULL)
3344 {
3345 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3346
3347 if (!fn)
3348 return false;
3349
3350 /* Don't optimize sprintf (buf, "abc", ptr++). */
3351 if (orig)
3352 return false;
3353
3354 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3355 'format' is known to contain no % formats. */
3356 gimple_seq stmts = NULL;
3357 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3358
3359 /* Propagate the NO_WARNING bit to avoid issuing the same
3360 warning more than once. */
3361 if (gimple_no_warning_p (stmt))
3362 gimple_set_no_warning (repl, true);
3363
3364 gimple_seq_add_stmt_without_update (&stmts, repl);
3365 if (tree lhs = gimple_call_lhs (stmt))
3366 {
3367 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3368 strlen (fmt_str)));
3369 gimple_seq_add_stmt_without_update (&stmts, repl);
3370 gsi_replace_with_seq_vops (gsi, stmts);
3371 /* gsi now points at the assignment to the lhs, get a
3372 stmt iterator to the strcpy call.
3373 ??? We can't use gsi_for_stmt as that doesn't work when the
3374 CFG isn't built yet. */
3375 gimple_stmt_iterator gsi2 = *gsi;
3376 gsi_prev (&gsi2);
3377 fold_stmt (&gsi2);
3378 }
3379 else
3380 {
3381 gsi_replace_with_seq_vops (gsi, stmts);
3382 fold_stmt (gsi);
3383 }
3384 return true;
3385 }
3386
3387 /* If the format is "%s", use strcpy, setting any LHS to strlen (ORIG).  */
3388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3389 {
3390 tree fn;
3391 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3392
3393 if (!fn)
3394 return false;
3395
3396 /* Don't crash on sprintf (str1, "%s"). */
3397 if (!orig)
3398 return false;
3399
3400 tree orig_len = NULL_TREE;
3401 if (gimple_call_lhs (stmt))
3402 {
3403 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3404 if (!orig_len)
3405 return false;
3406 }
3407
3408 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3409 gimple_seq stmts = NULL;
3410 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3411
3412 /* Propagate the NO_WARNING bit to avoid issuing the same
3413 warning more than once. */
3414 if (gimple_no_warning_p (stmt))
3415 gimple_set_no_warning (repl, true);
3416
3417 gimple_seq_add_stmt_without_update (&stmts, repl);
3418 if (tree lhs = gimple_call_lhs (stmt))
3419 {
3420 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3421 TREE_TYPE (orig_len)))
3422 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3423 repl = gimple_build_assign (lhs, orig_len);
3424 gimple_seq_add_stmt_without_update (&stmts, repl);
3425 gsi_replace_with_seq_vops (gsi, stmts);
3426 /* gsi now points at the assignment to the lhs, get a
3427 stmt iterator to the strcpy call.
3428 ??? We can't use gsi_for_stmt as that doesn't work when the
3429 CFG isn't built yet. */
3430 gimple_stmt_iterator gsi2 = *gsi;
3431 gsi_prev (&gsi2);
3432 fold_stmt (&gsi2);
3433 }
3434 else
3435 {
3436 gsi_replace_with_seq_vops (gsi, stmts);
3437 fold_stmt (gsi);
3438 }
3439 return true;
3440 }
3441 return false;
3442 }
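
/* Editorial illustration (not part of the original source): the two
   cases handled above.

     sprintf (d, "abc")     =>  strcpy (d, "abc"), with any LHS set
                                to the constant 3
     sprintf (d, "%s", s)   =>  strcpy (d, s), with any LHS set to
                                strlen (s) when that is known  */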
3443
3444 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3445 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3446 attempt to simplify calls with more than 4 arguments.
3447
3448 Return true if simplification was possible, otherwise false. */
3449
3450 bool
3451 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3452 {
3453 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3454 tree dest = gimple_call_arg (stmt, 0);
3455 tree destsize = gimple_call_arg (stmt, 1);
3456 tree fmt = gimple_call_arg (stmt, 2);
3457 tree orig = NULL_TREE;
3458 const char *fmt_str = NULL;
3459
3460 if (gimple_call_num_args (stmt) > 4)
3461 return false;
3462
3463 if (gimple_call_num_args (stmt) == 4)
3464 orig = gimple_call_arg (stmt, 3);
3465
3466 if (!tree_fits_uhwi_p (destsize))
3467 return false;
3468 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3469
3470 /* Check whether the format is a literal string constant. */
3471 fmt_str = c_getstr (fmt);
3472 if (fmt_str == NULL)
3473 return false;
3474
3475 if (!init_target_chars ())
3476 return false;
3477
3478 /* If the format doesn't contain % args or %%, use strcpy. */
3479 if (strchr (fmt_str, target_percent) == NULL)
3480 {
3481 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3482 if (!fn)
3483 return false;
3484
3485 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3486 if (orig)
3487 return false;
3488
3489 /* We could expand this as
3490 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3491 or to
3492 memcpy (str, fmt_with_nul_at_cstm1, cst);
3493 but in the former case that might increase code size
3494 and in the latter case grow .rodata section too much.
3495 So punt for now. */
3496 size_t len = strlen (fmt_str);
3497 if (len >= destlen)
3498 return false;
3499
3500 gimple_seq stmts = NULL;
3501 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3502 gimple_seq_add_stmt_without_update (&stmts, repl);
3503 if (tree lhs = gimple_call_lhs (stmt))
3504 {
3505 repl = gimple_build_assign (lhs,
3506 build_int_cst (TREE_TYPE (lhs), len));
3507 gimple_seq_add_stmt_without_update (&stmts, repl);
3508 gsi_replace_with_seq_vops (gsi, stmts);
3509 /* gsi now points at the assignment to the lhs; get a
3510 stmt iterator to the strcpy call.
3511 ??? We can't use gsi_for_stmt as that doesn't work when the
3512 CFG isn't built yet. */
3513 gimple_stmt_iterator gsi2 = *gsi;
3514 gsi_prev (&gsi2);
3515 fold_stmt (&gsi2);
3516 }
3517 else
3518 {
3519 gsi_replace_with_seq_vops (gsi, stmts);
3520 fold_stmt (gsi);
3521 }
3522 return true;
3523 }
3524
3525 /* If the format is "%s", convert to strcpy when the length of the source string is known to be smaller than DESTSIZE. */
3526 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3527 {
3528 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3529 if (!fn)
3530 return false;
3531
3532 /* Don't crash on snprintf (str1, cst, "%s"). */
3533 if (!orig)
3534 return false;
3535
3536 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3537 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3538 return false;
3539
3540 /* We could expand this as
3541 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3542 or to
3543 memcpy (str1, str2_with_nul_at_cstm1, cst);
3544 but in the former case that might increase code size
3545 and in the latter case grow .rodata section too much.
3546 So punt for now. */
3547 if (compare_tree_int (orig_len, destlen) >= 0)
3548 return false;
3549
3550 /* Convert snprintf (str1, cst, "%s", str2) into
3551 strcpy (str1, str2) if strlen (str2) < cst. */
3552 gimple_seq stmts = NULL;
3553 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3554 gimple_seq_add_stmt_without_update (&stmts, repl);
3555 if (tree lhs = gimple_call_lhs (stmt))
3556 {
3557 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3558 TREE_TYPE (orig_len)))
3559 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3560 repl = gimple_build_assign (lhs, orig_len);
3561 gimple_seq_add_stmt_without_update (&stmts, repl);
3562 gsi_replace_with_seq_vops (gsi, stmts);
3563 /* gsi now points at the assignment to the lhs; get a
3564 stmt iterator to the strcpy call.
3565 ??? We can't use gsi_for_stmt as that doesn't work when the
3566 CFG isn't built yet. */
3567 gimple_stmt_iterator gsi2 = *gsi;
3568 gsi_prev (&gsi2);
3569 fold_stmt (&gsi2);
3570 }
3571 else
3572 {
3573 gsi_replace_with_seq_vops (gsi, stmts);
3574 fold_stmt (gsi);
3575 }
3576 return true;
3577 }
3578 return false;
3579 }
3580
3581 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3582 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3583 more than 3 arguments, and ARG may be null in the 2-argument case.
3584
3585 Return true if simplification was possible, otherwise false.
3586 FCODE is the BUILT_IN_* code of the function to be
3587 simplified. */
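/* E.g., when the return value is unused and the format is constant:
     fprintf (fp, "hello")    -> fputs ("hello", fp)
     fprintf (fp, "%s", str)  -> fputs (str, fp)
     fprintf (fp, "%c", c)    -> fputc (c, fp)  */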
3588
3589 static bool
3590 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3591 tree fp, tree fmt, tree arg,
3592 enum built_in_function fcode)
3593 {
3594 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3595 tree fn_fputc, fn_fputs;
3596 const char *fmt_str = NULL;
3597
3598 /* If the return value is used, don't do the transformation. */
3599 if (gimple_call_lhs (stmt) != NULL_TREE)
3600 return false;
3601
3602 /* Check whether the format is a literal string constant. */
3603 fmt_str = c_getstr (fmt);
3604 if (fmt_str == NULL)
3605 return false;
3606
3607 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3608 {
3609 /* If we're using an unlocked function, assume the other
3610 unlocked functions exist explicitly. */
3611 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3612 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3613 }
3614 else
3615 {
3616 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3617 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3618 }
3619
3620 if (!init_target_chars ())
3621 return false;
3622
3623 /* If the format doesn't contain % args or %%, use fputs. */
3624 if (strchr (fmt_str, target_percent) == NULL)
3625 {
3626 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3627 && arg)
3628 return false;
3629
3630 /* If the format specifier was "", fprintf does nothing. */
3631 if (fmt_str[0] == '\0')
3632 {
3633 replace_call_with_value (gsi, NULL_TREE);
3634 return true;
3635 }
3636
3637 /* When "string" doesn't contain %, replace all cases of
3638 fprintf (fp, string) with fputs (string, fp). The fputs
3639 builtin will take care of special cases like length == 1. */
3640 if (fn_fputs)
3641 {
3642 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3643 replace_call_with_call_and_fold (gsi, repl);
3644 return true;
3645 }
3646 }
3647
3648 /* The other optimizations can be done only on the non-va_list variants. */
3649 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3650 return false;
3651
3652 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3653 else if (strcmp (fmt_str, target_percent_s) == 0)
3654 {
3655 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3656 return false;
3657 if (fn_fputs)
3658 {
3659 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3660 replace_call_with_call_and_fold (gsi, repl);
3661 return true;
3662 }
3663 }
3664
3665 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3666 else if (strcmp (fmt_str, target_percent_c) == 0)
3667 {
3668 if (!arg
3669 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3670 return false;
3671 if (fn_fputc)
3672 {
3673 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3674 replace_call_with_call_and_fold (gsi, repl);
3675 return true;
3676 }
3677 }
3678
3679 return false;
3680 }
3681
3682 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3683 FMT and ARG are the arguments to the call; we don't fold cases with
3684 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3685
3686 Return true if simplification was possible, otherwise false.
3687 FCODE is the BUILT_IN_* code of the function to be
3688 simplified. */
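/* E.g., when the return value is unused:
     printf ("x")        -> putchar ('x')
     printf ("abc\n")    -> puts ("abc")
     printf ("%s\n", s)  -> puts (s)
     printf ("%c", c)    -> putchar (c)  */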
3689
3690 static bool
3691 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3692 tree arg, enum built_in_function fcode)
3693 {
3694 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3695 tree fn_putchar, fn_puts, newarg;
3696 const char *fmt_str = NULL;
3697
3698 /* If the return value is used, don't do the transformation. */
3699 if (gimple_call_lhs (stmt) != NULL_TREE)
3700 return false;
3701
3702 /* Check whether the format is a literal string constant. */
3703 fmt_str = c_getstr (fmt);
3704 if (fmt_str == NULL)
3705 return false;
3706
3707 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3708 {
3709 /* If we're using an unlocked function, assume the other
3710 unlocked functions exist explicitly. */
3711 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3712 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3713 }
3714 else
3715 {
3716 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3717 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3718 }
3719
3720 if (!init_target_chars ())
3721 return false;
3722
3723 if (strcmp (fmt_str, target_percent_s) == 0
3724 || strchr (fmt_str, target_percent) == NULL)
3725 {
3726 const char *str;
3727
3728 if (strcmp (fmt_str, target_percent_s) == 0)
3729 {
3730 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3731 return false;
3732
3733 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3734 return false;
3735
3736 str = c_getstr (arg);
3737 if (str == NULL)
3738 return false;
3739 }
3740 else
3741 {
3742 /* The format specifier doesn't contain any '%' characters. */
3743 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3744 && arg)
3745 return false;
3746 str = fmt_str;
3747 }
3748
3749 /* If the string was "", printf does nothing. */
3750 if (str[0] == '\0')
3751 {
3752 replace_call_with_value (gsi, NULL_TREE);
3753 return true;
3754 }
3755
3756 /* If the string has a length of 1, call putchar. */
3757 if (str[1] == '\0')
3758 {
3759 /* Given printf("c"), (where c is any one character,)
3760 convert "c"[0] to an int and pass that to the replacement
3761 function. */
3762 newarg = build_int_cst (integer_type_node, str[0]);
3763 if (fn_putchar)
3764 {
3765 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3766 replace_call_with_call_and_fold (gsi, repl);
3767 return true;
3768 }
3769 }
3770 else
3771 {
3772 /* If the string was "string\n", call puts("string"). */
3773 size_t len = strlen (str);
3774 if ((unsigned char)str[len - 1] == target_newline
3775 && (size_t) (int) len == len
3776 && (int) len > 0)
3777 {
3778 char *newstr;
3779
3780 /* Create a NUL-terminated string that's one char shorter
3781 than the original, stripping off the trailing '\n'. */
3782 newstr = xstrdup (str);
3783 newstr[len - 1] = '\0';
3784 newarg = build_string_literal (len, newstr);
3785 free (newstr);
3786 if (fn_puts)
3787 {
3788 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3789 replace_call_with_call_and_fold (gsi, repl);
3790 return true;
3791 }
3792 }
3793 else
3794 /* We'd like to arrange to call fputs(string,stdout) here,
3795 but we need stdout and don't have a way to get it yet. */
3796 return false;
3797 }
3798 }
3799
3800 /* The other optimizations can be done only on the non-va_list variants. */
3801 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3802 return false;
3803
3804 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3805 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3806 {
3807 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3808 return false;
3809 if (fn_puts)
3810 {
3811 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3812 replace_call_with_call_and_fold (gsi, repl);
3813 return true;
3814 }
3815 }
3816
3817 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3818 else if (strcmp (fmt_str, target_percent_c) == 0)
3819 {
3820 if (!arg || ! useless_type_conversion_p (integer_type_node,
3821 TREE_TYPE (arg)))
3822 return false;
3823 if (fn_putchar)
3824 {
3825 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3826 replace_call_with_call_and_fold (gsi, repl);
3827 return true;
3828 }
3829 }
3830
3831 return false;
3832 }
3833
3834
3835
3836 /* Fold a call to __builtin_strlen to a constant when the string length is known, otherwise record the length range of the result. */
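/* E.g., strlen ("abcd") folds to the constant 4, while for
   char buf[8] with unknown contents the call is left in place but
   the range of its result is recorded as [0, 7].  */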
3837
3838 static bool
3839 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3840 {
3841 gimple *stmt = gsi_stmt (*gsi);
3842 tree arg = gimple_call_arg (stmt, 0);
3843
3844 wide_int minlen;
3845 wide_int maxlen;
3846
3847 c_strlen_data lendata = { };
3848 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3849 && !lendata.decl
3850 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3851 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3852 {
3853 /* The range of lengths refers to either a single constant
3854 string or to the longest and shortest constant string
3855 referenced by the argument of the strlen() call, or to
3856 the strings that can possibly be stored in the arrays
3857 the argument refers to. */
3858 minlen = wi::to_wide (lendata.minlen);
3859 maxlen = wi::to_wide (lendata.maxlen);
3860 }
3861 else
3862 {
3863 unsigned prec = TYPE_PRECISION (sizetype);
3864
3865 minlen = wi::shwi (0, prec);
3866 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3867 }
3868
3869 if (minlen == maxlen)
3870 {
3871 /* Fold the strlen call to a constant. */
3872 tree type = TREE_TYPE (lendata.minlen);
3873 tree len = force_gimple_operand_gsi (gsi,
3874 wide_int_to_tree (type, minlen),
3875 true, NULL, true, GSI_SAME_STMT);
3876 replace_call_with_value (gsi, len);
3877 return true;
3878 }
3879
3880 /* Set the strlen() range to [MINLEN, MAXLEN]. */
3881 if (tree lhs = gimple_call_lhs (stmt))
3882 set_strlen_range (lhs, minlen, maxlen);
3883
3884 return false;
3885 }
3886
3887 /* Fold a call to __builtin_acc_on_device. */
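/* E.g., when folded in the host compiler this becomes
     tmp1 = arg0 == GOMP_DEVICE_HOST;
     tmp2 = arg0 == GOMP_DEVICE_NONE;
     result = tmp1 | tmp2;
   and an accelerator compiler substitutes its own device constants.  */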
3888
3889 static bool
3890 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3891 {
3892 /* Defer folding until we know which compiler we're in. */
3893 if (symtab->state != EXPANSION)
3894 return false;
3895
3896 unsigned val_host = GOMP_DEVICE_HOST;
3897 unsigned val_dev = GOMP_DEVICE_NONE;
3898
3899 #ifdef ACCEL_COMPILER
3900 val_host = GOMP_DEVICE_NOT_HOST;
3901 val_dev = ACCEL_COMPILER_acc_device;
3902 #endif
3903
3904 location_t loc = gimple_location (gsi_stmt (*gsi));
3905
3906 tree host_eq = make_ssa_name (boolean_type_node);
3907 gimple *host_ass = gimple_build_assign
3908 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3909 gimple_set_location (host_ass, loc);
3910 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3911
3912 tree dev_eq = make_ssa_name (boolean_type_node);
3913 gimple *dev_ass = gimple_build_assign
3914 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3915 gimple_set_location (dev_ass, loc);
3916 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3917
3918 tree result = make_ssa_name (boolean_type_node);
3919 gimple *result_ass = gimple_build_assign
3920 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3921 gimple_set_location (result_ass, loc);
3922 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3923
3924 replace_call_with_value (gsi, result);
3925
3926 return true;
3927 }
3928
3929 /* Fold realloc (0, n) -> malloc (n). */
3930
3931 static bool
3932 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3933 {
3934 gimple *stmt = gsi_stmt (*gsi);
3935 tree arg = gimple_call_arg (stmt, 0);
3936 tree size = gimple_call_arg (stmt, 1);
3937
3938 if (operand_equal_p (arg, null_pointer_node, 0))
3939 {
3940 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3941 if (fn_malloc)
3942 {
3943 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3944 replace_call_with_call_and_fold (gsi, repl);
3945 return true;
3946 }
3947 }
3948 return false;
3949 }
3950
3951 /* Number of bytes into which any type other than an aggregate
3952 or vector type should fit. */
3953 static constexpr size_t clear_padding_unit
3954 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
3955 /* Buffer size on which __builtin_clear_padding folding code works. */
3956 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
3957
3958 /* Data passed through __builtin_clear_padding folding. */
3959 struct clear_padding_struct {
3960 location_t loc;
3961 tree base;
3962 tree alias_type;
3963 gimple_stmt_iterator *gsi;
3964 /* Alignment of buf->base + 0. */
3965 unsigned align;
3966 /* Offset from buf->base. Should always be a multiple of UNITS_PER_WORD. */
3967 HOST_WIDE_INT off;
3968 /* Number of padding bytes before buf->off for which padding-clearing
3969 code hasn't been emitted yet. */
3970 HOST_WIDE_INT padding_bytes;
3971 /* The size of the whole object. Never emit code to touch
3972 buf->base + buf->sz or following bytes. */
3973 HOST_WIDE_INT sz;
3974 /* Number of bytes recorded in buf->buf. */
3975 size_t size;
3976 /* When inside a union, instead of emitting code we AND the bits
3977 into the union_ptr array. */
3978 unsigned char *union_ptr;
3979 /* Set bits mean padding bits that need to be cleared by the builtin. */
3980 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
3981 };
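/* As an illustration of the buffer contents, assuming a 4-byte,
   4-byte-aligned int, for
     struct S { char c; int i; };
   the recorded mask would be
     buf[0] = 0x00        char c, no padding
     buf[1..3] = 0xff     3 bytes of padding before i
     buf[4..7] = 0x00     int i, no padding
   and flushing it emits stores that zero bytes 1 through 3.  */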
3982
3983 /* Emit code to clear padding requested in BUF->buf: set bits
3984 in there stand for padding that should be cleared. FULL is true
3985 if everything from the buffer should be flushed, otherwise
3986 it can leave up to 2 * clear_padding_unit bytes for further
3987 processing. */
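/* E.g., a word recording 00 ff ff ff 00 00 00 00 consists of whole
   padding bytes only, so the flush emits a plain store zeroing the
   three 0xff bytes; a partially-padded byte such as 0xf0, produced by
   a bit-field, instead forces a read-modify-write: load the word,
   AND it with the complement of the mask, and store it back.  */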
3988
3989 static void
3990 clear_padding_flush (clear_padding_struct *buf, bool full)
3991 {
3992 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
3993 if (!full && buf->size < 2 * clear_padding_unit)
3994 return;
3995 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
3996 size_t end = buf->size;
3997 if (!full)
3998 end = ((end - clear_padding_unit - 1) / clear_padding_unit
3999 * clear_padding_unit);
4000 size_t padding_bytes = buf->padding_bytes;
4001 if (buf->union_ptr)
4002 {
4003 /* Inside of a union, instead of emitting any code, just
4004 clear all bits in the union_ptr buffer that are clear
4005 in buf. Whole padding bytes don't clear anything. */
4006 for (size_t i = 0; i < end; i++)
4007 {
4008 if (buf->buf[i] == (unsigned char) ~0)
4009 padding_bytes++;
4010 else
4011 {
4012 padding_bytes = 0;
4013 buf->union_ptr[buf->off + i] &= buf->buf[i];
4014 }
4015 }
4016 if (full)
4017 {
4018 buf->off = 0;
4019 buf->size = 0;
4020 buf->padding_bytes = 0;
4021 }
4022 else
4023 {
4024 memmove (buf->buf, buf->buf + end, buf->size - end);
4025 buf->off += end;
4026 buf->size -= end;
4027 buf->padding_bytes = padding_bytes;
4028 }
4029 return;
4030 }
4031 size_t wordsize = UNITS_PER_WORD;
4032 for (size_t i = 0; i < end; i += wordsize)
4033 {
4034 size_t nonzero_first = wordsize;
4035 size_t nonzero_last = 0;
4036 size_t zero_first = wordsize;
4037 size_t zero_last = 0;
4038 bool all_ones = true, bytes_only = true;
4039 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4040 > (unsigned HOST_WIDE_INT) buf->sz)
4041 {
4042 gcc_assert (wordsize > 1);
4043 wordsize /= 2;
4044 i -= wordsize;
4045 continue;
4046 }
4047 for (size_t j = i; j < i + wordsize && j < end; j++)
4048 {
4049 if (buf->buf[j])
4050 {
4051 if (nonzero_first == wordsize)
4052 {
4053 nonzero_first = j - i;
4054 nonzero_last = j - i;
4055 }
4056 if (nonzero_last != j - i)
4057 all_ones = false;
4058 nonzero_last = j + 1 - i;
4059 }
4060 else
4061 {
4062 if (zero_first == wordsize)
4063 zero_first = j - i;
4064 zero_last = j + 1 - i;
4065 }
4066 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4067 {
4068 all_ones = false;
4069 bytes_only = false;
4070 }
4071 }
4072 size_t padding_end = i;
4073 if (padding_bytes)
4074 {
4075 if (nonzero_first == 0
4076 && nonzero_last == wordsize
4077 && all_ones)
4078 {
4079 /* All bits are padding and we had some padding
4080 before too. Just extend it. */
4081 padding_bytes += wordsize;
4082 continue;
4083 }
4084 if (all_ones && nonzero_first == 0)
4085 {
4086 padding_bytes += nonzero_last;
4087 padding_end += nonzero_last;
4088 nonzero_first = wordsize;
4089 nonzero_last = 0;
4090 }
4091 else if (bytes_only && nonzero_first == 0)
4092 {
4093 gcc_assert (zero_first && zero_first != wordsize);
4094 padding_bytes += zero_first;
4095 padding_end += zero_first;
4096 }
4097 tree atype, src;
4098 if (padding_bytes == 1)
4099 {
4100 atype = char_type_node;
4101 src = build_zero_cst (char_type_node);
4102 }
4103 else
4104 {
4105 atype = build_array_type_nelts (char_type_node, padding_bytes);
4106 src = build_constructor (atype, NULL);
4107 }
4108 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4109 build_int_cst (buf->alias_type,
4110 buf->off + padding_end
4111 - padding_bytes));
4112 gimple *g = gimple_build_assign (dst, src);
4113 gimple_set_location (g, buf->loc);
4114 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4115 padding_bytes = 0;
4116 buf->padding_bytes = 0;
4117 }
4118 if (nonzero_first == wordsize)
4119 /* All bits in a word are 0; there are no padding bits. */
4120 continue;
4121 if (all_ones && nonzero_last == wordsize)
4122 {
4123 /* All bits between nonzero_first and the end of the word are
4124 padding bits; start counting padding_bytes. */
4125 padding_bytes = nonzero_last - nonzero_first;
4126 continue;
4127 }
4128 if (bytes_only)
4129 {
4130 /* If bitfields aren't involved in this word, prefer storing
4131 individual bytes or groups of them over performing an RMW
4132 operation on the whole word. */
4133 gcc_assert (i + zero_last <= end);
4134 for (size_t j = padding_end; j < i + zero_last; j++)
4135 {
4136 if (buf->buf[j])
4137 {
4138 size_t k;
4139 for (k = j; k < i + zero_last; k++)
4140 if (buf->buf[k] == 0)
4141 break;
4142 HOST_WIDE_INT off = buf->off + j;
4143 tree atype, src;
4144 if (k - j == 1)
4145 {
4146 atype = char_type_node;
4147 src = build_zero_cst (char_type_node);
4148 }
4149 else
4150 {
4151 atype = build_array_type_nelts (char_type_node, k - j);
4152 src = build_constructor (atype, NULL);
4153 }
4154 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4155 buf->base,
4156 build_int_cst (buf->alias_type, off));
4157 gimple *g = gimple_build_assign (dst, src);
4158 gimple_set_location (g, buf->loc);
4159 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4160 j = k;
4161 }
4162 }
4163 if (nonzero_last == wordsize)
4164 padding_bytes = nonzero_last - zero_last;
4165 continue;
4166 }
4167 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4168 {
4169 if (nonzero_last - nonzero_first <= eltsz
4170 && ((nonzero_first & ~(eltsz - 1))
4171 == ((nonzero_last - 1) & ~(eltsz - 1))))
4172 {
4173 tree type;
4174 if (eltsz == 1)
4175 type = char_type_node;
4176 else
4177 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4178 0);
4179 size_t start = nonzero_first & ~(eltsz - 1);
4180 HOST_WIDE_INT off = buf->off + i + start;
4181 tree atype = type;
4182 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4183 atype = build_aligned_type (type, buf->align);
4184 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4185 build_int_cst (buf->alias_type, off));
4186 tree src;
4187 gimple *g;
4188 if (all_ones
4189 && nonzero_first == start
4190 && nonzero_last == start + eltsz)
4191 src = build_zero_cst (type);
4192 else
4193 {
4194 src = make_ssa_name (type);
4195 g = gimple_build_assign (src, unshare_expr (dst));
4196 gimple_set_location (g, buf->loc);
4197 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4198 tree mask = native_interpret_expr (type,
4199 buf->buf + i + start,
4200 eltsz);
4201 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4202 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4203 tree src_masked = make_ssa_name (type);
4204 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4205 src, mask);
4206 gimple_set_location (g, buf->loc);
4207 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4208 src = src_masked;
4209 }
4210 g = gimple_build_assign (dst, src);
4211 gimple_set_location (g, buf->loc);
4212 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4213 break;
4214 }
4215 }
4216 }
4217 if (full)
4218 {
4219 if (padding_bytes)
4220 {
4221 tree atype, src;
4222 if (padding_bytes == 1)
4223 {
4224 atype = char_type_node;
4225 src = build_zero_cst (char_type_node);
4226 }
4227 else
4228 {
4229 atype = build_array_type_nelts (char_type_node, padding_bytes);
4230 src = build_constructor (atype, NULL);
4231 }
4232 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4233 build_int_cst (buf->alias_type,
4234 buf->off + end
4235 - padding_bytes));
4236 gimple *g = gimple_build_assign (dst, src);
4237 gimple_set_location (g, buf->loc);
4238 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4239 }
4240 size_t end_rem = end % UNITS_PER_WORD;
4241 buf->off += end - end_rem;
4242 buf->size = end_rem;
4243 memset (buf->buf, 0, buf->size);
4244 buf->padding_bytes = 0;
4245 }
4246 else
4247 {
4248 memmove (buf->buf, buf->buf + end, buf->size - end);
4249 buf->off += end;
4250 buf->size -= end;
4251 buf->padding_bytes = padding_bytes;
4252 }
4253 }
4254
4255 /* Append PADDING_BYTES padding bytes. */
4256
4257 static void
4258 clear_padding_add_padding (clear_padding_struct *buf,
4259 HOST_WIDE_INT padding_bytes)
4260 {
4261 if (padding_bytes == 0)
4262 return;
4263 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4264 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4265 clear_padding_flush (buf, false);
4266 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4267 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4268 {
4269 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4270 padding_bytes -= clear_padding_buf_size - buf->size;
4271 buf->size = clear_padding_buf_size;
4272 clear_padding_flush (buf, false);
4273 gcc_assert (buf->padding_bytes);
4274 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4275 are guaranteed to be all ones. */
4276 padding_bytes += buf->size;
4277 buf->size = padding_bytes % UNITS_PER_WORD;
4278 memset (buf->buf, ~0, buf->size);
4279 buf->off += padding_bytes - buf->size;
4280 buf->padding_bytes += padding_bytes - buf->size;
4281 }
4282 else
4283 {
4284 memset (buf->buf + buf->size, ~0, padding_bytes);
4285 buf->size += padding_bytes;
4286 }
4287 }
4288
4289 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4290
4291 /* Clear padding bits of union type TYPE. */
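/* E.g., for
     union U { short s; int i; };
   (assuming a 2-byte short and a 4-byte int) the short alternative
   marks bytes 2..3 as padding while the int alternative marks none;
   ANDing the two masks leaves nothing to clear, since every byte
   carries data in at least one member.  */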
4292
4293 static void
4294 clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4295 {
4296 clear_padding_struct *union_buf;
4297 HOST_WIDE_INT start_off = 0, next_off = 0;
4298 size_t start_size = 0;
4299 if (buf->union_ptr)
4300 {
4301 start_off = buf->off + buf->size;
4302 next_off = start_off + sz;
4303 start_size = start_off % UNITS_PER_WORD;
4304 start_off -= start_size;
4305 clear_padding_flush (buf, true);
4306 union_buf = buf;
4307 }
4308 else
4309 {
4310 if (sz + buf->size > clear_padding_buf_size)
4311 clear_padding_flush (buf, false);
4312 union_buf = XALLOCA (clear_padding_struct);
4313 union_buf->loc = buf->loc;
4314 union_buf->base = NULL_TREE;
4315 union_buf->alias_type = NULL_TREE;
4316 union_buf->gsi = NULL;
4317 union_buf->align = 0;
4318 union_buf->off = 0;
4319 union_buf->padding_bytes = 0;
4320 union_buf->sz = sz;
4321 union_buf->size = 0;
4322 if (sz + buf->size <= clear_padding_buf_size)
4323 union_buf->union_ptr = buf->buf + buf->size;
4324 else
4325 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4326 memset (union_buf->union_ptr, ~0, sz);
4327 }
4328
4329 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4330 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4331 {
4332 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4333 {
4334 if (TREE_TYPE (field) == error_mark_node)
4335 continue;
4336 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4337 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4338 error_at (buf->loc, "flexible array member %qD does not have "
4339 "well defined padding bits for %qs",
4340 field, "__builtin_clear_padding");
4341 continue;
4342 }
4343 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4344 gcc_assert (union_buf->size == 0);
4345 union_buf->off = start_off;
4346 union_buf->size = start_size;
4347 memset (union_buf->buf, ~0, start_size);
4348 clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4349 clear_padding_add_padding (union_buf, sz - fldsz);
4350 clear_padding_flush (union_buf, true);
4351 }
4352
4353 if (buf == union_buf)
4354 {
4355 buf->off = next_off;
4356 buf->size = next_off % UNITS_PER_WORD;
4357 buf->off -= buf->size;
4358 memset (buf->buf, ~0, buf->size);
4359 }
4360 else if (sz + buf->size <= clear_padding_buf_size)
4361 buf->size += sz;
4362 else
4363 {
4364 unsigned char *union_ptr = union_buf->union_ptr;
4365 while (sz)
4366 {
4367 clear_padding_flush (buf, false);
4368 HOST_WIDE_INT this_sz
4369 = MIN ((unsigned HOST_WIDE_INT) sz,
4370 clear_padding_buf_size - buf->size);
4371 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4372 buf->size += this_sz;
4373 union_ptr += this_sz;
4374 sz -= this_sz;
4375 }
4376 XDELETE (union_buf->union_ptr);
4377 }
4378 }
4379
4380 /* The only known floating point formats with padding bits are the
4381 IEEE extended ones. */
4382
4383 static bool
4384 clear_padding_real_needs_padding_p (tree type)
4385 {
4386 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4387 return (fmt->b == 2
4388 && fmt->signbit_ro == fmt->signbit_rw
4389 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4390 }
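/* E.g., the x86 80-bit extended format keeps its sign bit at bit 79
   while occupying 12 or 16 bytes in memory, so the bytes above it are
   padding; the m68k variant (sign bit at bit 95) instead carries 16
   unused bits between the exponent and the mantissa.  */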
4391
4392 /* Return true if TYPE might contain any padding bits. */
4393
4394 static bool
4395 clear_padding_type_may_have_padding_p (tree type)
4396 {
4397 switch (TREE_CODE (type))
4398 {
4399 case RECORD_TYPE:
4400 case UNION_TYPE:
4401 return true;
4402 case ARRAY_TYPE:
4403 case COMPLEX_TYPE:
4404 case VECTOR_TYPE:
4405 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4406 case REAL_TYPE:
4407 return clear_padding_real_needs_padding_p (type);
4408 default:
4409 return false;
4410 }
4411 }
4412
4413 /* Emit a runtime loop:
4414 for (; buf.base != end; buf.base += sz)
4415 __builtin_clear_padding (buf.base); */
4416
4417 static void
4418 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4419 {
4420 tree l1 = create_artificial_label (buf->loc);
4421 tree l2 = create_artificial_label (buf->loc);
4422 tree l3 = create_artificial_label (buf->loc);
4423 gimple *g = gimple_build_goto (l2);
4424 gimple_set_location (g, buf->loc);
4425 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4426 g = gimple_build_label (l1);
4427 gimple_set_location (g, buf->loc);
4428 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4429 clear_padding_type (buf, type, buf->sz);
4430 clear_padding_flush (buf, true);
4431 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4432 size_int (buf->sz));
4433 gimple_set_location (g, buf->loc);
4434 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4435 g = gimple_build_label (l2);
4436 gimple_set_location (g, buf->loc);
4437 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4438 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4439 gimple_set_location (g, buf->loc);
4440 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4441 g = gimple_build_label (l3);
4442 gimple_set_location (g, buf->loc);
4443 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4444 }
4445
4446 /* Clear padding bits for TYPE. Called recursively from
4447 gimple_fold_builtin_clear_padding. */
4448
4449 static void
4450 clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4451 {
4452 switch (TREE_CODE (type))
4453 {
4454 case RECORD_TYPE:
4455 HOST_WIDE_INT cur_pos;
4456 cur_pos = 0;
4457 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4458 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4459 {
4460 tree ftype = TREE_TYPE (field);
4461 if (DECL_BIT_FIELD (field))
4462 {
4463 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4464 if (fldsz == 0)
4465 continue;
4466 HOST_WIDE_INT pos = int_byte_position (field);
4467 HOST_WIDE_INT bpos
4468 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4469 bpos %= BITS_PER_UNIT;
4470 HOST_WIDE_INT end
4471 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4472 if (pos + end > cur_pos)
4473 {
4474 clear_padding_add_padding (buf, pos + end - cur_pos);
4475 cur_pos = pos + end;
4476 }
4477 gcc_assert (cur_pos > pos
4478 && ((unsigned HOST_WIDE_INT) buf->size
4479 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4480 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4481 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4482 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4483 " in %qs", "__builtin_clear_padding");
4484 else if (BYTES_BIG_ENDIAN)
4485 {
4486 /* Big endian. */
4487 if (bpos + fldsz <= BITS_PER_UNIT)
4488 *p &= ~(((1 << fldsz) - 1)
4489 << (BITS_PER_UNIT - bpos - fldsz));
4490 else
4491 {
4492 if (bpos)
4493 {
4494 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4495 p++;
4496 fldsz -= BITS_PER_UNIT - bpos;
4497 }
4498 memset (p, 0, fldsz / BITS_PER_UNIT);
4499 p += fldsz / BITS_PER_UNIT;
4500 fldsz %= BITS_PER_UNIT;
4501 if (fldsz)
4502 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4503 }
4504 }
4505 else
4506 {
4507 /* Little endian. */
4508 if (bpos + fldsz <= BITS_PER_UNIT)
4509 *p &= ~(((1 << fldsz) - 1) << bpos);
4510 else
4511 {
4512 if (bpos)
4513 {
4514 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4515 p++;
4516 fldsz -= BITS_PER_UNIT - bpos;
4517 }
4518 memset (p, 0, fldsz / BITS_PER_UNIT);
4519 p += fldsz / BITS_PER_UNIT;
4520 fldsz %= BITS_PER_UNIT;
4521 if (fldsz)
4522 *p &= ~((1 << fldsz) - 1);
4523 }
4524 }
4525 }
4526 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4527 {
4528 if (ftype == error_mark_node)
4529 continue;
4530 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4531 && !COMPLETE_TYPE_P (ftype));
4532 error_at (buf->loc, "flexible array member %qD does not have "
4533 "well defined padding bits for %qs",
4534 field, "__builtin_clear_padding");
4535 }
4536 else
4537 {
4538 HOST_WIDE_INT pos = int_byte_position (field);
4539 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4540 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4541 clear_padding_add_padding (buf, pos - cur_pos);
4542 cur_pos = pos;
4543 clear_padding_type (buf, TREE_TYPE (field), fldsz);
4544 cur_pos += fldsz;
4545 }
4546 }
4547 gcc_assert (sz >= cur_pos);
4548 clear_padding_add_padding (buf, sz - cur_pos);
4549 break;
4550 case ARRAY_TYPE:
4551 HOST_WIDE_INT nelts, fldsz;
4552 fldsz = int_size_in_bytes (TREE_TYPE (type));
4553 nelts = sz / fldsz;
4554 if (nelts > 1
4555 && sz > 8 * UNITS_PER_WORD
4556 && buf->union_ptr == NULL
4557 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4558 {
4559 /* For a sufficiently large array of more than one element,
4560 emit a runtime loop to keep code size manageable. */
4561 tree base = buf->base;
4562 unsigned int prev_align = buf->align;
4563 HOST_WIDE_INT off = buf->off + buf->size;
4564 HOST_WIDE_INT prev_sz = buf->sz;
4565 clear_padding_flush (buf, true);
4566 tree elttype = TREE_TYPE (type);
4567 buf->base = create_tmp_var (build_pointer_type (elttype));
4568 tree end = make_ssa_name (TREE_TYPE (buf->base));
4569 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4570 base, size_int (off));
4571 gimple_set_location (g, buf->loc);
4572 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4573 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4574 size_int (sz));
4575 gimple_set_location (g, buf->loc);
4576 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4577 buf->sz = fldsz;
4578 buf->align = TYPE_ALIGN (elttype);
4579 buf->off = 0;
4580 buf->size = 0;
4581 clear_padding_emit_loop (buf, elttype, end);
4582 buf->base = base;
4583 buf->sz = prev_sz;
4584 buf->align = prev_align;
4585 buf->size = off % UNITS_PER_WORD;
4586 buf->off = off - buf->size;
4587 memset (buf->buf, 0, buf->size);
4588 break;
4589 }
4590 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4591 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4592 break;
4593 case UNION_TYPE:
4594 clear_padding_union (buf, type, sz);
4595 break;
4596 case REAL_TYPE:
4597 gcc_assert ((size_t) sz <= clear_padding_unit);
4598 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4599 clear_padding_flush (buf, false);
4600 if (clear_padding_real_needs_padding_p (type))
4601 {
4602 /* Use native_interpret_expr + native_encode_expr to figure out
4603 which bits are padding. */
4604 memset (buf->buf + buf->size, ~0, sz);
4605 tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
4606 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4607 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4608 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4609 for (size_t i = 0; i < (size_t) sz; i++)
4610 buf->buf[buf->size + i] ^= ~0;
4611 }
4612 else
4613 memset (buf->buf + buf->size, 0, sz);
4614 buf->size += sz;
4615 break;
4616 case COMPLEX_TYPE:
4617 fldsz = int_size_in_bytes (TREE_TYPE (type));
4618 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4619 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4620 break;
4621 case VECTOR_TYPE:
4622 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4623 fldsz = int_size_in_bytes (TREE_TYPE (type));
4624 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4625 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4626 break;
4627 case NULLPTR_TYPE:
4628 gcc_assert ((size_t) sz <= clear_padding_unit);
4629 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4630 clear_padding_flush (buf, false);
4631 memset (buf->buf + buf->size, ~0, sz);
4632 buf->size += sz;
4633 break;
4634 default:
4635 gcc_assert ((size_t) sz <= clear_padding_unit);
4636 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4637 clear_padding_flush (buf, false);
4638 memset (buf->buf + buf->size, 0, sz);
4639 buf->size += sz;
4640 break;
4641 }
4642 }
4643
4644 /* Fold __builtin_clear_padding builtin. */
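/* E.g., given
     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);
   this emits stores clearing bytes 1..3 of s (assuming a 4-byte
   aligned int) while leaving the values of c and i intact.  */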
4645
4646 static bool
4647 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4648 {
4649 gimple *stmt = gsi_stmt (*gsi);
4650 gcc_assert (gimple_call_num_args (stmt) == 2);
4651 tree ptr = gimple_call_arg (stmt, 0);
4652 tree typearg = gimple_call_arg (stmt, 1);
4653 tree type = TREE_TYPE (TREE_TYPE (typearg));
4654 location_t loc = gimple_location (stmt);
4655 clear_padding_struct buf;
4656 gimple_stmt_iterator gsiprev = *gsi;
4657 /* This should be folded during the gimple lowering pass. */
4658 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4659 gcc_assert (COMPLETE_TYPE_P (type));
4660 gsi_prev (&gsiprev);
4661
4662 buf.loc = loc;
4663 buf.base = ptr;
4664 buf.alias_type = NULL_TREE;
4665 buf.gsi = gsi;
4666 buf.align = get_pointer_alignment (ptr);
4667 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4668 buf.align = MAX (buf.align, talign);
4669 buf.off = 0;
4670 buf.padding_bytes = 0;
4671 buf.size = 0;
4672 buf.sz = int_size_in_bytes (type);
4673 buf.union_ptr = NULL;
4674 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4675 sorry_at (loc, "%s not supported for variable length aggregates",
4676 "__builtin_clear_padding");
4677 /* The implementation currently assumes 8-bit host and target
4678 chars, which is the case for all currently supported targets
4679 and hosts, and is required e.g. for native_{encode,interpret}* APIs. */
4680 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4681 sorry_at (loc, "%s not supported on this target",
4682 "__builtin_clear_padding");
4683 else if (!clear_padding_type_may_have_padding_p (type))
4684 ;
4685 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4686 {
4687 tree sz = TYPE_SIZE_UNIT (type);
4688 tree elttype = type;
4689 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4690 while (TREE_CODE (elttype) == ARRAY_TYPE
4691 && int_size_in_bytes (elttype) < 0)
4692 elttype = TREE_TYPE (elttype);
4693 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4694 gcc_assert (eltsz >= 0);
4695 if (eltsz)
4696 {
4697 buf.base = create_tmp_var (build_pointer_type (elttype));
4698 tree end = make_ssa_name (TREE_TYPE (buf.base));
4699 gimple *g = gimple_build_assign (buf.base, ptr);
4700 gimple_set_location (g, loc);
4701 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4702 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4703 gimple_set_location (g, loc);
4704 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4705 buf.sz = eltsz;
4706 buf.align = TYPE_ALIGN (elttype);
4707 buf.alias_type = build_pointer_type (elttype);
4708 clear_padding_emit_loop (&buf, elttype, end);
4709 }
4710 }
4711 else
4712 {
4713 if (!is_gimple_mem_ref_addr (buf.base))
4714 {
4715 buf.base = make_ssa_name (TREE_TYPE (ptr));
4716 gimple *g = gimple_build_assign (buf.base, ptr);
4717 gimple_set_location (g, loc);
4718 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4719 }
4720 buf.alias_type = build_pointer_type (type);
4721 clear_padding_type (&buf, type, buf.sz);
4722 clear_padding_flush (&buf, true);
4723 }
4724
4725 gimple_stmt_iterator gsiprev2 = *gsi;
4726 gsi_prev (&gsiprev2);
4727 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4728 gsi_replace (gsi, gimple_build_nop (), true);
4729 else
4730 {
4731 gsi_remove (gsi, true);
4732 *gsi = gsiprev2;
4733 }
4734 return true;
4735 }
4736
4737 /* Fold the non-target builtin at *GSI and return whether any simplification
4738 was made. */
4739
4740 static bool
4741 gimple_fold_builtin (gimple_stmt_iterator *gsi)
4742 {
4743 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
4744 tree callee = gimple_call_fndecl (stmt);
4745
4746 /* Give up for always_inline inline builtins until they are
4747 inlined. */
4748 if (avoid_folding_inline_builtin (callee))
4749 return false;
4750
4751 unsigned n = gimple_call_num_args (stmt);
4752 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
4753 switch (fcode)
4754 {
4755 case BUILT_IN_BCMP:
4756 return gimple_fold_builtin_bcmp (gsi);
4757 case BUILT_IN_BCOPY:
4758 return gimple_fold_builtin_bcopy (gsi);
4759 case BUILT_IN_BZERO:
4760 return gimple_fold_builtin_bzero (gsi);
4761
4762 case BUILT_IN_MEMSET:
4763 return gimple_fold_builtin_memset (gsi,
4764 gimple_call_arg (stmt, 1),
4765 gimple_call_arg (stmt, 2));
4766 case BUILT_IN_MEMCPY:
4767 case BUILT_IN_MEMPCPY:
4768 case BUILT_IN_MEMMOVE:
4769 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
4770 gimple_call_arg (stmt, 1), fcode);
4771 case BUILT_IN_SPRINTF_CHK:
4772 case BUILT_IN_VSPRINTF_CHK:
4773 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
4774 case BUILT_IN_STRCAT_CHK:
4775 return gimple_fold_builtin_strcat_chk (gsi);
4776 case BUILT_IN_STRNCAT_CHK:
4777 return gimple_fold_builtin_strncat_chk (gsi);
4778 case BUILT_IN_STRLEN:
4779 return gimple_fold_builtin_strlen (gsi);
4780 case BUILT_IN_STRCPY:
4781 return gimple_fold_builtin_strcpy (gsi,
4782 gimple_call_arg (stmt, 0),
4783 gimple_call_arg (stmt, 1));
4784 case BUILT_IN_STRNCPY:
4785 return gimple_fold_builtin_strncpy (gsi,
4786 gimple_call_arg (stmt, 0),
4787 gimple_call_arg (stmt, 1),
4788 gimple_call_arg (stmt, 2));
4789 case BUILT_IN_STRCAT:
4790 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
4791 gimple_call_arg (stmt, 1));
4792 case BUILT_IN_STRNCAT:
4793 return gimple_fold_builtin_strncat (gsi);
4794 case BUILT_IN_INDEX:
4795 case BUILT_IN_STRCHR:
4796 return gimple_fold_builtin_strchr (gsi, false);
4797 case BUILT_IN_RINDEX:
4798 case BUILT_IN_STRRCHR:
4799 return gimple_fold_builtin_strchr (gsi, true);
4800 case BUILT_IN_STRSTR:
4801 return gimple_fold_builtin_strstr (gsi);
4802 case BUILT_IN_STRCMP:
4803 case BUILT_IN_STRCMP_EQ:
4804 case BUILT_IN_STRCASECMP:
4805 case BUILT_IN_STRNCMP:
4806 case BUILT_IN_STRNCMP_EQ:
4807 case BUILT_IN_STRNCASECMP:
4808 return gimple_fold_builtin_string_compare (gsi);
4809 case BUILT_IN_MEMCHR:
4810 return gimple_fold_builtin_memchr (gsi);
4811 case BUILT_IN_FPUTS:
4812 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4813 gimple_call_arg (stmt, 1), false);
4814 case BUILT_IN_FPUTS_UNLOCKED:
4815 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4816 gimple_call_arg (stmt, 1), true);
4817 case BUILT_IN_MEMCPY_CHK:
4818 case BUILT_IN_MEMPCPY_CHK:
4819 case BUILT_IN_MEMMOVE_CHK:
4820 case BUILT_IN_MEMSET_CHK:
4821 return gimple_fold_builtin_memory_chk (gsi,
4822 gimple_call_arg (stmt, 0),
4823 gimple_call_arg (stmt, 1),
4824 gimple_call_arg (stmt, 2),
4825 gimple_call_arg (stmt, 3),
4826 fcode);
4827 case BUILT_IN_STPCPY:
4828 return gimple_fold_builtin_stpcpy (gsi);
4829 case BUILT_IN_STRCPY_CHK:
4830 case BUILT_IN_STPCPY_CHK:
4831 return gimple_fold_builtin_stxcpy_chk (gsi,
4832 gimple_call_arg (stmt, 0),
4833 gimple_call_arg (stmt, 1),
4834 gimple_call_arg (stmt, 2),
4835 fcode);
4836 case BUILT_IN_STRNCPY_CHK:
4837 case BUILT_IN_STPNCPY_CHK:
4838 return gimple_fold_builtin_stxncpy_chk (gsi,
4839 gimple_call_arg (stmt, 0),
4840 gimple_call_arg (stmt, 1),
4841 gimple_call_arg (stmt, 2),
4842 gimple_call_arg (stmt, 3),
4843 fcode);
4844 case BUILT_IN_SNPRINTF_CHK:
4845 case BUILT_IN_VSNPRINTF_CHK:
4846 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
4847
4848 case BUILT_IN_FPRINTF:
4849 case BUILT_IN_FPRINTF_UNLOCKED:
4850 case BUILT_IN_VFPRINTF:
4851 if (n == 2 || n == 3)
4852 return gimple_fold_builtin_fprintf (gsi,
4853 gimple_call_arg (stmt, 0),
4854 gimple_call_arg (stmt, 1),
4855 n == 3
4856 ? gimple_call_arg (stmt, 2)
4857 : NULL_TREE,
4858 fcode);
4859 break;
4860 case BUILT_IN_FPRINTF_CHK:
4861 case BUILT_IN_VFPRINTF_CHK:
4862 if (n == 3 || n == 4)
4863 return gimple_fold_builtin_fprintf (gsi,
4864 gimple_call_arg (stmt, 0),
4865 gimple_call_arg (stmt, 2),
4866 n == 4
4867 ? gimple_call_arg (stmt, 3)
4868 : NULL_TREE,
4869 fcode);
4870 break;
4871 case BUILT_IN_PRINTF:
4872 case BUILT_IN_PRINTF_UNLOCKED:
4873 case BUILT_IN_VPRINTF:
4874 if (n == 1 || n == 2)
4875 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4876 n == 2
4877 ? gimple_call_arg (stmt, 1)
4878 : NULL_TREE, fcode);
4879 break;
4880 case BUILT_IN_PRINTF_CHK:
4881 case BUILT_IN_VPRINTF_CHK:
4882 if (n == 2 || n == 3)
4883 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4884 n == 3
4885 ? gimple_call_arg (stmt, 2)
4886 : NULL_TREE, fcode);
4887 break;
4888 case BUILT_IN_ACC_ON_DEVICE:
4889 return gimple_fold_builtin_acc_on_device (gsi,
4890 gimple_call_arg (stmt, 0));
4891 case BUILT_IN_REALLOC:
4892 return gimple_fold_builtin_realloc (gsi);
4893
4894 case BUILT_IN_CLEAR_PADDING:
4895 return gimple_fold_builtin_clear_padding (gsi);
4896
4897 default:;
4898 }
4899
4900 /* Try the generic builtin folder. */
4901 bool ignore = (gimple_call_lhs (stmt) == NULL);
4902 tree result = fold_call_stmt (stmt, ignore);
4903 if (result)
4904 {
4905 if (ignore)
4906 STRIP_NOPS (result);
4907 else
4908 result = fold_convert (gimple_call_return_type (stmt), result);
4909 if (!update_call_from_tree (gsi, result))
4910 gimplify_and_update_call_from_tree (gsi, result);
4911 return true;
4912 }
4913
4914 return false;
4915 }
4916
4917 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4918 function calls to constants, where possible. */
4919
4920 static tree
4921 fold_internal_goacc_dim (const gimple *call)
4922 {
4923 int axis = oacc_get_ifn_dim_arg (call);
4924 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4925 tree result = NULL_TREE;
4926 tree type = TREE_TYPE (gimple_call_lhs (call));
4927
4928 switch (gimple_call_internal_fn (call))
4929 {
4930 case IFN_GOACC_DIM_POS:
4931 /* If the size is 1, we know the answer. */
4932 if (size == 1)
4933 result = build_int_cst (type, 0);
4934 break;
4935 case IFN_GOACC_DIM_SIZE:
4936 /* If the size is not dynamic, we know the answer. */
4937 if (size)
4938 result = build_int_cst (type, size);
4939 break;
4940 default:
4941 break;
4942 }
4943
4944 return result;
4945 }
4946
4947 /* Return true if STMT is an __atomic_compare_exchange_N call that is
4948 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when the second
4949 argument is &var where var is only addressable because of such calls. */
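/* E.g., for
     int e = ...;
     bool r = __atomic_compare_exchange_4 (p, &e, d, false,
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);
   where e is a local whose address escapes only into such calls, the
   conversion below lets e be rewritten into SSA form.  */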
4950
4951 bool
4952 optimize_atomic_compare_exchange_p (gimple *stmt)
4953 {
4954 if (gimple_call_num_args (stmt) != 6
4955 || !flag_inline_atomics
4956 || !optimize
4957 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4958 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4959 || !gimple_vdef (stmt)
4960 || !gimple_vuse (stmt))
4961 return false;
4962
4963 tree fndecl = gimple_call_fndecl (stmt);
4964 switch (DECL_FUNCTION_CODE (fndecl))
4965 {
4966 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4967 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4968 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4969 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4970 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4971 break;
4972 default:
4973 return false;
4974 }
4975
4976 tree expected = gimple_call_arg (stmt, 1);
4977 if (TREE_CODE (expected) != ADDR_EXPR
4978 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4979 return false;
4980
4981 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4982 if (!is_gimple_reg_type (etype)
4983 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4984 || TREE_THIS_VOLATILE (etype)
4985 || VECTOR_TYPE_P (etype)
4986 || TREE_CODE (etype) == COMPLEX_TYPE
4987 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4988 might not preserve all the bits. See PR71716. */
4989 || SCALAR_FLOAT_TYPE_P (etype)
4990 || maybe_ne (TYPE_PRECISION (etype),
4991 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4992 return false;
4993
4994 tree weak = gimple_call_arg (stmt, 3);
4995 if (!integer_zerop (weak) && !integer_onep (weak))
4996 return false;
4997
4998 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4999 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5000 machine_mode mode = TYPE_MODE (itype);
5001
5002 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5003 == CODE_FOR_nothing
5004 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5005 return false;
5006
5007 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5008 return false;
5009
5010 return true;
5011 }
5012
5013 /* Fold
5014 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5015 into
5016 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5017 i = IMAGPART_EXPR <t>;
5018 r = (_Bool) i;
5019 e = REALPART_EXPR <t>; */
5020
5021 void
5022 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5023 {
5024 gimple *stmt = gsi_stmt (*gsi);
5025 tree fndecl = gimple_call_fndecl (stmt);
5026 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5027 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5028 tree ctype = build_complex_type (itype);
5029 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5030 bool throws = false;
5031 edge e = NULL;
5032 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5033 expected);
5034 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5035 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5036 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5037 {
5038 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5039 build1 (VIEW_CONVERT_EXPR, itype,
5040 gimple_assign_lhs (g)));
5041 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5042 }
5043 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5044 + int_size_in_bytes (itype);
5045 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5046 gimple_call_arg (stmt, 0),
5047 gimple_assign_lhs (g),
5048 gimple_call_arg (stmt, 2),
5049 build_int_cst (integer_type_node, flag),
5050 gimple_call_arg (stmt, 4),
5051 gimple_call_arg (stmt, 5));
5052 tree lhs = make_ssa_name (ctype);
5053 gimple_call_set_lhs (g, lhs);
5054 gimple_move_vops (g, stmt);
5055 tree oldlhs = gimple_call_lhs (stmt);
5056 if (stmt_can_throw_internal (cfun, stmt))
5057 {
5058 throws = true;
5059 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5060 }
5061 gimple_call_set_nothrow (as_a <gcall *> (g),
5062 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5063 gimple_call_set_lhs (stmt, NULL_TREE);
5064 gsi_replace (gsi, g, true);
5065 if (oldlhs)
5066 {
5067 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5068 build1 (IMAGPART_EXPR, itype, lhs));
5069 if (throws)
5070 {
5071 gsi_insert_on_edge_immediate (e, g);
5072 *gsi = gsi_for_stmt (g);
5073 }
5074 else
5075 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5076 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5077 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5078 }
5079 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5080 build1 (REALPART_EXPR, itype, lhs));
5081 if (throws && oldlhs == NULL_TREE)
5082 {
5083 gsi_insert_on_edge_immediate (e, g);
5084 *gsi = gsi_for_stmt (g);
5085 }
5086 else
5087 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5088 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5089 {
5090 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5091 VIEW_CONVERT_EXPR,
5092 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5093 gimple_assign_lhs (g)));
5094 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5095 }
5096 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5097 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5098 *gsi = gsiret;
5099 }
5100
5101 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
5102 signed precision, doesn't fit into TYPE. The overflow test is done
5103 regardless of -fwrapv, and even for unsigned types. */
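/* E.g., for an 8-bit unsigned TYPE, 200 + 100 = 300 needs 9 bits and
   thus overflows; for an 8-bit signed TYPE, 100 + 100 = 200 needs 9
   bits (including the sign) and overflows as well.  */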
5104
5105 bool
5106 arith_overflowed_p (enum tree_code code, const_tree type,
5107 const_tree arg0, const_tree arg1)
5108 {
5109 widest2_int warg0 = widest2_int_cst (arg0);
5110 widest2_int warg1 = widest2_int_cst (arg1);
5111 widest2_int wres;
5112 switch (code)
5113 {
5114 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5115 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5116 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5117 default: gcc_unreachable ();
5118 }
5119 signop sign = TYPE_SIGN (type);
5120 if (sign == UNSIGNED && wi::neg_p (wres))
5121 return true;
5122 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5123 }
5124
5125 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5126 for the memory it references, otherwise return null. VECTYPE is the
5127 type of the memory vector. */
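/* E.g., an IFN_MASK_LOAD whose mask is all ones is just an ordinary
   vector load, so it can be folded to
     lhs = MEM_REF <vectype> [ptr];
   with the alignment taken from the second argument.  */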
5128
5129 static tree
5130 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5131 {
5132 tree ptr = gimple_call_arg (call, 0);
5133 tree alias_align = gimple_call_arg (call, 1);
5134 tree mask = gimple_call_arg (call, 2);
5135 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5136 return NULL_TREE;
5137
5138 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
5139 if (TYPE_ALIGN (vectype) != align)
5140 vectype = build_aligned_type (vectype, align);
5141 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5142 return fold_build2 (MEM_REF, vectype, ptr, offset);
5143 }
5144
5145 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5146
5147 static bool
5148 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5149 {
5150 tree lhs = gimple_call_lhs (call);
5151 if (!lhs)
5152 return false;
5153
5154 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5155 {
5156 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5157 gimple_set_location (new_stmt, gimple_location (call));
5158 gimple_move_vops (new_stmt, call);
5159 gsi_replace (gsi, new_stmt, false);
5160 return true;
5161 }
5162 return false;
5163 }
5164
5165 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5166
5167 static bool
5168 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5169 {
5170 tree rhs = gimple_call_arg (call, 3);
5171 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5172 {
5173 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5174 gimple_set_location (new_stmt, gimple_location (call));
5175 gimple_move_vops (new_stmt, call);
5176 gsi_replace (gsi, new_stmt, false);
5177 return true;
5178 }
5179 return false;
5180 }
5181
5182 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5183 The statement may be replaced by another statement, e.g., if the call
5184 simplifies to a constant value. Return true if any changes were made.
5185 It is assumed that the operands have been previously folded. */
5186
5187 static bool
5188 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5189 {
5190 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5191 tree callee;
5192 bool changed = false;
5193 unsigned i;
5194
5195 /* Fold *& in call arguments. */
5196 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5197 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
5198 {
5199 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
5200 if (tmp)
5201 {
5202 gimple_call_set_arg (stmt, i, tmp);
5203 changed = true;
5204 }
5205 }
5206
5207 /* Check for virtual calls that became direct calls. */
5208 callee = gimple_call_fn (stmt);
5209 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5210 {
5211 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5212 {
5213 if (dump_file && virtual_method_call_p (callee)
5214 && !possible_polymorphic_call_target_p
5215 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5216 (OBJ_TYPE_REF_EXPR (callee)))))
5217 {
5218 fprintf (dump_file,
5219 "Type inheritance inconsistent devirtualization of ");
5220 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5221 fprintf (dump_file, " to ");
5222 print_generic_expr (dump_file, callee, TDF_SLIM);
5223 fprintf (dump_file, "\n");
5224 }
5225
5226 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5227 changed = true;
5228 }
5229 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5230 {
5231 bool final;
5232 vec <cgraph_node *>targets
5233 = possible_polymorphic_call_targets (callee, stmt, &final);
5234 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5235 {
5236 tree lhs = gimple_call_lhs (stmt);
5237 if (dump_enabled_p ())
5238 {
5239 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5240 "folding virtual function call to %s\n",
5241 targets.length () == 1
5242 ? targets[0]->name ()
5243 : "__builtin_unreachable");
5244 }
5245 if (targets.length () == 1)
5246 {
5247 tree fndecl = targets[0]->decl;
5248 gimple_call_set_fndecl (stmt, fndecl);
5249 changed = true;
5250 /* If changing the call to __cxa_pure_virtual
5251 or similar noreturn function, adjust gimple_call_fntype
5252 too. */
5253 if (gimple_call_noreturn_p (stmt)
5254 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5255 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5256 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5257 == void_type_node))
5258 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5259 /* If the call becomes noreturn, remove the lhs. */
5260 if (lhs
5261 && gimple_call_noreturn_p (stmt)
5262 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5263 || should_remove_lhs_p (lhs)))
5264 {
5265 if (TREE_CODE (lhs) == SSA_NAME)
5266 {
5267 tree var = create_tmp_var (TREE_TYPE (lhs));
5268 tree def = get_or_create_ssa_default_def (cfun, var);
5269 gimple *new_stmt = gimple_build_assign (lhs, def);
5270 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5271 }
5272 gimple_call_set_lhs (stmt, NULL_TREE);
5273 }
5274 maybe_remove_unused_call_args (cfun, stmt);
5275 }
5276 else
5277 {
5278 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5279 gimple *new_stmt = gimple_build_call (fndecl, 0);
5280 gimple_set_location (new_stmt, gimple_location (stmt));
5281 /* If the call had an SSA name as lhs, morph that into
5282 an uninitialized value. */
5283 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5284 {
5285 tree var = create_tmp_var (TREE_TYPE (lhs));
5286 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5287 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5288 set_ssa_default_def (cfun, var, lhs);
5289 }
5290 gimple_move_vops (new_stmt, stmt);
5291 gsi_replace (gsi, new_stmt, false);
5292 return true;
5293 }
5294 }
5295 }
5296 }
5297
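/* Illustrative example of the devirtualization above (hypothetical
   types and SSA names): if type inheritance analysis proves that

     OBJ_TYPE_REF(_3;(struct S)s_1->0) (s_1);

   has the single possible target S::foo, the statement is folded to
   the direct call 'S::foo (s_1)'; with zero possible targets it is
   replaced by a call to __builtin_unreachable.  */
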
5298 /* Check for indirect calls that became direct calls, and then
5299 no longer require a static chain. */
5300 if (gimple_call_chain (stmt))
5301 {
5302 tree fn = gimple_call_fndecl (stmt);
5303 if (fn && !DECL_STATIC_CHAIN (fn))
5304 {
5305 gimple_call_set_chain (stmt, NULL);
5306 changed = true;
5307 }
5308 else
5309 {
5310 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
5311 if (tmp)
5312 {
5313 gimple_call_set_chain (stmt, tmp);
5314 changed = true;
5315 }
5316 }
5317 }
5318
5319 if (inplace)
5320 return changed;
5321
5322 /* Check for builtins that CCP can handle using information not
5323 available in the generic fold routines. */
5324 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5325 {
5326 if (gimple_fold_builtin (gsi))
5327 changed = true;
5328 }
5329 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5330 {
5331 changed |= targetm.gimple_fold_builtin (gsi);
5332 }
5333 else if (gimple_call_internal_p (stmt))
5334 {
5335 enum tree_code subcode = ERROR_MARK;
5336 tree result = NULL_TREE;
5337 bool cplx_result = false;
5338 tree overflow = NULL_TREE;
5339 switch (gimple_call_internal_fn (stmt))
5340 {
5341 case IFN_BUILTIN_EXPECT:
5342 result = fold_builtin_expect (gimple_location (stmt),
5343 gimple_call_arg (stmt, 0),
5344 gimple_call_arg (stmt, 1),
5345 gimple_call_arg (stmt, 2),
5346 NULL_TREE);
5347 break;
5348 case IFN_UBSAN_OBJECT_SIZE:
5349 {
5350 tree offset = gimple_call_arg (stmt, 1);
5351 tree objsize = gimple_call_arg (stmt, 2);
5352 if (integer_all_onesp (objsize)
5353 || (TREE_CODE (offset) == INTEGER_CST
5354 && TREE_CODE (objsize) == INTEGER_CST
5355 && tree_int_cst_le (offset, objsize)))
5356 {
5357 replace_call_with_value (gsi, NULL_TREE);
5358 return true;
5359 }
5360 }
5361 break;
5362 case IFN_UBSAN_PTR:
5363 if (integer_zerop (gimple_call_arg (stmt, 1)))
5364 {
5365 replace_call_with_value (gsi, NULL_TREE);
5366 return true;
5367 }
5368 break;
5369 case IFN_UBSAN_BOUNDS:
5370 {
5371 tree index = gimple_call_arg (stmt, 1);
5372 tree bound = gimple_call_arg (stmt, 2);
5373 if (TREE_CODE (index) == INTEGER_CST
5374 && TREE_CODE (bound) == INTEGER_CST)
5375 {
5376 index = fold_convert (TREE_TYPE (bound), index);
5377 if (TREE_CODE (index) == INTEGER_CST
5378 && tree_int_cst_le (index, bound))
5379 {
5380 replace_call_with_value (gsi, NULL_TREE);
5381 return true;
5382 }
5383 }
5384 }
5385 break;
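/* Note on the three UBSAN cases above: they drop sanitizer checks
   that are provably redundant at compile time; e.g. .UBSAN_PTR (p_1, 0)
   guards a zero pointer offset and is removed outright.  */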
5386 case IFN_GOACC_DIM_SIZE:
5387 case IFN_GOACC_DIM_POS:
5388 result = fold_internal_goacc_dim (stmt);
5389 break;
5390 case IFN_UBSAN_CHECK_ADD:
5391 subcode = PLUS_EXPR;
5392 break;
5393 case IFN_UBSAN_CHECK_SUB:
5394 subcode = MINUS_EXPR;
5395 break;
5396 case IFN_UBSAN_CHECK_MUL:
5397 subcode = MULT_EXPR;
5398 break;
5399 case IFN_ADD_OVERFLOW:
5400 subcode = PLUS_EXPR;
5401 cplx_result = true;
5402 break;
5403 case IFN_SUB_OVERFLOW:
5404 subcode = MINUS_EXPR;
5405 cplx_result = true;
5406 break;
5407 case IFN_MUL_OVERFLOW:
5408 subcode = MULT_EXPR;
5409 cplx_result = true;
5410 break;
5411 case IFN_MASK_LOAD:
5412 changed |= gimple_fold_mask_load (gsi, stmt);
5413 break;
5414 case IFN_MASK_STORE:
5415 changed |= gimple_fold_mask_store (gsi, stmt);
5416 break;
5417 default:
5418 break;
5419 }
5420 if (subcode != ERROR_MARK)
5421 {
5422 tree arg0 = gimple_call_arg (stmt, 0);
5423 tree arg1 = gimple_call_arg (stmt, 1);
5424 tree type = TREE_TYPE (arg0);
5425 if (cplx_result)
5426 {
5427 tree lhs = gimple_call_lhs (stmt);
5428 if (lhs == NULL_TREE)
5429 type = NULL_TREE;
5430 else
5431 type = TREE_TYPE (TREE_TYPE (lhs));
5432 }
5433 if (type == NULL_TREE)
5434 ;
5435 /* x = y + 0; x = y - 0; x = y * 0; */
5436 else if (integer_zerop (arg1))
5437 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5438 /* x = 0 + y; x = 0 * y; */
5439 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5440 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5441 /* x = y - y; */
5442 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5443 result = integer_zero_node;
5444 /* x = y * 1; x = 1 * y; */
5445 else if (subcode == MULT_EXPR && integer_onep (arg1))
5446 result = arg0;
5447 else if (subcode == MULT_EXPR && integer_onep (arg0))
5448 result = arg1;
5449 else if (TREE_CODE (arg0) == INTEGER_CST
5450 && TREE_CODE (arg1) == INTEGER_CST)
5451 {
5452 if (cplx_result)
5453 result = int_const_binop (subcode, fold_convert (type, arg0),
5454 fold_convert (type, arg1));
5455 else
5456 result = int_const_binop (subcode, arg0, arg1);
5457 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5458 {
5459 if (cplx_result)
5460 overflow = build_one_cst (type);
5461 else
5462 result = NULL_TREE;
5463 }
5464 }
5465 if (result)
5466 {
5467 if (result == integer_zero_node)
5468 result = build_zero_cst (type);
5469 else if (cplx_result && TREE_TYPE (result) != type)
5470 {
5471 if (TREE_CODE (result) == INTEGER_CST)
5472 {
5473 if (arith_overflowed_p (PLUS_EXPR, type, result,
5474 integer_zero_node))
5475 overflow = build_one_cst (type);
5476 }
5477 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5478 && TYPE_UNSIGNED (type))
5479 || (TYPE_PRECISION (type)
5480 < (TYPE_PRECISION (TREE_TYPE (result))
5481 + (TYPE_UNSIGNED (TREE_TYPE (result))
5482 && !TYPE_UNSIGNED (type)))))
5483 result = NULL_TREE;
5484 if (result)
5485 result = fold_convert (type, result);
5486 }
5487 }
5488 }
5489
5490 if (result)
5491 {
5492 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5493 result = drop_tree_overflow (result);
5494 if (cplx_result)
5495 {
5496 if (overflow == NULL_TREE)
5497 overflow = build_zero_cst (TREE_TYPE (result));
5498 tree ctype = build_complex_type (TREE_TYPE (result));
5499 if (TREE_CODE (result) == INTEGER_CST
5500 && TREE_CODE (overflow) == INTEGER_CST)
5501 result = build_complex (ctype, result, overflow);
5502 else
5503 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5504 ctype, result, overflow);
5505 }
5506 if (!update_call_from_tree (gsi, result))
5507 gimplify_and_update_call_from_tree (gsi, result);
5508 changed = true;
5509 }
5510 }
5511
5512 return changed;
5513 }
5514
5515
5516 /* Return true if NAME has a use on STMT. */
5517
5518 static bool
5519 has_use_on_stmt (tree name, gimple *stmt)
5520 {
5521 imm_use_iterator iter;
5522 use_operand_p use_p;
5523 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5524 if (USE_STMT (use_p) == stmt)
5525 return true;
5526 return false;
5527 }
5528
5529 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
5530 gimple_simplify.
5531
5532 Replaces *GSI with the simplification result in RES_OP
5533 and the associated statements in *SEQ. Does the replacement
5534 according to INPLACE and returns true if the operation succeeded. */
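
/* Illustrative example (hypothetical SSA names): if gimple_simplify
   reduces the condition of 'if (a_1 != a_1)' to the constant 0, the
   INTEGER_CST case below rewrites the GIMPLE_COND via
   gimple_cond_make_false.  */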
5535
5536 static bool
5537 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5538 gimple_match_op *res_op,
5539 gimple_seq *seq, bool inplace)
5540 {
5541 gimple *stmt = gsi_stmt (*gsi);
5542 tree *ops = res_op->ops;
5543 unsigned int num_ops = res_op->num_ops;
5544
5545 /* Play safe and do not allow abnormals to be mentioned in
5546 newly created statements. See also maybe_push_res_to_seq.
5547 As an exception allow such uses if there was a use of the
5548 same SSA name on the old stmt. */
5549 for (unsigned int i = 0; i < num_ops; ++i)
5550 if (TREE_CODE (ops[i]) == SSA_NAME
5551 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5552 && !has_use_on_stmt (ops[i], stmt))
5553 return false;
5554
5555 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5556 for (unsigned int i = 0; i < 2; ++i)
5557 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5558 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5559 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5560 return false;
5561
5562 /* Don't insert new statements when INPLACE is true, even if we could
5563 reuse STMT for the final statement. */
5564 if (inplace && !gimple_seq_empty_p (*seq))
5565 return false;
5566
5567 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5568 {
5569 gcc_assert (res_op->code.is_tree_code ());
5570 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
5571 /* A GIMPLE_COND's condition may not throw. */
5572 && (!flag_exceptions
5573 || !cfun->can_throw_non_call_exceptions
5574 || !operation_could_trap_p (res_op->code,
5575 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5576 false, NULL_TREE)))
5577 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5578 else if (res_op->code == SSA_NAME)
5579 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5580 build_zero_cst (TREE_TYPE (ops[0])));
5581 else if (res_op->code == INTEGER_CST)
5582 {
5583 if (integer_zerop (ops[0]))
5584 gimple_cond_make_false (cond_stmt);
5585 else
5586 gimple_cond_make_true (cond_stmt);
5587 }
5588 else if (!inplace)
5589 {
5590 tree res = maybe_push_res_to_seq (res_op, seq);
5591 if (!res)
5592 return false;
5593 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5594 build_zero_cst (TREE_TYPE (res)));
5595 }
5596 else
5597 return false;
5598 if (dump_file && (dump_flags & TDF_DETAILS))
5599 {
5600 fprintf (dump_file, "gimple_simplified to ");
5601 if (!gimple_seq_empty_p (*seq))
5602 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5603 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5604 0, TDF_SLIM);
5605 }
5606 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5607 return true;
5608 }
5609 else if (is_gimple_assign (stmt)
5610 && res_op->code.is_tree_code ())
5611 {
5612 if (!inplace
5613 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
5614 {
5615 maybe_build_generic_op (res_op);
5616 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5617 res_op->op_or_null (0),
5618 res_op->op_or_null (1),
5619 res_op->op_or_null (2));
5620 if (dump_file && (dump_flags & TDF_DETAILS))
5621 {
5622 fprintf (dump_file, "gimple_simplified to ");
5623 if (!gimple_seq_empty_p (*seq))
5624 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5625 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5626 0, TDF_SLIM);
5627 }
5628 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5629 return true;
5630 }
5631 }
5632 else if (res_op->code.is_fn_code ()
5633 && gimple_call_combined_fn (stmt) == res_op->code)
5634 {
5635 gcc_assert (num_ops == gimple_call_num_args (stmt));
5636 for (unsigned int i = 0; i < num_ops; ++i)
5637 gimple_call_set_arg (stmt, i, ops[i]);
5638 if (dump_file && (dump_flags & TDF_DETAILS))
5639 {
5640 fprintf (dump_file, "gimple_simplified to ");
5641 if (!gimple_seq_empty_p (*seq))
5642 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5643 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5644 }
5645 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5646 return true;
5647 }
5648 else if (!inplace)
5649 {
5650 if (gimple_has_lhs (stmt))
5651 {
5652 tree lhs = gimple_get_lhs (stmt);
5653 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5654 return false;
5655 if (dump_file && (dump_flags & TDF_DETAILS))
5656 {
5657 fprintf (dump_file, "gimple_simplified to ");
5658 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5659 }
5660 gsi_replace_with_seq_vops (gsi, *seq);
5661 return true;
5662 }
5663 else
5664 gcc_unreachable ();
5665 }
5666
5667 return false;
5668 }
5669
5670 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5671
5672 static bool
5673 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5674 {
5675 bool res = false;
5676 tree *orig_t = t;
5677
5678 if (TREE_CODE (*t) == ADDR_EXPR)
5679 t = &TREE_OPERAND (*t, 0);
5680
5681 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5682 generic vector extension. The actual vector referenced is
5683 view-converted to an array type for this purpose. If the index
5684 is constant, the canonical representation in the middle-end is a
5685 BIT_FIELD_REF, so rewrite the former to the latter here. */
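/* Illustrative example (hypothetical names): for 'vector(4) int v',
   the frontend form VIEW_CONVERT_EXPR<int[4]>(v)[2] becomes
   BIT_FIELD_REF <v, 32, 64>, i.e. a 32-bit piece at bit offset
   2 * 32 = 64.  */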
5686 if (TREE_CODE (*t) == ARRAY_REF
5687 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5688 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5689 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5690 {
5691 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5692 if (VECTOR_TYPE_P (vtype))
5693 {
5694 tree low = array_ref_low_bound (*t);
5695 if (TREE_CODE (low) == INTEGER_CST)
5696 {
5697 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5698 {
5699 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5700 wi::to_widest (low));
5701 idx = wi::mul (idx, wi::to_widest
5702 (TYPE_SIZE (TREE_TYPE (*t))));
5703 widest_int ext
5704 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5705 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5706 {
5707 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5708 TREE_TYPE (*t),
5709 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5710 TYPE_SIZE (TREE_TYPE (*t)),
5711 wide_int_to_tree (bitsizetype, idx));
5712 res = true;
5713 }
5714 }
5715 }
5716 }
5717 }
5718
5719 while (handled_component_p (*t))
5720 t = &TREE_OPERAND (*t, 0);
5721
5722 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
5723 of invariant addresses into an SSA name MEM_REF address. */
5724 if (TREE_CODE (*t) == MEM_REF
5725 || TREE_CODE (*t) == TARGET_MEM_REF)
5726 {
5727 tree addr = TREE_OPERAND (*t, 0);
5728 if (TREE_CODE (addr) == ADDR_EXPR
5729 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5730 || handled_component_p (TREE_OPERAND (addr, 0))))
5731 {
5732 tree base;
5733 poly_int64 coffset;
5734 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5735 &coffset);
5736 if (!base)
5737 {
5738 if (is_debug)
5739 return false;
5740 gcc_unreachable ();
5741 }
5742
5743 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5744 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5745 TREE_OPERAND (*t, 1),
5746 size_int (coffset));
5747 res = true;
5748 }
5749 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5750 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5751 }
5752
5753 /* Canonicalize back MEM_REFs to plain reference trees if the object
5754 accessed is a decl that has the same access semantics as the MEM_REF. */
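/* Illustrative example (hypothetical decl): MEM[(int *)&a, 0] for an
   'int a' with matching volatility, TBAA type and alignment is
   rewritten back to the plain reference 'a'.  */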
5755 if (TREE_CODE (*t) == MEM_REF
5756 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5757 && integer_zerop (TREE_OPERAND (*t, 1))
5758 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5759 {
5760 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5761 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5762 if (/* Same volatile qualification. */
5763 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5764 /* Same TBAA behavior with -fstrict-aliasing. */
5765 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
5766 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
5767 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
5768 /* Same alignment. */
5769 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
5770 /* We have to look out here to not drop a required conversion
5771 from the rhs to the lhs if *t appears on the lhs or vice-versa
5772 if it appears on the rhs. Thus require strict type
5773 compatibility. */
5774 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
5775 {
5776 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5777 res = true;
5778 }
5779 }
5780
5781 else if (TREE_CODE (*orig_t) == ADDR_EXPR
5782 && TREE_CODE (*t) == MEM_REF
5783 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
5784 {
5785 tree base;
5786 poly_int64 coffset;
5787 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
5788 &coffset);
5789 if (base)
5790 {
5791 gcc_assert (TREE_CODE (base) == MEM_REF);
5792 poly_int64 moffset;
5793 if (mem_ref_offset (base).to_shwi (&moffset))
5794 {
5795 coffset += moffset;
5796 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
5797 {
5798 coffset += moffset;
5799 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
5800 return true;
5801 }
5802 }
5803 }
5804 }
5805
5806 /* Canonicalize TARGET_MEM_REF in particular with respect to
5807 the indexes becoming constant. */
5808 else if (TREE_CODE (*t) == TARGET_MEM_REF)
5809 {
5810 tree tem = maybe_fold_tmr (*t);
5811 if (tem)
5812 {
5813 *t = tem;
5814 res = true;
5815 }
5816 }
5817
5818 return res;
5819 }
5820
5821 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
5822 distinguishes both cases. */
5823
5824 static bool
5825 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
5826 {
5827 bool changed = false;
5828 gimple *stmt = gsi_stmt (*gsi);
5829 bool nowarning = gimple_no_warning_p (stmt);
5830 unsigned i;
5831 fold_defer_overflow_warnings ();
5832
5833 /* First do required canonicalization of [TARGET_]MEM_REF addresses
5834 after propagation.
5835 ??? This shouldn't be done in generic folding but in the
5836 propagation helpers which also know whether an address was
5837 propagated.
5838 Also canonicalize operand order. */
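/* Illustrative example (hypothetical SSA names): tree_swap_operands_p
   prefers constants as the second operand, so '1 > x_2' is
   canonicalized to 'x_2 < 1', swapping the comparison code along with
   the operands.  */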
5839 switch (gimple_code (stmt))
5840 {
5841 case GIMPLE_ASSIGN:
5842 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
5843 {
5844 tree *rhs = gimple_assign_rhs1_ptr (stmt);
5845 if ((REFERENCE_CLASS_P (*rhs)
5846 || TREE_CODE (*rhs) == ADDR_EXPR)
5847 && maybe_canonicalize_mem_ref_addr (rhs))
5848 changed = true;
5849 tree *lhs = gimple_assign_lhs_ptr (stmt);
5850 if (REFERENCE_CLASS_P (*lhs)
5851 && maybe_canonicalize_mem_ref_addr (lhs))
5852 changed = true;
5853 }
5854 else
5855 {
5856 /* Canonicalize operand order. */
5857 enum tree_code code = gimple_assign_rhs_code (stmt);
5858 if (TREE_CODE_CLASS (code) == tcc_comparison
5859 || commutative_tree_code (code)
5860 || commutative_ternary_tree_code (code))
5861 {
5862 tree rhs1 = gimple_assign_rhs1 (stmt);
5863 tree rhs2 = gimple_assign_rhs2 (stmt);
5864 if (tree_swap_operands_p (rhs1, rhs2))
5865 {
5866 gimple_assign_set_rhs1 (stmt, rhs2);
5867 gimple_assign_set_rhs2 (stmt, rhs1);
5868 if (TREE_CODE_CLASS (code) == tcc_comparison)
5869 gimple_assign_set_rhs_code (stmt,
5870 swap_tree_comparison (code));
5871 changed = true;
5872 }
5873 }
5874 }
5875 break;
5876 case GIMPLE_CALL:
5877 {
5878 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5879 {
5880 tree *arg = gimple_call_arg_ptr (stmt, i);
5881 if (REFERENCE_CLASS_P (*arg)
5882 && maybe_canonicalize_mem_ref_addr (arg))
5883 changed = true;
5884 }
5885 tree *lhs = gimple_call_lhs_ptr (stmt);
5886 if (*lhs
5887 && REFERENCE_CLASS_P (*lhs)
5888 && maybe_canonicalize_mem_ref_addr (lhs))
5889 changed = true;
5890 break;
5891 }
5892 case GIMPLE_ASM:
5893 {
5894 gasm *asm_stmt = as_a <gasm *> (stmt);
5895 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5896 {
5897 tree link = gimple_asm_output_op (asm_stmt, i);
5898 tree op = TREE_VALUE (link);
5899 if (REFERENCE_CLASS_P (op)
5900 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5901 changed = true;
5902 }
5903 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5904 {
5905 tree link = gimple_asm_input_op (asm_stmt, i);
5906 tree op = TREE_VALUE (link);
5907 if ((REFERENCE_CLASS_P (op)
5908 || TREE_CODE (op) == ADDR_EXPR)
5909 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5910 changed = true;
5911 }
5912 }
5913 break;
5914 case GIMPLE_DEBUG:
5915 if (gimple_debug_bind_p (stmt))
5916 {
5917 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5918 if (*val
5919 && (REFERENCE_CLASS_P (*val)
5920 || TREE_CODE (*val) == ADDR_EXPR)
5921 && maybe_canonicalize_mem_ref_addr (val, true))
5922 changed = true;
5923 }
5924 break;
5925 case GIMPLE_COND:
5926 {
5927 /* Canonicalize operand order. */
5928 tree lhs = gimple_cond_lhs (stmt);
5929 tree rhs = gimple_cond_rhs (stmt);
5930 if (tree_swap_operands_p (lhs, rhs))
5931 {
5932 gcond *gc = as_a <gcond *> (stmt);
5933 gimple_cond_set_lhs (gc, rhs);
5934 gimple_cond_set_rhs (gc, lhs);
5935 gimple_cond_set_code (gc,
5936 swap_tree_comparison (gimple_cond_code (gc)));
5937 changed = true;
5938 }
5939 }
5940 default:;
5941 }
5942
5943 /* Dispatch to pattern-based folding. */
5944 if (!inplace
5945 || is_gimple_assign (stmt)
5946 || gimple_code (stmt) == GIMPLE_COND)
5947 {
5948 gimple_seq seq = NULL;
5949 gimple_match_op res_op;
5950 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
5951 valueize, valueize))
5952 {
5953 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
5954 changed = true;
5955 else
5956 gimple_seq_discard (seq);
5957 }
5958 }
5959
5960 stmt = gsi_stmt (*gsi);
5961
5962 /* Fold the main computation performed by the statement. */
5963 switch (gimple_code (stmt))
5964 {
5965 case GIMPLE_ASSIGN:
5966 {
5967 /* For boolean-typed X, try to canonicalize the comparisons
5968 X == 0, X == 1, X != 0, and X != 1. */
5969 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5970 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5971 {
5972 tree lhs = gimple_assign_lhs (stmt);
5973 tree op1 = gimple_assign_rhs1 (stmt);
5974 tree op2 = gimple_assign_rhs2 (stmt);
5975 tree type = TREE_TYPE (op1);
5976
5977 /* Check whether the comparison operands are of the same boolean
5978 type as the result type.
5979 Check that the second operand is an integer constant with value
5980 one or zero. */
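/* Illustrative summary of the cases below: for boolean X,
   X == 1 and X != 0 become plain X, while X == 0 and X != 1
   become ~X for one-bit-precision X and X ^ 1 otherwise.  */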
5981 if (TREE_CODE (op2) == INTEGER_CST
5982 && (integer_zerop (op2) || integer_onep (op2))
5983 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5984 {
5985 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5986 bool is_logical_not = false;
5987
5988 /* X == 0 and X != 1 is a logical-not of X;
5989 X == 1 and X != 0 is X. */
5990 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5991 || (cmp_code == NE_EXPR && integer_onep (op2)))
5992 is_logical_not = true;
5993
5994 if (is_logical_not == false)
5995 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5996 /* Only for one-bit precision typed X is the transformation
5997 !X -> ~X valid. */
5998 else if (TYPE_PRECISION (type) == 1)
5999 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6000 /* Otherwise we use !X -> X ^ 1. */
6001 else
6002 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6003 build_int_cst (type, 1));
6004 changed = true;
6005 break;
6006 }
6007 }
6008
6009 unsigned old_num_ops = gimple_num_ops (stmt);
6010 tree lhs = gimple_assign_lhs (stmt);
6011 tree new_rhs = fold_gimple_assign (gsi);
6012 if (new_rhs
6013 && !useless_type_conversion_p (TREE_TYPE (lhs),
6014 TREE_TYPE (new_rhs)))
6015 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6016 if (new_rhs
6017 && (!inplace
6018 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6019 {
6020 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6021 changed = true;
6022 }
6023 break;
6024 }
6025
6026 case GIMPLE_CALL:
6027 changed |= gimple_fold_call (gsi, inplace);
6028 break;
6029
6030 case GIMPLE_ASM:
6031 /* Fold *& in asm operands. */
6032 {
6033 gasm *asm_stmt = as_a <gasm *> (stmt);
6034 size_t noutputs;
6035 const char **oconstraints;
6036 const char *constraint;
6037 bool allows_mem, allows_reg;
6038
6039 noutputs = gimple_asm_noutputs (asm_stmt);
6040 oconstraints = XALLOCAVEC (const char *, noutputs);
6041
6042 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6043 {
6044 tree link = gimple_asm_output_op (asm_stmt, i);
6045 tree op = TREE_VALUE (link);
6046 oconstraints[i]
6047 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6048 if (REFERENCE_CLASS_P (op)
6049 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
6050 {
6051 TREE_VALUE (link) = op;
6052 changed = true;
6053 }
6054 }
6055 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6056 {
6057 tree link = gimple_asm_input_op (asm_stmt, i);
6058 tree op = TREE_VALUE (link);
6059 constraint
6060 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6061 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6062 oconstraints, &allows_mem, &allows_reg);
6063 if (REFERENCE_CLASS_P (op)
6064 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
6065 != NULL_TREE)
6066 {
6067 TREE_VALUE (link) = op;
6068 changed = true;
6069 }
6070 }
6071 }
6072 break;
6073
6074 case GIMPLE_DEBUG:
6075 if (gimple_debug_bind_p (stmt))
6076 {
6077 tree val = gimple_debug_bind_get_value (stmt);
6078 if (val
6079 && REFERENCE_CLASS_P (val))
6080 {
6081 tree tem = maybe_fold_reference (val, false);
6082 if (tem)
6083 {
6084 gimple_debug_bind_set_value (stmt, tem);
6085 changed = true;
6086 }
6087 }
6088 else if (val
6089 && TREE_CODE (val) == ADDR_EXPR)
6090 {
6091 tree ref = TREE_OPERAND (val, 0);
6092 tree tem = maybe_fold_reference (ref, false);
6093 if (tem)
6094 {
6095 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6096 gimple_debug_bind_set_value (stmt, tem);
6097 changed = true;
6098 }
6099 }
6100 }
6101 break;
6102
6103 case GIMPLE_RETURN:
6104 {
6105 greturn *ret_stmt = as_a <greturn *> (stmt);
6106 tree ret = gimple_return_retval (ret_stmt);
6107
6108 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6109 {
6110 tree val = valueize (ret);
6111 if (val && val != ret
6112 && may_propagate_copy (ret, val))
6113 {
6114 gimple_return_set_retval (ret_stmt, val);
6115 changed = true;
6116 }
6117 }
6118 }
6119 break;
6120
6121 default:;
6122 }
6123
6124 stmt = gsi_stmt (*gsi);
6125
6126 /* Fold *& on the lhs. */
6127 if (gimple_has_lhs (stmt))
6128 {
6129 tree lhs = gimple_get_lhs (stmt);
6130 if (lhs && REFERENCE_CLASS_P (lhs))
6131 {
6132 tree new_lhs = maybe_fold_reference (lhs, true);
6133 if (new_lhs)
6134 {
6135 gimple_set_lhs (stmt, new_lhs);
6136 changed = true;
6137 }
6138 }
6139 }
6140
6141 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6142 return changed;
6143 }
6144
6145 /* Valueization callback that ends up not following SSA edges. */
6146
6147 tree
6148 no_follow_ssa_edges (tree)
6149 {
6150 return NULL_TREE;
6151 }
6152
6153 /* Valueization callback that ends up following single-use SSA edges only. */
6154
6155 tree
6156 follow_single_use_edges (tree val)
6157 {
6158 if (TREE_CODE (val) == SSA_NAME
6159 && !has_single_use (val))
6160 return NULL_TREE;
6161 return val;
6162 }
6163
6164 /* Valueization callback that follows all SSA edges. */
6165
6166 tree
6167 follow_all_ssa_edges (tree val)
6168 {
6169 return val;
6170 }
6171
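/* Note on the three callbacks above: a valueization callback maps an
   SSA name to a value the caller knows it has, or returns NULL_TREE to
   refuse following the SSA edge.  E.g. a propagator whose lattice says
   x_1 == 3 can let fold_stmt reduce 'y_2 = x_1 + 1' to 'y_2 = 4'.  */
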
6172 /* Fold the statement pointed to by GSI. In some cases, this function may
6173 replace the whole statement with a new one. Returns true iff folding
6174 makes any changes.
6175 The statement pointed to by GSI should be in valid gimple form but may
6176 be in unfolded state as resulting from for example constant propagation
6177 which can produce *&x = 0. */
6178
6179 bool
6180 fold_stmt (gimple_stmt_iterator *gsi)
6181 {
6182 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6183 }
6184
6185 bool
6186 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6187 {
6188 return fold_stmt_1 (gsi, false, valueize);
6189 }
6190
6191 /* Perform the minimal folding on statement *GSI. Only operations like
6192 *&x created by constant propagation are handled. The statement cannot
6193 be replaced with a new one. Return true if the statement was
6194 changed, false otherwise.
6195 The statement *GSI should be in valid gimple form but may
6196 be in unfolded state as resulting from for example constant propagation
6197 which can produce *&x = 0. */
6198
6199 bool
6200 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6201 {
6202 gimple *stmt = gsi_stmt (*gsi);
6203 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6204 gcc_assert (gsi_stmt (*gsi) == stmt);
6205 return changed;
6206 }
6207
6208 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6209 if EXPR is null or we don't know how.
6210 If non-null, the result always has boolean type. */
6211
6212 static tree
6213 canonicalize_bool (tree expr, bool invert)
6214 {
6215 if (!expr)
6216 return NULL_TREE;
6217 else if (invert)
6218 {
6219 if (integer_nonzerop (expr))
6220 return boolean_false_node;
6221 else if (integer_zerop (expr))
6222 return boolean_true_node;
6223 else if (TREE_CODE (expr) == SSA_NAME)
6224 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6225 build_int_cst (TREE_TYPE (expr), 0));
6226 else if (COMPARISON_CLASS_P (expr))
6227 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6228 boolean_type_node,
6229 TREE_OPERAND (expr, 0),
6230 TREE_OPERAND (expr, 1));
6231 else
6232 return NULL_TREE;
6233 }
6234 else
6235 {
6236 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6237 return expr;
6238 if (integer_nonzerop (expr))
6239 return boolean_true_node;
6240 else if (integer_zerop (expr))
6241 return boolean_false_node;
6242 else if (TREE_CODE (expr) == SSA_NAME)
6243 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6244 build_int_cst (TREE_TYPE (expr), 0));
6245 else if (COMPARISON_CLASS_P (expr))
6246 return fold_build2 (TREE_CODE (expr),
6247 boolean_type_node,
6248 TREE_OPERAND (expr, 0),
6249 TREE_OPERAND (expr, 1));
6250 else
6251 return NULL_TREE;
6252 }
6253 }
6254
6255 /* Check to see if a boolean expression EXPR is logically equivalent to the
6256 comparison (OP1 CODE OP2). Check for various identities involving
6257 SSA_NAMEs. */
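/* Illustrative example (hypothetical SSA names): for
   '_Bool t_1 = a_2 < b_3', both t_1 itself and (t_1 != 0) are
   recognized as equivalent to the comparison a_2 < b_3.  */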
6258
6259 static bool
6260 same_bool_comparison_p (const_tree expr, enum tree_code code,
6261 const_tree op1, const_tree op2)
6262 {
6263 gimple *s;
6264
6265 /* The obvious case. */
6266 if (TREE_CODE (expr) == code
6267 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6268 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6269 return true;
6270
6271 /* Check for comparing NAME with (NAME != 0), and the case where EXPR
6272 is an SSA_NAME with a definition matching the comparison. */
6273 if (TREE_CODE (expr) == SSA_NAME
6274 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6275 {
6276 if (operand_equal_p (expr, op1, 0))
6277 return ((code == NE_EXPR && integer_zerop (op2))
6278 || (code == EQ_EXPR && integer_nonzerop (op2)));
6279 s = SSA_NAME_DEF_STMT (expr);
6280 if (is_gimple_assign (s)
6281 && gimple_assign_rhs_code (s) == code
6282 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6283 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6284 return true;
6285 }
6286
6287 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6288 of name is a comparison, recurse. */
6289 if (TREE_CODE (op1) == SSA_NAME
6290 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6291 {
6292 s = SSA_NAME_DEF_STMT (op1);
6293 if (is_gimple_assign (s)
6294 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6295 {
6296 enum tree_code c = gimple_assign_rhs_code (s);
6297 if ((c == NE_EXPR && integer_zerop (op2))
6298 || (c == EQ_EXPR && integer_nonzerop (op2)))
6299 return same_bool_comparison_p (expr, c,
6300 gimple_assign_rhs1 (s),
6301 gimple_assign_rhs2 (s));
6302 if ((c == EQ_EXPR && integer_zerop (op2))
6303 || (c == NE_EXPR && integer_nonzerop (op2)))
6304 return same_bool_comparison_p (expr,
6305 invert_tree_comparison (c, false),
6306 gimple_assign_rhs1 (s),
6307 gimple_assign_rhs2 (s));
6308 }
6309 }
6310 return false;
6311 }
6312
6313 /* Check to see if two boolean expressions OP1 and OP2 are logically
6314 equivalent. */
6315
6316 static bool
6317 same_bool_result_p (const_tree op1, const_tree op2)
6318 {
6319 /* Simple cases first. */
6320 if (operand_equal_p (op1, op2, 0))
6321 return true;
6322
6323 /* Check the cases where at least one of the operands is a comparison.
6324 These are a bit smarter than operand_equal_p in that they apply some
6325 identities on SSA_NAMEs. */
6326 if (COMPARISON_CLASS_P (op2)
6327 && same_bool_comparison_p (op1, TREE_CODE (op2),
6328 TREE_OPERAND (op2, 0),
6329 TREE_OPERAND (op2, 1)))
6330 return true;
6331 if (COMPARISON_CLASS_P (op1)
6332 && same_bool_comparison_p (op2, TREE_CODE (op1),
6333 TREE_OPERAND (op1, 0),
6334 TREE_OPERAND (op1, 1)))
6335 return true;
6336
6337 /* Default case. */
6338 return false;
6339 }
6340
6341 /* Forward declarations for some mutually recursive functions. */
6342
6343 static tree
6344 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6345 enum tree_code code2, tree op2a, tree op2b);
6346 static tree
6347 and_var_with_comparison (tree type, tree var, bool invert,
6348 enum tree_code code2, tree op2a, tree op2b);
6349 static tree
6350 and_var_with_comparison_1 (tree type, gimple *stmt,
6351 enum tree_code code2, tree op2a, tree op2b);
6352 static tree
6353 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6354 enum tree_code code2, tree op2a, tree op2b);
6355 static tree
6356 or_var_with_comparison (tree, tree var, bool invert,
6357 enum tree_code code2, tree op2a, tree op2b);
6358 static tree
6359 or_var_with_comparison_1 (tree, gimple *stmt,
6360 enum tree_code code2, tree op2a, tree op2b);
6361
6362 /* Helper function for and_comparisons_1: try to simplify the AND of the
6363 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6364 If INVERT is true, invert the value of the VAR before doing the AND.
6365 Return NULL_TREE if we can't simplify this to a single expression. */
6366
6367 static tree
6368 and_var_with_comparison (tree type, tree var, bool invert,
6369 enum tree_code code2, tree op2a, tree op2b)
6370 {
6371 tree t;
6372 gimple *stmt = SSA_NAME_DEF_STMT (var);
6373
6374 /* We can only deal with variables whose definitions are assignments. */
6375 if (!is_gimple_assign (stmt))
6376 return NULL_TREE;
6377
6378 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6379 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6380 Then we only have to consider the simpler non-inverted cases. */
6381 if (invert)
6382 t = or_var_with_comparison_1 (type, stmt,
6383 invert_tree_comparison (code2, false),
6384 op2a, op2b);
6385 else
6386 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6387 return canonicalize_bool (t, invert);
6388 }
6389
6390 /* Try to simplify the AND of the ssa variable defined by the assignment
6391 STMT with the comparison specified by (OP2A CODE2 OP2B).
6392 Return NULL_TREE if we can't simplify this to a single expression. */
6393
6394 static tree
6395 and_var_with_comparison_1 (tree type, gimple *stmt,
6396 enum tree_code code2, tree op2a, tree op2b)
6397 {
6398 tree var = gimple_assign_lhs (stmt);
6399 tree true_test_var = NULL_TREE;
6400 tree false_test_var = NULL_TREE;
6401 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6402
6403 /* Check for identities like (var AND (var == 0)) => false. */
6404 if (TREE_CODE (op2a) == SSA_NAME
6405 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6406 {
6407 if ((code2 == NE_EXPR && integer_zerop (op2b))
6408 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6409 {
6410 true_test_var = op2a;
6411 if (var == true_test_var)
6412 return var;
6413 }
6414 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6415 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6416 {
6417 false_test_var = op2a;
6418 if (var == false_test_var)
6419 return boolean_false_node;
6420 }
6421 }
6422
6423 /* If the definition is a comparison, recurse on it. */
6424 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6425 {
6426 tree t = and_comparisons_1 (type, innercode,
6427 gimple_assign_rhs1 (stmt),
6428 gimple_assign_rhs2 (stmt),
6429 code2,
6430 op2a,
6431 op2b);
6432 if (t)
6433 return t;
6434 }
6435
6436 /* If the definition is an AND or OR expression, we may be able to
6437 simplify by reassociating. */
6438 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6439 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6440 {
6441 tree inner1 = gimple_assign_rhs1 (stmt);
6442 tree inner2 = gimple_assign_rhs2 (stmt);
6443 gimple *s;
6444 tree t;
6445 tree partial = NULL_TREE;
6446 bool is_and = (innercode == BIT_AND_EXPR);
6447
6448 /* Check for boolean identities that don't require recursive examination
6449 of inner1/inner2:
6450 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6451 inner1 AND (inner1 OR inner2) => inner1
6452 !inner1 AND (inner1 AND inner2) => false
6453 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6454 Likewise for similar cases involving inner2. */
6455 if (inner1 == true_test_var)
6456 return (is_and ? var : inner1);
6457 else if (inner2 == true_test_var)
6458 return (is_and ? var : inner2);
6459 else if (inner1 == false_test_var)
6460 return (is_and
6461 ? boolean_false_node
6462 : and_var_with_comparison (type, inner2, false, code2, op2a,
6463 op2b));
6464 else if (inner2 == false_test_var)
6465 return (is_and
6466 ? boolean_false_node
6467 : and_var_with_comparison (type, inner1, false, code2, op2a,
6468 op2b));
6469
6470 /* Next, redistribute/reassociate the AND across the inner tests.
6471 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6472 if (TREE_CODE (inner1) == SSA_NAME
6473 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6474 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6475 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6476 gimple_assign_rhs1 (s),
6477 gimple_assign_rhs2 (s),
6478 code2, op2a, op2b)))
6479 {
6480 /* Handle the AND case, where we are reassociating:
6481 (inner1 AND inner2) AND (op2a code2 op2b)
6482 => (t AND inner2)
6483 If the partial result t is a constant, we win. Otherwise
6484 continue on to try reassociating with the other inner test. */
6485 if (is_and)
6486 {
6487 if (integer_onep (t))
6488 return inner2;
6489 else if (integer_zerop (t))
6490 return boolean_false_node;
6491 }
6492
6493 /* Handle the OR case, where we are redistributing:
6494 (inner1 OR inner2) AND (op2a code2 op2b)
6495 => (t OR (inner2 AND (op2a code2 op2b))) */
6496 else if (integer_onep (t))
6497 return boolean_true_node;
6498
6499 /* Save partial result for later. */
6500 partial = t;
6501 }
6502
6503 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6504 if (TREE_CODE (inner2) == SSA_NAME
6505 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6506 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6507 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6508 gimple_assign_rhs1 (s),
6509 gimple_assign_rhs2 (s),
6510 code2, op2a, op2b)))
6511 {
6512 /* Handle the AND case, where we are reassociating:
6513 (inner1 AND inner2) AND (op2a code2 op2b)
6514 => (inner1 AND t) */
6515 if (is_and)
6516 {
6517 if (integer_onep (t))
6518 return inner1;
6519 else if (integer_zerop (t))
6520 return boolean_false_node;
6521 /* If both are the same, we can apply the identity
6522 (x AND x) == x. */
6523 else if (partial && same_bool_result_p (t, partial))
6524 return t;
6525 }
6526
6527 /* Handle the OR case, where we are redistributing:
6528 (inner1 OR inner2) AND (op2a code2 op2b)
6529 => (t OR (inner1 AND (op2a code2 op2b)))
6530 => (t OR partial) */
6531 else
6532 {
6533 if (integer_onep (t))
6534 return boolean_true_node;
6535 else if (partial)
6536 {
6537 /* We already got a simplification for the other
6538 operand to the redistributed OR expression. The
6539 interesting case is when at least one is false.
6540 Or, if both are the same, we can apply the identity
6541 (x OR x) == x. */
6542 if (integer_zerop (partial))
6543 return t;
6544 else if (integer_zerop (t))
6545 return partial;
6546 else if (same_bool_result_p (t, partial))
6547 return t;
6548 }
6549 }
6550 }
6551 }
6552 return NULL_TREE;
6553 }
6554
6555 /* Try to simplify the AND of two comparisons defined by
6556 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6557 If this can be done without constructing an intermediate value,
6558 return the resulting tree; otherwise NULL_TREE is returned.
6559 This function is deliberately asymmetric as it recurses on SSA_DEFs
6560 in the first comparison but not the second. */
6561
6562 static tree
6563 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6564 enum tree_code code2, tree op2a, tree op2b)
6565 {
6566 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6567
6568 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6569 if (operand_equal_p (op1a, op2a, 0)
6570 && operand_equal_p (op1b, op2b, 0))
6571 {
6572 /* Result will be either NULL_TREE, or a combined comparison. */
6573 tree t = combine_comparisons (UNKNOWN_LOCATION,
6574 TRUTH_ANDIF_EXPR, code1, code2,
6575 truth_type, op1a, op1b);
6576 if (t)
6577 return t;
6578 }
6579
6580 /* Likewise the swapped case of the above. */
6581 if (operand_equal_p (op1a, op2b, 0)
6582 && operand_equal_p (op1b, op2a, 0))
6583 {
6584 /* Result will be either NULL_TREE, or a combined comparison. */
6585 tree t = combine_comparisons (UNKNOWN_LOCATION,
6586 TRUTH_ANDIF_EXPR, code1,
6587 swap_tree_comparison (code2),
6588 truth_type, op1a, op1b);
6589 if (t)
6590 return t;
6591 }
6592
6593 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6594 NAME's definition is a truth value. See if there are any simplifications
6595 that can be done against the NAME's definition. */
6596 if (TREE_CODE (op1a) == SSA_NAME
6597 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6598 && (integer_zerop (op1b) || integer_onep (op1b)))
6599 {
6600 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6601 || (code1 == NE_EXPR && integer_onep (op1b)));
6602 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6603 switch (gimple_code (stmt))
6604 {
6605 case GIMPLE_ASSIGN:
6606 /* Try to simplify by copy-propagating the definition. */
6607 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6608 op2b);
6609
6610 case GIMPLE_PHI:
6611 /* If every argument to the PHI produces the same result when
6612 ANDed with the second comparison, we win.
6613 Do not do this unless the type is bool since we need a bool
6614 result here anyway. */
6615 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6616 {
6617 tree result = NULL_TREE;
6618 unsigned i;
6619 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6620 {
6621 tree arg = gimple_phi_arg_def (stmt, i);
6622
6623 /* If this PHI has itself as an argument, ignore it.
6624 If all the other args produce the same result,
6625 we're still OK. */
6626 if (arg == gimple_phi_result (stmt))
6627 continue;
6628 else if (TREE_CODE (arg) == INTEGER_CST)
6629 {
6630 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6631 {
6632 if (!result)
6633 result = boolean_false_node;
6634 else if (!integer_zerop (result))
6635 return NULL_TREE;
6636 }
6637 else if (!result)
6638 result = fold_build2 (code2, boolean_type_node,
6639 op2a, op2b);
6640 else if (!same_bool_comparison_p (result,
6641 code2, op2a, op2b))
6642 return NULL_TREE;
6643 }
6644 else if (TREE_CODE (arg) == SSA_NAME
6645 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6646 {
6647 tree temp;
6648 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6649 /* In simple cases we can look through PHI nodes,
6650 but we have to be careful with loops.
6651 See PR49073. */
6652 if (! dom_info_available_p (CDI_DOMINATORS)
6653 || gimple_bb (def_stmt) == gimple_bb (stmt)
6654 || dominated_by_p (CDI_DOMINATORS,
6655 gimple_bb (def_stmt),
6656 gimple_bb (stmt)))
6657 return NULL_TREE;
6658 temp = and_var_with_comparison (type, arg, invert, code2,
6659 op2a, op2b);
6660 if (!temp)
6661 return NULL_TREE;
6662 else if (!result)
6663 result = temp;
6664 else if (!same_bool_result_p (result, temp))
6665 return NULL_TREE;
6666 }
6667 else
6668 return NULL_TREE;
6669 }
6670 return result;
6671 }
6672
6673 default:
6674 break;
6675 }
6676 }
6677 return NULL_TREE;
6678 }
6679
6680 /* Helper function for maybe_fold_and_comparisons and
6681 maybe_fold_or_comparisons: try to simplify the AND/OR (per CODE) of the
6682 two comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) through match.pd.
6683 Return NULL_TREE if we can't simplify this to a single expression. To
6684 significantly lower the cost of building SSA names / gimple stmts, we
6685 allocate them on the stack. This makes the code a bit ugly. */
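/* Illustrative sketch (hypothetical SSA names): the statements built
   below exist only so that match.pd patterns can look through them;
   e.g. requesting the BIT_AND_EXPR of a_1 < b_2 and a_1 <= b_2 can
   resimplify to the single comparison a_1 < b_2 without committing
   any SSA names to the IL.  */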
6686
6687 static tree
6688 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6689 enum tree_code code1,
6690 tree op1a, tree op1b,
6691 enum tree_code code2, tree op2a,
6692 tree op2b)
6693 {
6694 /* Allocate gimple stmt1 on the stack. */
6695 gassign *stmt1
6696 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6697 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6698 gimple_assign_set_rhs_code (stmt1, code1);
6699 gimple_assign_set_rhs1 (stmt1, op1a);
6700 gimple_assign_set_rhs2 (stmt1, op1b);
6701
6702 /* Allocate gimple stmt2 on the stack. */
6703 gassign *stmt2
6704 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6705 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6706 gimple_assign_set_rhs_code (stmt2, code2);
6707 gimple_assign_set_rhs1 (stmt2, op2a);
6708 gimple_assign_set_rhs2 (stmt2, op2b);
6709
6710 /* Allocate SSA names(lhs1) on the stack. */
6711 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6712 memset (lhs1, 0, sizeof (tree_ssa_name));
6713 TREE_SET_CODE (lhs1, SSA_NAME);
6714 TREE_TYPE (lhs1) = type;
6715 init_ssa_name_imm_use (lhs1);
6716
6717 /* Allocate SSA names(lhs2) on the stack. */
6718 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6719 memset (lhs2, 0, sizeof (tree_ssa_name));
6720 TREE_SET_CODE (lhs2, SSA_NAME);
6721 TREE_TYPE (lhs2) = type;
6722 init_ssa_name_imm_use (lhs2);
6723
6724 gimple_assign_set_lhs (stmt1, lhs1);
6725 gimple_assign_set_lhs (stmt2, lhs2);
6726
6727 gimple_match_op op (gimple_match_cond::UNCOND, code,
6728 type, gimple_assign_lhs (stmt1),
6729 gimple_assign_lhs (stmt2));
6730 if (op.resimplify (NULL, follow_all_ssa_edges))
6731 {
6732 if (gimple_simplified_result_is_gimple_val (&op))
6733 {
6734 tree res = op.ops[0];
6735 if (res == lhs1)
6736 return build2 (code1, type, op1a, op1b);
6737 else if (res == lhs2)
6738 return build2 (code2, type, op2a, op2b);
6739 else
6740 return res;
6741 }
6742 else if (op.code.is_tree_code ()
6743 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6744 {
6745 tree op0 = op.ops[0];
6746 tree op1 = op.ops[1];
6747 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6748 return NULL_TREE; /* not simple */
6749
6750 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6751 }
6752 }
6753
6754 return NULL_TREE;
6755 }
6756
6757 /* Try to simplify the AND of two comparisons, specified by
6758 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6759 If this can be simplified to a single expression (without requiring
6760 introducing more SSA variables to hold intermediate values),
6761 return the resulting tree. Otherwise return NULL_TREE.
6762 If the result expression is non-null, it has boolean type. */
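/* Illustrative example (hypothetical SSA names):
   maybe_fold_and_comparisons can combine 'x_1 < y_2' AND 'x_1 <= y_2'
   into the single tree 'x_1 < y_2'; when no single comparison captures
   the conjunction it returns NULL_TREE.  */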
6763
6764 tree
6765 maybe_fold_and_comparisons (tree type,
6766 enum tree_code code1, tree op1a, tree op1b,
6767 enum tree_code code2, tree op2a, tree op2b)
6768 {
6769 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6770 return t;
6771
6772 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6773 return t;
6774
6775 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6776 op1a, op1b, code2, op2a,
6777 op2b))
6778 return t;
6779
6780 return NULL_TREE;
6781 }
6782
6783 /* Helper function for or_comparisons_1: try to simplify the OR of the
6784 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6785 If INVERT is true, invert the value of VAR before doing the OR.
6786 Return NULL_TREE if we can't simplify this to a single expression. */
6787
6788 static tree
6789 or_var_with_comparison (tree type, tree var, bool invert,
6790 enum tree_code code2, tree op2a, tree op2b)
6791 {
6792 tree t;
6793 gimple *stmt = SSA_NAME_DEF_STMT (var);
6794
6795 /* We can only deal with variables whose definitions are assignments. */
6796 if (!is_gimple_assign (stmt))
6797 return NULL_TREE;
6798
6799 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6800 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6801 Then we only have to consider the simpler non-inverted cases. */
6802 if (invert)
6803 t = and_var_with_comparison_1 (type, stmt,
6804 invert_tree_comparison (code2, false),
6805 op2a, op2b);
6806 else
6807 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6808 return canonicalize_bool (t, invert);
6809 }
6810
6811 /* Try to simplify the OR of the ssa variable defined by the assignment
6812 STMT with the comparison specified by (OP2A CODE2 OP2B).
6813 Return NULL_TREE if we can't simplify this to a single expression. */
6814
6815 static tree
6816 or_var_with_comparison_1 (tree type, gimple *stmt,
6817 enum tree_code code2, tree op2a, tree op2b)
6818 {
6819 tree var = gimple_assign_lhs (stmt);
6820 tree true_test_var = NULL_TREE;
6821 tree false_test_var = NULL_TREE;
6822 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6823
6824 /* Check for identities like (var OR (var != 0)) => true. */
6825 if (TREE_CODE (op2a) == SSA_NAME
6826 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6827 {
6828 if ((code2 == NE_EXPR && integer_zerop (op2b))
6829 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6830 {
6831 true_test_var = op2a;
6832 if (var == true_test_var)
6833 return var;
6834 }
6835 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6836 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6837 {
6838 false_test_var = op2a;
6839 if (var == false_test_var)
6840 return boolean_true_node;
6841 }
6842 }
6843
6844 /* If the definition is a comparison, recurse on it. */
6845 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6846 {
6847 tree t = or_comparisons_1 (type, innercode,
6848 gimple_assign_rhs1 (stmt),
6849 gimple_assign_rhs2 (stmt),
6850 code2,
6851 op2a,
6852 op2b);
6853 if (t)
6854 return t;
6855 }
6856
6857 /* If the definition is an AND or OR expression, we may be able to
6858 simplify by reassociating. */
6859 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6860 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6861 {
6862 tree inner1 = gimple_assign_rhs1 (stmt);
6863 tree inner2 = gimple_assign_rhs2 (stmt);
6864 gimple *s;
6865 tree t;
6866 tree partial = NULL_TREE;
6867 bool is_or = (innercode == BIT_IOR_EXPR);
6868
6869 /* Check for boolean identities that don't require recursive examination
6870 of inner1/inner2:
6871 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6872 inner1 OR (inner1 AND inner2) => inner1
6873 !inner1 OR (inner1 OR inner2) => true
6874 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6875 */
6876 if (inner1 == true_test_var)
6877 return (is_or ? var : inner1);
6878 else if (inner2 == true_test_var)
6879 return (is_or ? var : inner2);
6880 else if (inner1 == false_test_var)
6881 return (is_or
6882 ? boolean_true_node
6883 : or_var_with_comparison (type, inner2, false, code2, op2a,
6884 op2b));
6885 else if (inner2 == false_test_var)
6886 return (is_or
6887 ? boolean_true_node
6888 : or_var_with_comparison (type, inner1, false, code2, op2a,
6889 op2b));
6890
6891 /* Next, redistribute/reassociate the OR across the inner tests.
6892 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6893 if (TREE_CODE (inner1) == SSA_NAME
6894 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6895 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6896 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6897 gimple_assign_rhs1 (s),
6898 gimple_assign_rhs2 (s),
6899 code2, op2a, op2b)))
6900 {
6901 /* Handle the OR case, where we are reassociating:
6902 (inner1 OR inner2) OR (op2a code2 op2b)
6903 => (t OR inner2)
6904 If the partial result t is a constant, we win. Otherwise
6905 continue on to try reassociating with the other inner test. */
6906 if (is_or)
6907 {
6908 if (integer_onep (t))
6909 return boolean_true_node;
6910 else if (integer_zerop (t))
6911 return inner2;
6912 }
6913
6914 /* Handle the AND case, where we are redistributing:
6915 (inner1 AND inner2) OR (op2a code2 op2b)
6916 => (t AND (inner2 OR (op2a code op2b))) */
6917 else if (integer_zerop (t))
6918 return boolean_false_node;
6919
6920 /* Save partial result for later. */
6921 partial = t;
6922 }
6923
6924 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6925 if (TREE_CODE (inner2) == SSA_NAME
6926 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6927 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6928 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6929 gimple_assign_rhs1 (s),
6930 gimple_assign_rhs2 (s),
6931 code2, op2a, op2b)))
6932 {
6933 /* Handle the OR case, where we are reassociating:
6934 (inner1 OR inner2) OR (op2a code2 op2b)
6935 => (inner1 OR t)
6936 => (t OR partial) */
6937 if (is_or)
6938 {
6939 if (integer_zerop (t))
6940 return inner1;
6941 else if (integer_onep (t))
6942 return boolean_true_node;
6943 /* If both are the same, we can apply the identity
6944 (x OR x) == x. */
6945 else if (partial && same_bool_result_p (t, partial))
6946 return t;
6947 }
6948
6949 /* Handle the AND case, where we are redistributing:
6950 (inner1 AND inner2) OR (op2a code2 op2b)
6951 => (t AND (inner1 OR (op2a code2 op2b)))
6952 => (t AND partial) */
6953 else
6954 {
6955 if (integer_zerop (t))
6956 return boolean_false_node;
6957 else if (partial)
6958 {
6959 /* We already got a simplification for the other
6960 operand to the redistributed AND expression. The
6961 interesting case is when at least one is true.
6962 Or, if both are the same, we can apply the identity
6963 (x AND x) == x. */
6964 if (integer_onep (partial))
6965 return t;
6966 else if (integer_onep (t))
6967 return partial;
6968 else if (same_bool_result_p (t, partial))
6969 return t;
6970 }
6971 }
6972 }
6973 }
6974 return NULL_TREE;
6975 }
6976
6977 /* Try to simplify the OR of two comparisons defined by
6978 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6979 If this can be done without constructing an intermediate value,
6980 return the resulting tree; otherwise NULL_TREE is returned.
6981 This function is deliberately asymmetric as it recurses on SSA_DEFs
6982 in the first comparison but not the second. */
6983
6984 static tree
6985 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6986 enum tree_code code2, tree op2a, tree op2b)
6987 {
6988 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6989
6990 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6991 if (operand_equal_p (op1a, op2a, 0)
6992 && operand_equal_p (op1b, op2b, 0))
6993 {
6994 /* Result will be either NULL_TREE, or a combined comparison. */
6995 tree t = combine_comparisons (UNKNOWN_LOCATION,
6996 TRUTH_ORIF_EXPR, code1, code2,
6997 truth_type, op1a, op1b);
6998 if (t)
6999 return t;
7000 }
7001
7002 /* Likewise the swapped case of the above. */
7003 if (operand_equal_p (op1a, op2b, 0)
7004 && operand_equal_p (op1b, op2a, 0))
7005 {
7006 /* Result will be either NULL_TREE, or a combined comparison. */
7007 tree t = combine_comparisons (UNKNOWN_LOCATION,
7008 TRUTH_ORIF_EXPR, code1,
7009 swap_tree_comparison (code2),
7010 truth_type, op1a, op1b);
7011 if (t)
7012 return t;
7013 }
7014
7015 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7016 NAME's definition is a truth value. See if there are any simplifications
7017 that can be done against the NAME's definition. */
7018 if (TREE_CODE (op1a) == SSA_NAME
7019 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7020 && (integer_zerop (op1b) || integer_onep (op1b)))
7021 {
7022 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7023 || (code1 == NE_EXPR && integer_onep (op1b)));
7024 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7025 switch (gimple_code (stmt))
7026 {
7027 case GIMPLE_ASSIGN:
7028 /* Try to simplify by copy-propagating the definition. */
7029 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7030 op2b);
7031
7032 case GIMPLE_PHI:
7033 /* If every argument to the PHI produces the same result when
7034 ORed with the second comparison, we win.
7035 Do not do this unless the type is bool since we need a bool
7036 result here anyway. */
7037 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7038 {
7039 tree result = NULL_TREE;
7040 unsigned i;
7041 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7042 {
7043 tree arg = gimple_phi_arg_def (stmt, i);
7044
7045 /* If this PHI has itself as an argument, ignore it.
7046 If all the other args produce the same result,
7047 we're still OK. */
7048 if (arg == gimple_phi_result (stmt))
7049 continue;
7050 else if (TREE_CODE (arg) == INTEGER_CST)
7051 {
7052 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7053 {
7054 if (!result)
7055 result = boolean_true_node;
7056 else if (!integer_onep (result))
7057 return NULL_TREE;
7058 }
7059 else if (!result)
7060 result = fold_build2 (code2, boolean_type_node,
7061 op2a, op2b);
7062 else if (!same_bool_comparison_p (result,
7063 code2, op2a, op2b))
7064 return NULL_TREE;
7065 }
7066 else if (TREE_CODE (arg) == SSA_NAME
7067 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7068 {
7069 tree temp;
7070 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7071 /* In simple cases we can look through PHI nodes,
7072 but we have to be careful with loops.
7073 See PR49073. */
7074 if (! dom_info_available_p (CDI_DOMINATORS)
7075 || gimple_bb (def_stmt) == gimple_bb (stmt)
7076 || dominated_by_p (CDI_DOMINATORS,
7077 gimple_bb (def_stmt),
7078 gimple_bb (stmt)))
7079 return NULL_TREE;
7080 temp = or_var_with_comparison (type, arg, invert, code2,
7081 op2a, op2b);
7082 if (!temp)
7083 return NULL_TREE;
7084 else if (!result)
7085 result = temp;
7086 else if (!same_bool_result_p (result, temp))
7087 return NULL_TREE;
7088 }
7089 else
7090 return NULL_TREE;
7091 }
7092 return result;
7093 }
7094
7095 default:
7096 break;
7097 }
7098 }
7099 return NULL_TREE;
7100 }
7101
7102 /* Try to simplify the OR of two comparisons, specified by
7103 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7104 If this can be simplified to a single expression (without requiring
7105 introducing more SSA variables to hold intermediate values),
7106 return the resulting tree. Otherwise return NULL_TREE.
7107 If the result expression is non-null, it has boolean type. */
7108
7109 tree
7110 maybe_fold_or_comparisons (tree type,
7111 enum tree_code code1, tree op1a, tree op1b,
7112 enum tree_code code2, tree op2a, tree op2b)
7113 {
7114 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7115 return t;
7116
7117 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7118 return t;
7119
7120 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7121 op1a, op1b, code2, op2a,
7122 op2b))
7123 return t;
7124
7125 return NULL_TREE;
7126 }
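
/* Usage sketch (illustrative only; X_1 and CST5 stand for a hypothetical
   SSA name and integer constant, not names from this file):

     tree t = maybe_fold_or_comparisons (boolean_type_node,
                                         LT_EXPR, x_1, cst5,
                                         GE_EXPR, x_1, cst5);

   Since (X_1 < 5) || (X_1 >= 5) covers all cases for an integer X_1,
   T is expected to be boolean_true_node here; when no fold applies the
   function returns NULL_TREE and the caller keeps both comparisons.  */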
7127
7128 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7129
7130 Either NULL_TREE, a simplified but non-constant tree, or a constant
7131 is returned.
7132
7133 ??? This should go into a gimple-fold-inline.h file to be eventually
7134 privatized with the single valueize function used in the various TUs
7135 to avoid the indirect function call overhead. */
7136
7137 tree
7138 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7139 tree (*gvalueize) (tree))
7140 {
7141 gimple_match_op res_op;
7142 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7143 edges if there are intermediate VARYING defs. For this reason
7144 do not follow SSA edges here even though SCCVN can technically
7145 deal just fine with that. */
7146 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7147 {
7148 tree res = NULL_TREE;
7149 if (gimple_simplified_result_is_gimple_val (&res_op))
7150 res = res_op.ops[0];
7151 else if (mprts_hook)
7152 res = mprts_hook (&res_op);
7153 if (res)
7154 {
7155 if (dump_file && dump_flags & TDF_DETAILS)
7156 {
7157 fprintf (dump_file, "Match-and-simplified ");
7158 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7159 fprintf (dump_file, " to ");
7160 print_generic_expr (dump_file, res);
7161 fprintf (dump_file, "\n");
7162 }
7163 return res;
7164 }
7165 }
7166
7167 location_t loc = gimple_location (stmt);
7168 switch (gimple_code (stmt))
7169 {
7170 case GIMPLE_ASSIGN:
7171 {
7172 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7173
7174 switch (get_gimple_rhs_class (subcode))
7175 {
7176 case GIMPLE_SINGLE_RHS:
7177 {
7178 tree rhs = gimple_assign_rhs1 (stmt);
7179 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7180
7181 if (TREE_CODE (rhs) == SSA_NAME)
7182 {
7183 /* If the RHS is an SSA_NAME, return its known constant value,
7184 if any. */
7185 return (*valueize) (rhs);
7186 }
7187 /* Handle propagating invariant addresses into address
7188 operations. */
7189 else if (TREE_CODE (rhs) == ADDR_EXPR
7190 && !is_gimple_min_invariant (rhs))
7191 {
7192 poly_int64 offset = 0;
7193 tree base;
7194 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7195 &offset,
7196 valueize);
7197 if (base
7198 && (CONSTANT_CLASS_P (base)
7199 || decl_address_invariant_p (base)))
7200 return build_invariant_address (TREE_TYPE (rhs),
7201 base, offset);
7202 }
7203 else if (TREE_CODE (rhs) == CONSTRUCTOR
7204 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7205 && known_eq (CONSTRUCTOR_NELTS (rhs),
7206 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7207 {
7208 unsigned i, nelts;
7209 tree val;
7210
7211 nelts = CONSTRUCTOR_NELTS (rhs);
7212 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7213 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7214 {
7215 val = (*valueize) (val);
7216 if (TREE_CODE (val) == INTEGER_CST
7217 || TREE_CODE (val) == REAL_CST
7218 || TREE_CODE (val) == FIXED_CST)
7219 vec.quick_push (val);
7220 else
7221 return NULL_TREE;
7222 }
7223
7224 return vec.build ();
7225 }
7226 if (subcode == OBJ_TYPE_REF)
7227 {
7228 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7229 /* If callee is constant, we can fold away the wrapper. */
7230 if (is_gimple_min_invariant (val))
7231 return val;
7232 }
7233
7234 if (kind == tcc_reference)
7235 {
7236 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7237 || TREE_CODE (rhs) == REALPART_EXPR
7238 || TREE_CODE (rhs) == IMAGPART_EXPR)
7239 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7240 {
7241 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7242 return fold_unary_loc (EXPR_LOCATION (rhs),
7243 TREE_CODE (rhs),
7244 TREE_TYPE (rhs), val);
7245 }
7246 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7247 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7248 {
7249 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7250 return fold_ternary_loc (EXPR_LOCATION (rhs),
7251 TREE_CODE (rhs),
7252 TREE_TYPE (rhs), val,
7253 TREE_OPERAND (rhs, 1),
7254 TREE_OPERAND (rhs, 2));
7255 }
7256 else if (TREE_CODE (rhs) == MEM_REF
7257 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7258 {
7259 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7260 if (TREE_CODE (val) == ADDR_EXPR
7261 && is_gimple_min_invariant (val))
7262 {
7263 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7264 unshare_expr (val),
7265 TREE_OPERAND (rhs, 1));
7266 if (tem)
7267 rhs = tem;
7268 }
7269 }
7270 return fold_const_aggregate_ref_1 (rhs, valueize);
7271 }
7272 else if (kind == tcc_declaration)
7273 return get_symbol_constant_value (rhs);
7274 return rhs;
7275 }
7276
7277 case GIMPLE_UNARY_RHS:
7278 return NULL_TREE;
7279
7280 case GIMPLE_BINARY_RHS:
7281 /* Translate &x + CST into an invariant form suitable for
7282 further propagation. */
7283 if (subcode == POINTER_PLUS_EXPR)
7284 {
7285 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7286 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7287 if (TREE_CODE (op0) == ADDR_EXPR
7288 && TREE_CODE (op1) == INTEGER_CST)
7289 {
7290 tree off = fold_convert (ptr_type_node, op1);
7291 return build1_loc
7292 (loc, ADDR_EXPR, TREE_TYPE (op0),
7293 fold_build2 (MEM_REF,
7294 TREE_TYPE (TREE_TYPE (op0)),
7295 unshare_expr (op0), off));
7296 }
7297 }
7298 /* Canonicalize bool != 0 and bool == 0 appearing after
7299 valueization. While gimple_simplify handles this
7300 it can get confused by the ~X == 1 -> X == 0 transform
7301 which we can't reduce to an SSA name or a constant
7302 (and we have no way to tell gimple_simplify not to
7303 consider those transforms in the first place). */
7304 else if (subcode == EQ_EXPR
7305 || subcode == NE_EXPR)
7306 {
7307 tree lhs = gimple_assign_lhs (stmt);
7308 tree op0 = gimple_assign_rhs1 (stmt);
7309 if (useless_type_conversion_p (TREE_TYPE (lhs),
7310 TREE_TYPE (op0)))
7311 {
7312 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7313 op0 = (*valueize) (op0);
7314 if (TREE_CODE (op0) == INTEGER_CST)
7315 std::swap (op0, op1);
7316 if (TREE_CODE (op1) == INTEGER_CST
7317 && ((subcode == NE_EXPR && integer_zerop (op1))
7318 || (subcode == EQ_EXPR && integer_onep (op1))))
7319 return op0;
7320 }
7321 }
7322 return NULL_TREE;
7323
7324 case GIMPLE_TERNARY_RHS:
7325 {
7326 /* Handle ternary operators that can appear in GIMPLE form. */
7327 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7328 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7329 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7330 return fold_ternary_loc (loc, subcode,
7331 gimple_expr_type (stmt), op0, op1, op2);
7332 }
7333
7334 default:
7335 gcc_unreachable ();
7336 }
7337 }
7338
7339 case GIMPLE_CALL:
7340 {
7341 tree fn;
7342 gcall *call_stmt = as_a <gcall *> (stmt);
7343
7344 if (gimple_call_internal_p (stmt))
7345 {
7346 enum tree_code subcode = ERROR_MARK;
7347 switch (gimple_call_internal_fn (stmt))
7348 {
7349 case IFN_UBSAN_CHECK_ADD:
7350 subcode = PLUS_EXPR;
7351 break;
7352 case IFN_UBSAN_CHECK_SUB:
7353 subcode = MINUS_EXPR;
7354 break;
7355 case IFN_UBSAN_CHECK_MUL:
7356 subcode = MULT_EXPR;
7357 break;
7358 case IFN_BUILTIN_EXPECT:
7359 {
7360 tree arg0 = gimple_call_arg (stmt, 0);
7361 tree op0 = (*valueize) (arg0);
7362 if (TREE_CODE (op0) == INTEGER_CST)
7363 return op0;
7364 return NULL_TREE;
7365 }
7366 default:
7367 return NULL_TREE;
7368 }
7369 tree arg0 = gimple_call_arg (stmt, 0);
7370 tree arg1 = gimple_call_arg (stmt, 1);
7371 tree op0 = (*valueize) (arg0);
7372 tree op1 = (*valueize) (arg1);
7373
7374 if (TREE_CODE (op0) != INTEGER_CST
7375 || TREE_CODE (op1) != INTEGER_CST)
7376 {
7377 switch (subcode)
7378 {
7379 case MULT_EXPR:
7380 /* x * 0 = 0 * x = 0 without overflow. */
7381 if (integer_zerop (op0) || integer_zerop (op1))
7382 return build_zero_cst (TREE_TYPE (arg0));
7383 break;
7384 case MINUS_EXPR:
7385 /* y - y = 0 without overflow. */
7386 if (operand_equal_p (op0, op1, 0))
7387 return build_zero_cst (TREE_TYPE (arg0));
7388 break;
7389 default:
7390 break;
7391 }
7392 }
7393 tree res
7394 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7395 if (res
7396 && TREE_CODE (res) == INTEGER_CST
7397 && !TREE_OVERFLOW (res))
7398 return res;
7399 return NULL_TREE;
7400 }
7401
7402 fn = (*valueize) (gimple_call_fn (stmt));
7403 if (TREE_CODE (fn) == ADDR_EXPR
7404 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7405 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7406 && gimple_builtin_call_types_compatible_p (stmt,
7407 TREE_OPERAND (fn, 0)))
7408 {
7409 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7410 tree retval;
7411 unsigned i;
7412 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7413 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7414 retval = fold_builtin_call_array (loc,
7415 gimple_call_return_type (call_stmt),
7416 fn, gimple_call_num_args (stmt), args);
7417 if (retval)
7418 {
7419 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7420 STRIP_NOPS (retval);
7421 retval = fold_convert (gimple_call_return_type (call_stmt),
7422 retval);
7423 }
7424 return retval;
7425 }
7426 return NULL_TREE;
7427 }
7428
7429 default:
7430 return NULL_TREE;
7431 }
7432 }
7433
7434 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7435 Returns NULL_TREE if folding to a constant is not possible, otherwise
7436 returns a constant according to is_gimple_min_invariant. */
7437
7438 tree
7439 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7440 {
7441 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7442 if (res && is_gimple_min_invariant (res))
7443 return res;
7444 return NULL_TREE;
7445 }
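
/* Usage sketch (illustrative; IDENTITY_VALUEIZE is a hypothetical helper,
   not part of this file): a caller that tracks no lattice of its own can
   still use this folder by passing an identity valueizer:

     static tree
     identity_valueize (tree t)
     {
       return t;
     }
     ...
     tree val = gimple_fold_stmt_to_constant (stmt, identity_valueize);

   VAL is then non-NULL only when STMT folds to a constant on its own,
   without substituting any SSA name by a lattice value.  */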
7446
7447
7448 /* The following set of functions are supposed to fold references using
7449 their constant initializers. */
7450
7451 /* See if we can find a constructor defining the value of BASE.
7452 When we know the constructor at a constant offset (such as when
7453 BASE is array[40] and we do know the constructor of the array),
7454 BIT_OFFSET is adjusted accordingly.
7455
7456 As a special case, return error_mark_node when the constructor
7457 is not explicitly available but is known to be zero, as for
7458 'static const int a;'. */
7459 static tree
7460 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7461 tree (*valueize)(tree))
7462 {
7463 poly_int64 bit_offset2, size, max_size;
7464 bool reverse;
7465
7466 if (TREE_CODE (base) == MEM_REF)
7467 {
7468 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7469 if (!boff.to_shwi (bit_offset))
7470 return NULL_TREE;
7471
7472 if (valueize
7473 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7474 base = valueize (TREE_OPERAND (base, 0));
7475 if (!base || TREE_CODE (base) != ADDR_EXPR)
7476 return NULL_TREE;
7477 base = TREE_OPERAND (base, 0);
7478 }
7479 else if (valueize
7480 && TREE_CODE (base) == SSA_NAME)
7481 base = valueize (base);
7482
7483 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7484 DECL_INITIAL. If BASE is a nested reference into another
7485 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7486 the inner reference. */
7487 switch (TREE_CODE (base))
7488 {
7489 case VAR_DECL:
7490 case CONST_DECL:
7491 {
7492 tree init = ctor_for_folding (base);
7493
7494 /* Our semantics are the exact opposite of ctor_for_folding's:
7495 here NULL means unknown, while error_mark_node means zero. */
7496 if (init == error_mark_node)
7497 return NULL_TREE;
7498 if (!init)
7499 return error_mark_node;
7500 return init;
7501 }
7502
7503 case VIEW_CONVERT_EXPR:
7504 return get_base_constructor (TREE_OPERAND (base, 0),
7505 bit_offset, valueize);
7506
7507 case ARRAY_REF:
7508 case COMPONENT_REF:
7509 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7510 &reverse);
7511 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7512 return NULL_TREE;
7513 *bit_offset += bit_offset2;
7514 return get_base_constructor (base, bit_offset, valueize);
7515
7516 case CONSTRUCTOR:
7517 return base;
7518
7519 default:
7520 if (CONSTANT_CLASS_P (base))
7521 return base;
7522
7523 return NULL_TREE;
7524 }
7525 }
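
/* For example (hypothetical declaration), for a read from

     static const int a;

   ctor_for_folding returns NULL_TREE (no explicit initializer, value
   known to be zero), which this function inverts into error_mark_node;
   callers such as fold_const_aggregate_ref_1 then fold the read to
   build_zero_cst of the accessed type.  */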
7526
7527 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7528 to the memory at bit OFFSET. When non-null, TYPE is the expected
7529 type of the reference; otherwise the type of the referenced element
7530 is used instead. When SIZE is zero, attempt to fold a reference to
7531 the entire element which OFFSET refers to. Increment *SUBOFF by
7532 the bit offset of the accessed element. */
7533
7534 static tree
7535 fold_array_ctor_reference (tree type, tree ctor,
7536 unsigned HOST_WIDE_INT offset,
7537 unsigned HOST_WIDE_INT size,
7538 tree from_decl,
7539 unsigned HOST_WIDE_INT *suboff)
7540 {
7541 offset_int low_bound;
7542 offset_int elt_size;
7543 offset_int access_index;
7544 tree domain_type = NULL_TREE;
7545 HOST_WIDE_INT inner_offset;
7546
7547 /* Compute low bound and elt size. */
7548 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7549 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7550 if (domain_type && TYPE_MIN_VALUE (domain_type))
7551 {
7552 /* Static constructors for variably sized objects make no sense. */
7553 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7554 return NULL_TREE;
7555 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7556 }
7557 else
7558 low_bound = 0;
7559 /* Static constructors for variably sized objects make no sense. */
7560 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7561 return NULL_TREE;
7562 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7563
7564 /* When TYPE is non-null, verify that it specifies a constant-sized
7565 access of a multiple of the array element size. Avoid division
7566 by zero below when ELT_SIZE is zero, such as with the result of
7567 an initializer for a zero-length array or an empty struct. */
7568 if (elt_size == 0
7569 || (type
7570 && (!TYPE_SIZE_UNIT (type)
7571 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7572 return NULL_TREE;
7573
7574 /* Compute the array index we look for. */
7575 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7576 elt_size);
7577 access_index += low_bound;
7578
7579 /* And offset within the access. */
7580 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7581
7582 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7583 if (size > elt_sz * BITS_PER_UNIT)
7584 {
7585 /* native_encode_expr constraints. */
7586 if (size > MAX_BITSIZE_MODE_ANY_MODE
7587 || size % BITS_PER_UNIT != 0
7588 || inner_offset % BITS_PER_UNIT != 0
7589 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7590 return NULL_TREE;
7591
7592 unsigned ctor_idx;
7593 tree val = get_array_ctor_element_at_index (ctor, access_index,
7594 &ctor_idx);
7595 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7596 return build_zero_cst (type);
7597
7598 /* native-encode adjacent ctor elements. */
7599 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7600 unsigned bufoff = 0;
7601 offset_int index = 0;
7602 offset_int max_index = access_index;
7603 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7604 if (!val)
7605 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7606 else if (!CONSTANT_CLASS_P (val))
7607 return NULL_TREE;
7608 if (!elt->index)
7609 ;
7610 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7611 {
7612 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7613 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7614 }
7615 else
7616 index = max_index = wi::to_offset (elt->index);
7617 index = wi::umax (index, access_index);
7618 do
7619 {
7620 if (bufoff + elt_sz > sizeof (buf))
7621 elt_sz = sizeof (buf) - bufoff;
7622 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7623 inner_offset / BITS_PER_UNIT);
7624 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7625 return NULL_TREE;
7626 inner_offset = 0;
7627 bufoff += len;
7628
7629 access_index += 1;
7630 if (wi::cmpu (access_index, index) == 0)
7631 val = elt->value;
7632 else if (wi::cmpu (access_index, max_index) > 0)
7633 {
7634 ctor_idx++;
7635 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7636 {
7637 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7638 ++max_index;
7639 }
7640 else
7641 {
7642 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7643 index = 0;
7644 max_index = access_index;
7645 if (!elt->index)
7646 ;
7647 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7648 {
7649 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7650 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7651 }
7652 else
7653 index = max_index = wi::to_offset (elt->index);
7654 index = wi::umax (index, access_index);
7655 if (wi::cmpu (access_index, index) == 0)
7656 val = elt->value;
7657 else
7658 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7659 }
7660 }
7661 }
7662 while (bufoff < size / BITS_PER_UNIT);
7663 *suboff += size;
7664 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7665 }
7666
7667 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7668 {
7669 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7670 {
7671 /* For the final reference to the entire accessed element
7672 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7673 may be null) in favor of the type of the element, and set
7674 SIZE to the size of the accessed element. */
7675 inner_offset = 0;
7676 type = TREE_TYPE (val);
7677 size = elt_sz * BITS_PER_UNIT;
7678 }
7679 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7680 && TREE_CODE (val) == CONSTRUCTOR
7681 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7682 /* If this isn't the last element in the CTOR, is itself a CTOR,
7683 and does not cover the whole object we are requesting, give up
7684 since we're not set up for combining from multiple CTORs. */
7685 return NULL_TREE;
7686
7687 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7688 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7689 suboff);
7690 }
7691
7692 /* Memory not explicitly mentioned in constructor is 0 (or
7693 the reference is out of range). */
7694 return type ? build_zero_cst (type) : NULL_TREE;
7695 }
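
/* Worked example (hypothetical numbers, assuming BITS_PER_UNIT == 8):
   for a CTOR of type int[10] with 4-byte elements (ELT_SIZE == 4) and
   a 32-bit read at bit OFFSET 96, the code above computes

     access_index = (96 / BITS_PER_UNIT) / 4 = 12 / 4 = 3
     inner_offset = 96 % (4 * BITS_PER_UNIT) = 96 % 32 = 0

   so the read is served entirely from constructor element 3 (plus
   LOW_BOUND, if the array domain does not start at zero).  */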
7696
7697 /* CTOR is a CONSTRUCTOR of an aggregate or vector. Fold a reference
7698 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7699 is the expected type of the reference; otherwise the type of
7700 the referenced member is used instead. When SIZE is zero,
7701 attempt to fold a reference to the entire member which OFFSET
7702 refers to. Increment *SUBOFF by the bit offset
7703 of the accessed member. */
7704
7705 static tree
7706 fold_nonarray_ctor_reference (tree type, tree ctor,
7707 unsigned HOST_WIDE_INT offset,
7708 unsigned HOST_WIDE_INT size,
7709 tree from_decl,
7710 unsigned HOST_WIDE_INT *suboff)
7711 {
7712 unsigned HOST_WIDE_INT cnt;
7713 tree cfield, cval;
7714
7715 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7716 cval)
7717 {
7718 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7719 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7720 tree field_size = DECL_SIZE (cfield);
7721
7722 if (!field_size)
7723 {
7724 /* Determine the size of the flexible array member from
7725 the size of the initializer provided for it. */
7726 field_size = TYPE_SIZE (TREE_TYPE (cval));
7727 }
7728
7729 /* Variable-sized objects in static constructors make no sense,
7730 but field_size can be NULL for flexible array members. */
7731 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7732 && TREE_CODE (byte_offset) == INTEGER_CST
7733 && (field_size != NULL_TREE
7734 ? TREE_CODE (field_size) == INTEGER_CST
7735 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7736
7737 /* Compute bit offset of the field. */
7738 offset_int bitoffset
7739 = (wi::to_offset (field_offset)
7740 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7741 /* Compute bit offset where the field ends. */
7742 offset_int bitoffset_end;
7743 if (field_size != NULL_TREE)
7744 bitoffset_end = bitoffset + wi::to_offset (field_size);
7745 else
7746 bitoffset_end = 0;
7747
7748 /* Compute the bit offset of the end of the desired access.
7749 As a special case, if the size of the desired access is
7750 zero, assume the access is to the entire field, and let
7751 the caller make any necessary adjustments based on the
7752 actual bounds of the field. */
7753 offset_int access_end = offset_int (offset);
7754 if (size)
7755 access_end += size;
7756 else
7757 access_end = bitoffset_end;
7758
7759 /* Is there any overlap between the desired access at
7760 [OFFSET, OFFSET+SIZE) and the offset of the field within
7761 the object at [BITOFFSET, BITOFFSET_END)? */
7762 if (wi::cmps (access_end, bitoffset) > 0
7763 && (field_size == NULL_TREE
7764 || wi::lts_p (offset, bitoffset_end)))
7765 {
7766 *suboff += bitoffset.to_uhwi ();
7767
7768 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7769 {
7770 /* For the final reference to the entire accessed member
7771 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7772 be null) in favor of the type of the member, and set
7773 SIZE to the size of the accessed member. */
7774 offset = bitoffset.to_uhwi ();
7775 type = TREE_TYPE (cval);
7776 size = (bitoffset_end - bitoffset).to_uhwi ();
7777 }
7778
7779 /* We do have overlap. Now see if the field is large enough
7780 to cover the access. Give up for accesses that extend
7781 beyond the end of the object or that span multiple fields. */
7782 if (wi::cmps (access_end, bitoffset_end) > 0)
7783 return NULL_TREE;
7784 if (offset < bitoffset)
7785 return NULL_TREE;
7786
7787 offset_int inner_offset = offset_int (offset) - bitoffset;
7788 return fold_ctor_reference (type, cval,
7789 inner_offset.to_uhwi (), size,
7790 from_decl, suboff);
7791 }
7792 }
7793
7794 if (!type)
7795 return NULL_TREE;
7796
7797 return build_zero_cst (type);
7798 }
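
/* Worked example (hypothetical layout, assuming BITS_PER_UNIT == 8):
   for a field whose DECL_FIELD_OFFSET is 4 bytes and whose
   DECL_FIELD_BIT_OFFSET is 8, the loop above computes

     bitoffset = 8 + (4 << LOG2_BITS_PER_UNIT) = 8 + 32 = 40

   so an 8-bit access at bit OFFSET 40 overlaps that field exactly and
   the fold recurses into the field's initializer with INNER_OFFSET 0. */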
7799
7800 /* CTOR is a value initializing memory. Fold a reference of TYPE and
7801 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7802 is zero, attempt to fold a reference to the entire subobject
7803 which POLY_OFFSET refers to. This is used when folding accesses to
7804 string members of aggregates. When non-null, increment *SUBOFF by
7805 the bit offset of the accessed subobject. */
7806
7807 tree
7808 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7809 const poly_uint64 &poly_size, tree from_decl,
7810 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7811 {
7812 tree ret;
7813
7814 /* We found the field with an exact match. */
7815 if (type
7816 && useless_type_conversion_p (type, TREE_TYPE (ctor))
7817 && known_eq (poly_offset, 0U))
7818 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7819
7820 /* The remaining optimizations need a constant size and offset. */
7821 unsigned HOST_WIDE_INT size, offset;
7822 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7823 return NULL_TREE;
7824
7825 /* We are at the end of the walk; see if we can view-convert the
7826 result. */
7827 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7828 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7829 && !compare_tree_int (TYPE_SIZE (type), size)
7830 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
7831 {
7832 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7833 if (ret)
7834 {
7835 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7836 if (ret)
7837 STRIP_USELESS_TYPE_CONVERSION (ret);
7838 }
7839 return ret;
7840 }
7841 /* For constants and byte-aligned/sized reads try to go through
7842 native_encode/interpret. */
7843 if (CONSTANT_CLASS_P (ctor)
7844 && BITS_PER_UNIT == 8
7845 && offset % BITS_PER_UNIT == 0
7846 && offset / BITS_PER_UNIT <= INT_MAX
7847 && size % BITS_PER_UNIT == 0
7848 && size <= MAX_BITSIZE_MODE_ANY_MODE
7849 && can_native_interpret_type_p (type))
7850 {
7851 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7852 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7853 offset / BITS_PER_UNIT);
7854 if (len > 0)
7855 return native_interpret_expr (type, buf, len);
7856 }
7857 if (TREE_CODE (ctor) == CONSTRUCTOR)
7858 {
7859 unsigned HOST_WIDE_INT dummy = 0;
7860 if (!suboff)
7861 suboff = &dummy;
7862
7863 tree ret;
7864 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7865 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7866 ret = fold_array_ctor_reference (type, ctor, offset, size,
7867 from_decl, suboff);
7868 else
7869 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7870 from_decl, suboff);
7871
7872 /* Fall back to native_encode_initializer. Needs to be done
7873 only in the outermost fold_ctor_reference call (because it itself
7874 recurses into CONSTRUCTORs) and doesn't update suboff. */
7875 if (ret == NULL_TREE
7876 && suboff == &dummy
7877 && BITS_PER_UNIT == 8
7878 && offset % BITS_PER_UNIT == 0
7879 && offset / BITS_PER_UNIT <= INT_MAX
7880 && size % BITS_PER_UNIT == 0
7881 && size <= MAX_BITSIZE_MODE_ANY_MODE
7882 && can_native_interpret_type_p (type))
7883 {
7884 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7885 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7886 offset / BITS_PER_UNIT);
7887 if (len > 0)
7888 return native_interpret_expr (type, buf, len);
7889 }
7890
7891 return ret;
7892 }
7893
7894 return NULL_TREE;
7895 }
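
/* Usage sketch (illustrative; A_DECL is a hypothetical VAR_DECL for
   "static const int a[2] = { 1, 2 };" on a target with 32-bit int):

     tree ctor = ctor_for_folding (a_decl);
     tree val = fold_ctor_reference (integer_type_node, ctor,
                                     4 * BITS_PER_UNIT, 32, a_decl);

   VAL is then expected to be the INTEGER_CST 2, read from the second
   element of the initializer.  */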
7896
7897 /* Return the tree representing the element referenced by T if T is an
7898 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
7899 names using VALUEIZE. Return NULL_TREE otherwise. */
7900
7901 tree
7902 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7903 {
7904 tree ctor, idx, base;
7905 poly_int64 offset, size, max_size;
7906 tree tem;
7907 bool reverse;
7908
7909 if (TREE_THIS_VOLATILE (t))
7910 return NULL_TREE;
7911
7912 if (DECL_P (t))
7913 return get_symbol_constant_value (t);
7914
7915 tem = fold_read_from_constant_string (t);
7916 if (tem)
7917 return tem;
7918
7919 switch (TREE_CODE (t))
7920 {
7921 case ARRAY_REF:
7922 case ARRAY_RANGE_REF:
7923 /* Constant indexes are handled well by get_base_constructor.
7924 Only special case variable offsets.
7925 FIXME: This code can't handle nested references with variable indexes
7926 (they will be handled only by iteration of ccp). Perhaps we can bring
7927 get_ref_base_and_extent here and make it use a valueize callback. */
7928 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7929 && valueize
7930 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
7931 && poly_int_tree_p (idx))
7932 {
7933 tree low_bound, unit_size;
7934
7935 /* If the resulting bit-offset is constant, track it. */
7936 if ((low_bound = array_ref_low_bound (t),
7937 poly_int_tree_p (low_bound))
7938 && (unit_size = array_ref_element_size (t),
7939 tree_fits_uhwi_p (unit_size)))
7940 {
7941 poly_offset_int woffset
7942 = wi::sext (wi::to_poly_offset (idx)
7943 - wi::to_poly_offset (low_bound),
7944 TYPE_PRECISION (TREE_TYPE (idx)));
7945 woffset *= tree_to_uhwi (unit_size);
7946 woffset *= BITS_PER_UNIT;
7947 if (woffset.to_shwi (&offset))
7948 {
7949 base = TREE_OPERAND (t, 0);
7950 ctor = get_base_constructor (base, &offset, valueize);
7951 /* Empty constructor. Always fold to 0. */
7952 if (ctor == error_mark_node)
7953 return build_zero_cst (TREE_TYPE (t));
7954 /* Out of bound array access. Value is undefined,
7955 but don't fold. */
7956 if (maybe_lt (offset, 0))
7957 return NULL_TREE;
7958 /* We cannot determine ctor. */
7959 if (!ctor)
7960 return NULL_TREE;
7961 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7962 tree_to_uhwi (unit_size)
7963 * BITS_PER_UNIT,
7964 base);
7965 }
7966 }
7967 }
7968 /* Fallthru. */
7969
7970 case COMPONENT_REF:
7971 case BIT_FIELD_REF:
7972 case TARGET_MEM_REF:
7973 case MEM_REF:
7974 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7975 ctor = get_base_constructor (base, &offset, valueize);
7976
7977 /* Empty constructor. Always fold to 0. */
7978 if (ctor == error_mark_node)
7979 return build_zero_cst (TREE_TYPE (t));
7980 /* We do not know precise address. */
7981 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7982 return NULL_TREE;
7983 /* We cannot determine ctor. */
7984 if (!ctor)
7985 return NULL_TREE;
7986
7987 /* Out of bound array access. Value is undefined, but don't fold. */
7988 if (maybe_lt (offset, 0))
7989 return NULL_TREE;
7990
7991 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
7992 if (tem)
7993 return tem;
7994
7995 /* For bit field reads try to read the representative and
7996 adjust. */
7997 if (TREE_CODE (t) == COMPONENT_REF
7998 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
7999 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8000 {
8001 HOST_WIDE_INT csize, coffset;
8002 tree field = TREE_OPERAND (t, 1);
8003 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8004 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8005 && size.is_constant (&csize)
8006 && offset.is_constant (&coffset)
8007 && (coffset % BITS_PER_UNIT != 0
8008 || csize % BITS_PER_UNIT != 0)
8009 && !reverse
8010 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8011 {
8012 poly_int64 bitoffset;
8013 poly_uint64 field_offset, repr_offset;
8014 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8015 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8016 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8017 else
8018 bitoffset = 0;
8019 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8020 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8021 HOST_WIDE_INT bitoff;
8022 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8023 - TYPE_PRECISION (TREE_TYPE (field)));
8024 if (bitoffset.is_constant (&bitoff)
8025 && bitoff >= 0
8026 && bitoff <= diff)
8027 {
8028 offset -= bitoff;
8029 size = tree_to_uhwi (DECL_SIZE (repr));
8030
8031 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8032 size, base);
8033 if (tem && TREE_CODE (tem) == INTEGER_CST)
8034 {
8035 if (!BYTES_BIG_ENDIAN)
8036 tem = wide_int_to_tree (TREE_TYPE (field),
8037 wi::lrshift (wi::to_wide (tem),
8038 bitoff));
8039 else
8040 tem = wide_int_to_tree (TREE_TYPE (field),
8041 wi::lrshift (wi::to_wide (tem),
8042 diff - bitoff));
8043 return tem;
8044 }
8045 }
8046 }
8047 }
8048 break;
8049
8050 case REALPART_EXPR:
8051 case IMAGPART_EXPR:
8052 {
8053 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8054 if (c && TREE_CODE (c) == COMPLEX_CST)
8055 return fold_build1_loc (EXPR_LOCATION (t),
8056 TREE_CODE (t), TREE_TYPE (t), c);
8057 break;
8058 }
8059
8060 default:
8061 break;
8062 }
8063
8064 return NULL_TREE;
8065 }
8066
8067 tree
8068 fold_const_aggregate_ref (tree t)
8069 {
8070 return fold_const_aggregate_ref_1 (t, NULL);
8071 }
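
/* Note (illustrative): this entry point passes a NULL valueizer, so

     tree val = fold_const_aggregate_ref (t);

   succeeds only when every index in T is already constant; callers
   that track an SSA lattice use fold_const_aggregate_ref_1 with their
   own valueize callback instead.  */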
8072
8073 /* Look up the virtual method with index TOKEN in a virtual table V
8074 at OFFSET.
8075 Set *CAN_REFER, if CAN_REFER is non-NULL, to false if the method is
8076 not referable or if the virtual table is ill-formed (such as rewritten
8077 by a non-C++-produced symbol); otherwise just return NULL in that case. */
8078
8079 tree
8080 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8081 tree v,
8082 unsigned HOST_WIDE_INT offset,
8083 bool *can_refer)
8084 {
8085 tree vtable = v, init, fn;
8086 unsigned HOST_WIDE_INT size;
8087 unsigned HOST_WIDE_INT elt_size, access_index;
8088 tree domain_type;
8089
8090 if (can_refer)
8091 *can_refer = true;
8092
8093 /* First of all, double-check that we have a virtual table. */
8094 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8095 {
8096 /* Pass down that we lost track of the target. */
8097 if (can_refer)
8098 *can_refer = false;
8099 return NULL_TREE;
8100 }
8101
8102 init = ctor_for_folding (v);
8103
8104 /* The virtual tables should always be born with constructors
8105 and we should always assume that they are available for
8106 folding. At the moment we do not stream them in all cases,
8107 but it should never happen that the ctor seems unreachable. */
8108 gcc_assert (init);
8109 if (init == error_mark_node)
8110 {
8111 /* Pass down that we lost track of the target. */
8112 if (can_refer)
8113 *can_refer = false;
8114 return NULL_TREE;
8115 }
8116 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8117 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8118 offset *= BITS_PER_UNIT;
8119 offset += token * size;
8120
8121 /* Look up the value in the constructor, which is assumed to be an array.
8122 This is equivalent to
8123 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8124 offset, size, NULL);
8125 but in constant time. We expect that the frontend produced a simple
8126 array without indexed initializers. */
8127
8128 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8129 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8130 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8131 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8132
8133 access_index = offset / BITS_PER_UNIT / elt_size;
8134 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8135
8136 /* The C++ FE can now produce indexed fields, and we check if the indexes
8137 match. */
8138 if (access_index < CONSTRUCTOR_NELTS (init))
8139 {
8140 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8141 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8142 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8143 STRIP_NOPS (fn);
8144 }
8145 else
8146 fn = NULL;
8147
8148 /* For a type-inconsistent program we may end up looking up a virtual
8149 method in a virtual table that does not contain TOKEN entries. We may
8150 overrun the virtual table and pick up a constant or RTTI info pointer.
8151 In any case the call is undefined. */
8152 if (!fn
8153 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8154 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8155 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8156 else
8157 {
8158 fn = TREE_OPERAND (fn, 0);
8159
8160 /* When cgraph node is missing and function is not public, we cannot
8161 devirtualize. This can happen in WHOPR when the actual method
8162 ends up in other partition, because we found devirtualization
8163 possibility too late. */
8164 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8165 {
8166 if (can_refer)
8167 {
8168 *can_refer = false;
8169 return fn;
8170 }
8171 return NULL_TREE;
8172 }
8173 }
8174
8175 /* Make sure we create a cgraph node for functions we'll reference.
8176 They can be non-existent if the reference comes from an entry
8177 of an external vtable for example. */
8178 cgraph_node::get_create (fn);
8179
8180 return fn;
8181 }
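
/* Worked example (hypothetical vtable, assuming BITS_PER_UNIT == 8):
   with 8-byte vtable slots (SIZE == 64 bits, ELT_SIZE == 8 bytes),
   OFFSET 16 and TOKEN 3, the code above computes

     offset = 16 * BITS_PER_UNIT + 3 * 64 = 128 + 192 = 320
     access_index = 320 / BITS_PER_UNIT / 8 = 5

   i.e. the method is taken from the sixth constructor element.  */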
8182
8183 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8184 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8185 KNOWN_BINFO carries the binfo describing the true type of
8186 OBJ_TYPE_REF_OBJECT(REF).
8187 Set *CAN_REFER, if CAN_REFER is non-NULL, to false if the method is
8188 not referable or if the virtual table is ill-formed (such as rewritten
8189 by a non-C++-produced symbol); otherwise just return NULL in that case. */
8190
8191 tree
8192 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8193 bool *can_refer)
8194 {
8195 unsigned HOST_WIDE_INT offset;
8196 tree v;
8197
8198 v = BINFO_VTABLE (known_binfo);
8199 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8200 if (!v)
8201 return NULL_TREE;
8202
8203 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8204 {
8205 if (can_refer)
8206 *can_refer = false;
8207 return NULL_TREE;
8208 }
8209 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8210 }
8211
8212 /* Given a pointer value T, return a simplified version of an
8213 indirection through T, or NULL_TREE if no simplification is
8214 possible. Note that the resulting type may differ from the
8215 pointed-to type, but only in the sense that it is still
8216 compatible from the langhooks point of view. */
8217
8218 tree
8219 gimple_fold_indirect_ref (tree t)
8220 {
8221 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8222 tree sub = t;
8223 tree subtype;
8224
8225 STRIP_NOPS (sub);
8226 subtype = TREE_TYPE (sub);
8227 if (!POINTER_TYPE_P (subtype)
8228 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8229 return NULL_TREE;
8230
8231 if (TREE_CODE (sub) == ADDR_EXPR)
8232 {
8233 tree op = TREE_OPERAND (sub, 0);
8234 tree optype = TREE_TYPE (op);
8235 /* *&p => p */
8236 if (useless_type_conversion_p (type, optype))
8237 return op;
8238
8239 /* *(foo *)&fooarray => fooarray[0] */
8240 if (TREE_CODE (optype) == ARRAY_TYPE
8241 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8242 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8243 {
8244 tree type_domain = TYPE_DOMAIN (optype);
8245 tree min_val = size_zero_node;
8246 if (type_domain && TYPE_MIN_VALUE (type_domain))
8247 min_val = TYPE_MIN_VALUE (type_domain);
8248 if (TREE_CODE (min_val) == INTEGER_CST)
8249 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8250 }
8251 /* *(foo *)&complexfoo => __real__ complexfoo */
8252 else if (TREE_CODE (optype) == COMPLEX_TYPE
8253 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8254 return fold_build1 (REALPART_EXPR, type, op);
8255 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8256 else if (TREE_CODE (optype) == VECTOR_TYPE
8257 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8258 {
8259 tree part_width = TYPE_SIZE (type);
8260 tree index = bitsize_int (0);
8261 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8262 }
8263 }
8264
8265 /* *(p + CST) -> ... */
8266 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8267 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8268 {
8269 tree addr = TREE_OPERAND (sub, 0);
8270 tree off = TREE_OPERAND (sub, 1);
8271 tree addrtype;
8272
8273 STRIP_NOPS (addr);
8274 addrtype = TREE_TYPE (addr);
8275
8276 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8277 if (TREE_CODE (addr) == ADDR_EXPR
8278 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8279 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8280 && tree_fits_uhwi_p (off))
8281 {
8282 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8283 tree part_width = TYPE_SIZE (type);
8284 unsigned HOST_WIDE_INT part_widthi
8285 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8286 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8287 tree index = bitsize_int (indexi);
8288 if (known_lt (offset / part_widthi,
8289 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8290 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8291 part_width, index);
8292 }
8293
8294 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8295 if (TREE_CODE (addr) == ADDR_EXPR
8296 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8297 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8298 {
8299 tree size = TYPE_SIZE_UNIT (type);
8300 if (tree_int_cst_equal (size, off))
8301 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8302 }
8303
8304 /* *(p + CST) -> MEM_REF <p, CST>. */
8305 if (TREE_CODE (addr) != ADDR_EXPR
8306 || DECL_P (TREE_OPERAND (addr, 0)))
8307 return fold_build2 (MEM_REF, type,
8308 addr,
8309 wide_int_to_tree (ptype, wi::to_wide (off)));
8310 }
8311
8312 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8313 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8314 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8315 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8316 {
8317 tree type_domain;
8318 tree min_val = size_zero_node;
8319 tree osub = sub;
8320 sub = gimple_fold_indirect_ref (sub);
8321 if (! sub)
8322 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8323 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8324 if (type_domain && TYPE_MIN_VALUE (type_domain))
8325 min_val = TYPE_MIN_VALUE (type_domain);
8326 if (TREE_CODE (min_val) == INTEGER_CST)
8327 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8328 }
8329
8330 return NULL_TREE;
8331 }
8332
8333 /* Return true if CODE is an operation that, when operating on signed
8334 integer types, involves undefined behavior on overflow and that
8335 can be expressed with unsigned arithmetic. */
8336
8337 bool
8338 arith_code_with_undefined_signed_overflow (tree_code code)
8339 {
8340 switch (code)
8341 {
8342 case ABS_EXPR:
8343 case PLUS_EXPR:
8344 case MINUS_EXPR:
8345 case MULT_EXPR:
8346 case NEGATE_EXPR:
8347 case POINTER_PLUS_EXPR:
8348 return true;
8349 default:
8350 return false;
8351 }
8352 }
8353
8354 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8355 operation that can be transformed to unsigned arithmetic by converting
8356 its operands, carrying out the operation in the corresponding unsigned
8357 type, and converting the result back to the original type.
8358
8359 Returns a sequence of statements that replace STMT and also contain
8360 a modified form of STMT itself. */
8361
8362 gimple_seq
8363 rewrite_to_defined_overflow (gimple *stmt)
8364 {
8365 if (dump_file && (dump_flags & TDF_DETAILS))
8366 {
8367 fprintf (dump_file, "rewriting stmt with undefined signed "
8368 "overflow ");
8369 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8370 }
8371
8372 tree lhs = gimple_assign_lhs (stmt);
8373 tree type = unsigned_type_for (TREE_TYPE (lhs));
8374 gimple_seq stmts = NULL;
8375 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8376 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8377 else
8378 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8379 {
8380 tree op = gimple_op (stmt, i);
8381 op = gimple_convert (&stmts, type, op);
8382 gimple_set_op (stmt, i, op);
8383 }
8384 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8385 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8386 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8387 gimple_set_modified (stmt, true);
8388 gimple_seq_add_stmt (&stmts, stmt);
8389 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8390 gimple_seq_add_stmt (&stmts, cvt);
8391
8392 return stmts;
8393 }
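
/* For example (hypothetical SSA names), a signed int addition

     a_1 = b_2 + c_3;

   is returned as roughly the sequence

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;
     a_1 = (int) _6;

   where the unsigned addition wraps instead of invoking undefined
   behavior on signed overflow.  */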
8394
8395
8396 /* The valueization hook we use for the gimple_build API simplification.
8397 This makes us match fold_buildN behavior by only combining with
8398 statements in the sequence(s) we are currently building. */
8399
8400 static tree
8401 gimple_build_valueize (tree op)
8402 {
8403 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8404 return op;
8405 return NULL_TREE;
8406 }
8407
8408 /* Build the expression CODE OP0 of type TYPE with location LOC,
8409 simplifying it first if possible. Returns the built
8410 expression value and appends statements possibly defining it
8411 to SEQ. */
8412
8413 tree
8414 gimple_build (gimple_seq *seq, location_t loc,
8415 enum tree_code code, tree type, tree op0)
8416 {
8417 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8418 if (!res)
8419 {
8420 res = create_tmp_reg_or_ssa_name (type);
8421 gimple *stmt;
8422 if (code == REALPART_EXPR
8423 || code == IMAGPART_EXPR
8424 || code == VIEW_CONVERT_EXPR)
8425 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8426 else
8427 stmt = gimple_build_assign (res, code, op0);
8428 gimple_set_location (stmt, loc);
8429 gimple_seq_add_stmt_without_update (seq, stmt);
8430 }
8431 return res;
8432 }
8433
8434 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8435 simplifying it first if possible. Returns the built
8436 expression value and appends statements possibly defining it
8437 to SEQ. */
8438
8439 tree
8440 gimple_build (gimple_seq *seq, location_t loc,
8441 enum tree_code code, tree type, tree op0, tree op1)
8442 {
8443 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8444 if (!res)
8445 {
8446 res = create_tmp_reg_or_ssa_name (type);
8447 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8448 gimple_set_location (stmt, loc);
8449 gimple_seq_add_stmt_without_update (seq, stmt);
8450 }
8451 return res;
8452 }
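
/* Usage sketch (illustrative, names hypothetical): building
   (A + B) * C while letting match.pd simplify on the fly:

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, type, a, b);
     tree prod = gimple_build (&stmts, loc, MULT_EXPR, type, sum, c);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If B happens to be the zero constant, SUM is simply A and no
   statement is ever emitted for the addition.  */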
8453
8454 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8455 simplifying it first if possible. Returns the built
8456 expression value and appends statements possibly defining it
8457 to SEQ. */
8458
8459 tree
8460 gimple_build (gimple_seq *seq, location_t loc,
8461 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8462 {
8463 tree res = gimple_simplify (code, type, op0, op1, op2,
8464 seq, gimple_build_valueize);
8465 if (!res)
8466 {
8467 res = create_tmp_reg_or_ssa_name (type);
8468 gimple *stmt;
8469 if (code == BIT_FIELD_REF)
8470 stmt = gimple_build_assign (res, code,
8471 build3 (code, type, op0, op1, op2));
8472 else
8473 stmt = gimple_build_assign (res, code, op0, op1, op2);
8474 gimple_set_location (stmt, loc);
8475 gimple_seq_add_stmt_without_update (seq, stmt);
8476 }
8477 return res;
8478 }
8479
8480 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8481 void) with a location LOC. Returns the built expression value (or NULL_TREE
8482 if TYPE is void) and appends statements possibly defining it to SEQ. */
8483
8484 tree
8485 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8486 {
8487 tree res = NULL_TREE;
8488 gcall *stmt;
8489 if (internal_fn_p (fn))
8490 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8491 else
8492 {
8493 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8494 stmt = gimple_build_call (decl, 0);
8495 }
8496 if (!VOID_TYPE_P (type))
8497 {
8498 res = create_tmp_reg_or_ssa_name (type);
8499 gimple_call_set_lhs (stmt, res);
8500 }
8501 gimple_set_location (stmt, loc);
8502 gimple_seq_add_stmt_without_update (seq, stmt);
8503 return res;
8504 }
8505
8506 /* Build the call FN (ARG0) with a result of type TYPE
8507 (or no result if TYPE is void) with location LOC,
8508 simplifying it first if possible. Returns the built
8509 expression value (or NULL_TREE if TYPE is void) and appends
8510 statements possibly defining it to SEQ. */
8511
8512 tree
8513 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8514 tree type, tree arg0)
8515 {
8516 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8517 if (!res)
8518 {
8519 gcall *stmt;
8520 if (internal_fn_p (fn))
8521 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8522 else
8523 {
8524 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8525 stmt = gimple_build_call (decl, 1, arg0);
8526 }
8527 if (!VOID_TYPE_P (type))
8528 {
8529 res = create_tmp_reg_or_ssa_name (type);
8530 gimple_call_set_lhs (stmt, res);
8531 }
8532 gimple_set_location (stmt, loc);
8533 gimple_seq_add_stmt_without_update (seq, stmt);
8534 }
8535 return res;
8536 }
8537
8538 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8539 (or no result if TYPE is void) with location LOC,
8540 simplifying it first if possible. Returns the built
8541 expression value (or NULL_TREE if TYPE is void) and appends
8542 statements possibly defining it to SEQ. */
8543
8544 tree
8545 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8546 tree type, tree arg0, tree arg1)
8547 {
8548 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8549 if (!res)
8550 {
8551 gcall *stmt;
8552 if (internal_fn_p (fn))
8553 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8554 else
8555 {
8556 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8557 stmt = gimple_build_call (decl, 2, arg0, arg1);
8558 }
8559 if (!VOID_TYPE_P (type))
8560 {
8561 res = create_tmp_reg_or_ssa_name (type);
8562 gimple_call_set_lhs (stmt, res);
8563 }
8564 gimple_set_location (stmt, loc);
8565 gimple_seq_add_stmt_without_update (seq, stmt);
8566 }
8567 return res;
8568 }
8569
8570 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8571 (or no result if TYPE is void) with location LOC,
8572 simplifying it first if possible. Returns the built
8573 expression value (or NULL_TREE if TYPE is void) and appends
8574 statements possibly defining it to SEQ. */
8575
8576 tree
8577 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8578 tree type, tree arg0, tree arg1, tree arg2)
8579 {
8580 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8581 seq, gimple_build_valueize);
8582 if (!res)
8583 {
8584 gcall *stmt;
8585 if (internal_fn_p (fn))
8586 stmt = gimple_build_call_internal (as_internal_fn (fn),
8587 3, arg0, arg1, arg2);
8588 else
8589 {
8590 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8591 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8592 }
8593 if (!VOID_TYPE_P (type))
8594 {
8595 res = create_tmp_reg_or_ssa_name (type);
8596 gimple_call_set_lhs (stmt, res);
8597 }
8598 gimple_set_location (stmt, loc);
8599 gimple_seq_add_stmt_without_update (seq, stmt);
8600 }
8601 return res;
8602 }
8603
8604 /* Build the conversion (TYPE) OP with a result of type TYPE
8605 with location LOC if such a conversion is necessary in GIMPLE,
8606 simplifying it first.
8607 Returns the built expression value and appends
8608 statements possibly defining it to SEQ. */
8609
8610 tree
8611 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8612 {
8613 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8614 return op;
8615 return gimple_build (seq, loc, NOP_EXPR, type, op);
8616 }
8617
8618 /* Build the conversion (ptrofftype) OP with a result of a type
8619 compatible with ptrofftype with location LOC if such conversion
8620 is necessary in GIMPLE, simplifying it first.
8621 Returns the built expression value and appends
8622 statements possibly defining it to SEQ. */
8623
8624 tree
8625 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8626 {
8627 if (ptrofftype_p (TREE_TYPE (op)))
8628 return op;
8629 return gimple_convert (seq, loc, sizetype, op);
8630 }
8631
8632 /* Build a vector of type TYPE in which each element has the value OP.
8633 Return a gimple value for the result, appending any new statements
8634 to SEQ. */
8635
8636 tree
8637 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8638 tree op)
8639 {
8640 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8641 && !CONSTANT_CLASS_P (op))
8642 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8643
8644 tree res, vec = build_vector_from_val (type, op);
8645 if (is_gimple_val (vec))
8646 return vec;
8647 if (gimple_in_ssa_p (cfun))
8648 res = make_ssa_name (type);
8649 else
8650 res = create_tmp_reg (type);
8651 gimple *stmt = gimple_build_assign (res, vec);
8652 gimple_set_location (stmt, loc);
8653 gimple_seq_add_stmt_without_update (seq, stmt);
8654 return res;
8655 }
8656
8657 /* Build a vector from BUILDER, handling the case in which some elements
8658 are non-constant. Return a gimple value for the result, appending any
8659 new instructions to SEQ.
8660
8661 BUILDER must not have a stepped encoding on entry. This is because
8662 the function is not geared up to handle the arithmetic that would
8663 be needed in the variable case, and any code building a vector that
8664 is known to be constant should use BUILDER->build () directly. */
8665
8666 tree
8667 gimple_build_vector (gimple_seq *seq, location_t loc,
8668 tree_vector_builder *builder)
8669 {
8670 gcc_assert (builder->nelts_per_pattern () <= 2);
8671 unsigned int encoded_nelts = builder->encoded_nelts ();
8672 for (unsigned int i = 0; i < encoded_nelts; ++i)
8673 if (!CONSTANT_CLASS_P ((*builder)[i]))
8674 {
8675 tree type = builder->type ();
8676 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8677 vec<constructor_elt, va_gc> *v;
8678 vec_alloc (v, nelts);
8679 for (i = 0; i < nelts; ++i)
8680 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
8681
8682 tree res;
8683 if (gimple_in_ssa_p (cfun))
8684 res = make_ssa_name (type);
8685 else
8686 res = create_tmp_reg (type);
8687 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8688 gimple_set_location (stmt, loc);
8689 gimple_seq_add_stmt_without_update (seq, stmt);
8690 return res;
8691 }
8692 return builder->build ();
8693 }
8694
8695 /* Emit gimple statements into SEQ that take the value given in OLD_SIZE
8696 and generate a value guaranteed to be rounded upwards to ALIGN.
8697
8698 Return the tree node representing this size; it is of TREE_TYPE TYPE. */
8699
8700 tree
8701 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8702 tree old_size, unsigned HOST_WIDE_INT align)
8703 {
8704 unsigned HOST_WIDE_INT tg_mask = align - 1;
8705 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8706 gcc_assert (INTEGRAL_TYPE_P (type));
8707 tree tree_mask = build_int_cst (type, tg_mask);
8708 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8709 tree_mask);
8710
8711 tree mask = build_int_cst (type, -align);
8712 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8713 }
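
/* Worked example: for ALIGN == 8 (so TG_MASK == 7) and OLD_SIZE == 13,
   the emitted computation evaluates to

     (13 + 7) & ~7 = 20 & ~7 = 16

   i.e. the smallest multiple of ALIGN not less than OLD_SIZE.  */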

/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
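
/* For instance (editor's sketch), for the gimple assignment

     x_2 = ABS_EXPR <x_1>;

   the rhs class is GIMPLE_UNARY_RHS and the query reduces to
   tree_unary_nonnegative_warnv_p (ABS_EXPR, ...), which for signed
   integer types can only answer true by assuming signed overflow is
   undefined (ABS_EXPR <INT_MIN> wraps back to INT_MIN), in which case
   it also sets *STRICT_OVERFLOW_P.  */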

/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_combined_fn (stmt),
					arg0,
					arg1,
					strict_overflow_p, depth);
}
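
/* For example (editor's sketch), for the call

     y_1 = __builtin_fabs (x_2);

   tree_call_nonnegative_warnv_p recognizes the fabs family and returns
   true, since fabs clears the sign bit regardless of its argument.
   Note that only the first two call arguments are passed down, so
   functions needing more arguments to decide are necessarily answered
   conservatively.  */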

/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
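
/* Editor's illustration: for the PHI node

     # x_3 = PHI <5(4), y_2(5)>

   the result is known non-negative only if every argument is; the
   recursion depth is incremented per argument so that chains of such
   queries terminate.  */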

/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
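
/* Typical use (editor's sketch of a hypothetical caller, not a quote
   from GCC):

     bool strict_ovf = false;
     if (gimple_stmt_nonnegative_warnv_p (def_stmt, &strict_ovf, 0)
	 && strict_ovf)
       fold_overflow_warning (...);

   Queries normally start with DEPTH == 0; STRICT_OVERFLOW_P lets the
   caller warn when the answer relies on undefined signed overflow.  */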

/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
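
/* For example (editor's sketch), the conversion

     y_2 = (double) i_1;

   is a GIMPLE_UNARY_RHS with code FLOAT_EXPR, for which
   integer_valued_real_unary_p returns true: a value converted from an
   integer type is necessarily integer valued.  */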

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
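
/* For example (editor's sketch), for

     y_1 = __builtin_trunc (x_2);

   integer_valued_real_call_p returns true: trunc always produces an
   integer value (or an infinity or NaN, which this predicate also
   accepts).  */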

/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}

/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}
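
/* Editor's note: this predicate backs simplifications of the kind
   found in match.pd, e.g. folding a redundant truncation

     x_2 = __builtin_floor (a_1);
     y_3 = __builtin_trunc (x_2);

   to y_3 = x_2, because x_2 is already known to be integer valued
   (a sketch of the sort of rule that consults this query, not an
   exact quote).  */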