4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
87c0a9fc 2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4ee9c684 4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6
7This file is part of GCC.
48e1416a 8
4ee9c684 9GCC is free software; you can redistribute it and/or modify it
10under the terms of the GNU General Public License as published by the
8c4c00c1 11Free Software Foundation; either version 3, or (at your option) any
4ee9c684 12later version.
48e1416a 13
4ee9c684 14GCC is distributed in the hope that it will be useful, but WITHOUT
15ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
48e1416a 18
4ee9c684 19You should have received a copy of the GNU General Public License
8c4c00c1 20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
4ee9c684 22
88dbf20f 23/* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
 27 a four-level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
30
bfa30570 31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
88dbf20f 38
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
43
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
46
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
50
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
52
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
 54 evaluates to a constant and conditional jumps whose predicate
 55 evaluates to a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
 58 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited.
61
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
48e1416a 66
88dbf20f 67
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
73
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
76
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
82
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
 85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
87
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
95
96
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
101
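   As a small worked illustration (an editor's sketch, not part of the
   original sources), consider:

	x_1 = 4;
	y_2 = x_1 + 1;
	if (y_2 > 10) ...

   Visiting the first assignment sets x_1 to CONSTANT 4. Visiting the
   second folds x_1 + 1 and sets y_2 to CONSTANT 5. Visiting the
   conditional evaluates 5 > 10 to false, so only the else edge is
   marked executable and the then branch is never simulated.
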
4ee9c684 102 References:
103
104 Constant propagation with conditional branches,
105 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
106
107 Building an Optimizing Compiler,
108 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
109
110 Advanced Compiler Design and Implementation,
111 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
112
113#include "config.h"
114#include "system.h"
115#include "coretypes.h"
116#include "tm.h"
4ee9c684 117#include "tree.h"
41511585 118#include "flags.h"
4ee9c684 119#include "tm_p.h"
4ee9c684 120#include "basic-block.h"
41511585 121#include "output.h"
41511585 122#include "function.h"
ce084dfc 123#include "tree-pretty-print.h"
124#include "gimple-pretty-print.h"
41511585 125#include "timevar.h"
4ee9c684 126#include "tree-dump.h"
41511585 127#include "tree-flow.h"
4ee9c684 128#include "tree-pass.h"
41511585 129#include "tree-ssa-propagate.h"
5a4b7e1e 130#include "value-prof.h"
41511585 131#include "langhooks.h"
8782adcf 132#include "target.h"
0b205f4c 133#include "diagnostic-core.h"
43fb76c1 134#include "dbgcnt.h"
4ee9c684 135
136
137/* Possible lattice values. */
138typedef enum
139{
bfa30570 140 UNINITIALIZED,
4ee9c684 141 UNDEFINED,
142 CONSTANT,
143 VARYING
88dbf20f 144} ccp_lattice_t;
4ee9c684 145
14f101cf 146struct prop_value_d {
147 /* Lattice value. */
148 ccp_lattice_t lattice_val;
149
150 /* Propagated value. */
151 tree value;
b7e55469 152
153 /* Mask that applies to the propagated value during CCP. For
 154 X with a CONSTANT lattice value, X & ~mask == value & ~mask. */
155 double_int mask;
14f101cf 156};
157
158typedef struct prop_value_d prop_value_t;
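
/* Illustrative sketch (an editor's example, not part of the original
   sources): with VALUE == 4 (binary 100) and MASK == 3 (binary 011),
   the two low bits of X are unknown while all higher bits are known;
   X may be 4, 5, 6 or 7, and X & ~mask == value & ~mask == 4 holds
   for each of them.  */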
159
88dbf20f 160/* Array of propagated constant values. After propagation,
 161 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). */
20140406 166static prop_value_t *const_val;
4ee9c684 167
4af351a8 168static void canonicalize_float_value (prop_value_t *);
6688f8ec 169static bool ccp_fold_stmt (gimple_stmt_iterator *);
83f4b93b 170static tree fold_ctor_reference (tree type, tree ctor,
171 unsigned HOST_WIDE_INT offset,
172 unsigned HOST_WIDE_INT size);
4af351a8 173
88dbf20f 174/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
01406fc0 175
176static void
88dbf20f 177dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
01406fc0 178{
41511585 179 switch (val.lattice_val)
01406fc0 180 {
88dbf20f 181 case UNINITIALIZED:
182 fprintf (outf, "%sUNINITIALIZED", prefix);
183 break;
41511585 184 case UNDEFINED:
185 fprintf (outf, "%sUNDEFINED", prefix);
186 break;
187 case VARYING:
188 fprintf (outf, "%sVARYING", prefix);
189 break;
41511585 190 case CONSTANT:
191 fprintf (outf, "%sCONSTANT ", prefix);
b7e55469 192 if (TREE_CODE (val.value) != INTEGER_CST
193 || double_int_zero_p (val.mask))
194 print_generic_expr (outf, val.value, dump_flags);
195 else
196 {
197 double_int cval = double_int_and_not (tree_to_double_int (val.value),
198 val.mask);
199 fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
200 prefix, cval.high, cval.low);
201 fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
202 val.mask.high, val.mask.low);
203 }
41511585 204 break;
205 default:
8c0963c4 206 gcc_unreachable ();
41511585 207 }
01406fc0 208}
4ee9c684 209
4ee9c684 210
88dbf20f 211/* Print lattice value VAL to stderr. */
212
213void debug_lattice_value (prop_value_t val);
214
4b987fac 215DEBUG_FUNCTION void
88dbf20f 216debug_lattice_value (prop_value_t val)
217{
218 dump_lattice_value (stderr, "", val);
219 fprintf (stderr, "\n");
220}
4ee9c684 221
4ee9c684 222
88dbf20f 223/* Compute a default value for variable VAR and store it in the
224 CONST_VAL array. The following rules are used to get default
225 values:
01406fc0 226
88dbf20f 227 1- Global and static variables that are declared constant are
228 considered CONSTANT.
229
230 2- Any other value is considered UNDEFINED. This is useful when
41511585 231 considering PHI nodes. PHI arguments that are undefined do not
232 change the constant value of the PHI node, which allows for more
88dbf20f 233 constants to be propagated.
4ee9c684 234
8883e700 235 3- Variables defined by statements other than assignments and PHI
88dbf20f 236 nodes are considered VARYING.
4ee9c684 237
8883e700 238 4- Initial values of variables that are not GIMPLE registers are
bfa30570 239 considered VARYING. */
4ee9c684 240
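/* Illustrative sketch (an editor's example, not part of the original
   sources): given

     static const int forty_two = 42;
     ...
     i_1 = forty_two;

   rule 1 applies and i_1 gets the default value CONSTANT 42, whereas
   the first use of an uninitialized local variable gets UNDEFINED by
   rule 2.  */
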
88dbf20f 241static prop_value_t
242get_default_value (tree var)
243{
244 tree sym = SSA_NAME_VAR (var);
b7e55469 245 prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
8edeb88b 246 gimple stmt;
247
248 stmt = SSA_NAME_DEF_STMT (var);
249
250 if (gimple_nop_p (stmt))
4ee9c684 251 {
8edeb88b 252 /* Variables defined by an empty statement are those used
253 before being initialized. If VAR is a local variable, we
254 can assume initially that it is UNDEFINED, otherwise we must
255 consider it VARYING. */
524a0531 256 if (is_gimple_reg (sym)
257 && TREE_CODE (sym) == VAR_DECL)
8edeb88b 258 val.lattice_val = UNDEFINED;
259 else
b7e55469 260 {
261 val.lattice_val = VARYING;
262 val.mask = double_int_minus_one;
263 }
4ee9c684 264 }
8edeb88b 265 else if (is_gimple_assign (stmt)
266 /* Value-returning GIMPLE_CALL statements assign to
267 a variable, and are treated similarly to GIMPLE_ASSIGN. */
268 || (is_gimple_call (stmt)
269 && gimple_call_lhs (stmt) != NULL_TREE)
270 || gimple_code (stmt) == GIMPLE_PHI)
41511585 271 {
8edeb88b 272 tree cst;
273 if (gimple_assign_single_p (stmt)
274 && DECL_P (gimple_assign_rhs1 (stmt))
275 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
88dbf20f 276 {
8edeb88b 277 val.lattice_val = CONSTANT;
278 val.value = cst;
88dbf20f 279 }
280 else
8edeb88b 281 /* Any other variable defined by an assignment or a PHI node
282 is considered UNDEFINED. */
283 val.lattice_val = UNDEFINED;
284 }
285 else
286 {
287 /* Otherwise, VAR will never take on a constant value. */
288 val.lattice_val = VARYING;
b7e55469 289 val.mask = double_int_minus_one;
41511585 290 }
4ee9c684 291
41511585 292 return val;
293}
4ee9c684 294
4ee9c684 295
bfa30570 296/* Get the constant value associated with variable VAR. */
4ee9c684 297
bfa30570 298static inline prop_value_t *
299get_value (tree var)
88dbf20f 300{
e004838d 301 prop_value_t *val;
bfa30570 302
e004838d 303 if (const_val == NULL)
304 return NULL;
305
306 val = &const_val[SSA_NAME_VERSION (var)];
bfa30570 307 if (val->lattice_val == UNINITIALIZED)
4ee9c684 308 *val = get_default_value (var);
309
4af351a8 310 canonicalize_float_value (val);
311
4ee9c684 312 return val;
313}
314
15d138c9 315/* Return the constant tree value associated with VAR. */
316
317static inline tree
318get_constant_value (tree var)
319{
98d92e3c 320 prop_value_t *val;
321 if (TREE_CODE (var) != SSA_NAME)
322 {
323 if (is_gimple_min_invariant (var))
324 return var;
325 return NULL_TREE;
326 }
327 val = get_value (var);
b7e55469 328 if (val
329 && val->lattice_val == CONSTANT
330 && (TREE_CODE (val->value) != INTEGER_CST
331 || double_int_zero_p (val->mask)))
15d138c9 332 return val->value;
333 return NULL_TREE;
334}
335
bfa30570 336/* Sets the value associated with VAR to VARYING. */
337
338static inline void
339set_value_varying (tree var)
340{
341 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
342
343 val->lattice_val = VARYING;
344 val->value = NULL_TREE;
b7e55469 345 val->mask = double_int_minus_one;
bfa30570 346}
4ee9c684 347
b31eb493 348/* For float types, modify the value of VAL to make ccp work correctly
349 for non-standard values (-0, NaN):
350
351 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
352 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
353 This is to fix the following problem (see PR 29921): Suppose we have
354
355 x = 0.0 * y
356
357 and we set value of y to NaN. This causes value of x to be set to NaN.
358 When we later determine that y is in fact VARYING, fold uses the fact
359 that HONOR_NANS is false, and we try to change the value of x to 0,
360 causing an ICE. With HONOR_NANS being false, the real appearance of
361 NaN would cause undefined behavior, though, so claiming that y (and x)
362 are UNDEFINED initially is correct. */
363
364static void
365canonicalize_float_value (prop_value_t *val)
366{
367 enum machine_mode mode;
368 tree type;
369 REAL_VALUE_TYPE d;
370
371 if (val->lattice_val != CONSTANT
372 || TREE_CODE (val->value) != REAL_CST)
373 return;
374
375 d = TREE_REAL_CST (val->value);
376 type = TREE_TYPE (val->value);
377 mode = TYPE_MODE (type);
378
379 if (!HONOR_SIGNED_ZEROS (mode)
380 && REAL_VALUE_MINUS_ZERO (d))
381 {
382 val->value = build_real (type, dconst0);
383 return;
384 }
385
386 if (!HONOR_NANS (mode)
387 && REAL_VALUE_ISNAN (d))
388 {
389 val->lattice_val = UNDEFINED;
390 val->value = NULL;
b31eb493 391 return;
392 }
393}
394
b7e55469 395/* Return whether the lattice transition is valid. */
396
397static bool
398valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
399{
400 /* Lattice transitions must always be monotonically increasing in
401 value. */
402 if (old_val.lattice_val < new_val.lattice_val)
403 return true;
404
405 if (old_val.lattice_val != new_val.lattice_val)
406 return false;
407
408 if (!old_val.value && !new_val.value)
409 return true;
410
411 /* Now both lattice values are CONSTANT. */
412
413 /* Allow transitioning from &x to &x & ~3. */
414 if (TREE_CODE (old_val.value) != INTEGER_CST
415 && TREE_CODE (new_val.value) == INTEGER_CST)
416 return true;
417
418 /* Bit-lattices have to agree in the still valid bits. */
419 if (TREE_CODE (old_val.value) == INTEGER_CST
420 && TREE_CODE (new_val.value) == INTEGER_CST)
421 return double_int_equal_p
422 (double_int_and_not (tree_to_double_int (old_val.value),
423 new_val.mask),
424 double_int_and_not (tree_to_double_int (new_val.value),
425 new_val.mask));
426
427 /* Otherwise constant values have to agree. */
428 return operand_equal_p (old_val.value, new_val.value, 0);
429}
430
88dbf20f 431/* Set the value for variable VAR to NEW_VAL. Return true if the new
432 value is different from VAR's previous value. */
4ee9c684 433
41511585 434static bool
88dbf20f 435set_lattice_value (tree var, prop_value_t new_val)
4ee9c684 436{
6d0bf6d6 437 /* We can deal with old UNINITIALIZED values just fine here. */
438 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
88dbf20f 439
b31eb493 440 canonicalize_float_value (&new_val);
441
b7e55469 442 /* We have to be careful to not go up the bitwise lattice
443 represented by the mask.
444 ??? This doesn't seem to be the best place to enforce this. */
445 if (new_val.lattice_val == CONSTANT
446 && old_val->lattice_val == CONSTANT
447 && TREE_CODE (new_val.value) == INTEGER_CST
448 && TREE_CODE (old_val->value) == INTEGER_CST)
449 {
450 double_int diff;
451 diff = double_int_xor (tree_to_double_int (new_val.value),
452 tree_to_double_int (old_val->value));
453 new_val.mask = double_int_ior (new_val.mask,
454 double_int_ior (old_val->mask, diff));
455 }
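      /* Illustrative sketch (an editor's example, not part of the
	 original sources): if the old value was CONSTANT 4 with mask 0
	 and the new value is CONSTANT 6 with mask 0, the XOR above is
	 2, so the result is value 6 with mask 2; bit 1 becomes unknown,
	 keeping the transition monotonic in the bitwise lattice.  */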
bfa30570 456
b7e55469 457 gcc_assert (valid_lattice_transition (*old_val, new_val));
88dbf20f 458
b7e55469 459 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
460 caller that this was a non-transition. */
461 if (old_val->lattice_val != new_val.lattice_val
462 || (new_val.lattice_val == CONSTANT
463 && TREE_CODE (new_val.value) == INTEGER_CST
464 && (TREE_CODE (old_val->value) != INTEGER_CST
465 || !double_int_equal_p (new_val.mask, old_val->mask))))
4ee9c684 466 {
b7e55469 467 /* ??? We would like to delay creation of INTEGER_CSTs from
468 partially constants here. */
469
41511585 470 if (dump_file && (dump_flags & TDF_DETAILS))
471 {
88dbf20f 472 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
bfa30570 473 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
41511585 474 }
475
88dbf20f 476 *old_val = new_val;
477
6d0bf6d6 478 gcc_assert (new_val.lattice_val != UNINITIALIZED);
bfa30570 479 return true;
4ee9c684 480 }
41511585 481
482 return false;
4ee9c684 483}
484
b7e55469 485static prop_value_t get_value_for_expr (tree, bool);
486static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
487static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
488 tree, double_int, double_int,
489 tree, double_int, double_int);
490
491/* Return a double_int that can be used for bitwise simplifications
492 from VAL. */
493
494static double_int
495value_to_double_int (prop_value_t val)
496{
497 if (val.value
498 && TREE_CODE (val.value) == INTEGER_CST)
499 return tree_to_double_int (val.value);
500 else
501 return double_int_zero;
502}
503
504/* Return the value for the address expression EXPR based on alignment
505 information. */
6d0bf6d6 506
507static prop_value_t
b7e55469 508get_value_from_alignment (tree expr)
509{
510 prop_value_t val;
511 HOST_WIDE_INT bitsize, bitpos;
512 tree base, offset;
513 enum machine_mode mode;
514 int align;
515
516 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
517
518 base = get_inner_reference (TREE_OPERAND (expr, 0),
519 &bitsize, &bitpos, &offset,
520 &mode, &align, &align, false);
5d9de213 521 if (TREE_CODE (base) == MEM_REF)
b7e55469 522 val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
523 TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
524 else if (base
98ab9e8f 525 && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
b7e55469 526 > BITS_PER_UNIT))
527 {
528 val.lattice_val = CONSTANT;
529 /* We assume pointers are zero-extended. */
530 val.mask = double_int_and_not
531 (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
532 uhwi_to_double_int (align / BITS_PER_UNIT - 1));
533 val.value = build_int_cst (TREE_TYPE (expr), 0);
534 }
535 else
536 {
537 val.lattice_val = VARYING;
538 val.mask = double_int_minus_one;
539 val.value = NULL_TREE;
540 }
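  /* Illustrative sketch (an editor's example, not part of the original
     sources): for a 32-byte aligned base, align is 256 bits above, so
     align / BITS_PER_UNIT - 1 == 31 and the five low mask bits are
     cleared; the pointer's low five bits are known to be zero while
     all higher bits remain unknown.  */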
541 if (bitpos != 0)
542 {
543 double_int value, mask;
544 bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
545 TREE_TYPE (expr), value_to_double_int (val), val.mask,
546 TREE_TYPE (expr),
547 shwi_to_double_int (bitpos / BITS_PER_UNIT),
548 double_int_zero);
549 val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
550 val.mask = mask;
551 if (val.lattice_val == CONSTANT)
552 val.value = double_int_to_tree (TREE_TYPE (expr), value);
553 else
554 val.value = NULL_TREE;
555 }
556 /* ??? We should handle i * 4 and more complex expressions from
557 the offset, possibly by just expanding get_value_for_expr. */
558 if (offset != NULL_TREE)
559 {
560 double_int value, mask;
561 prop_value_t oval = get_value_for_expr (offset, true);
562 bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
563 TREE_TYPE (expr), value_to_double_int (val), val.mask,
564 TREE_TYPE (expr), value_to_double_int (oval),
565 oval.mask);
566 val.mask = mask;
567 if (double_int_minus_one_p (mask))
568 {
569 val.lattice_val = VARYING;
570 val.value = NULL_TREE;
571 }
572 else
573 {
574 val.lattice_val = CONSTANT;
575 val.value = double_int_to_tree (TREE_TYPE (expr), value);
576 }
577 }
578
579 return val;
580}
581
582/* Return the value for the tree operand EXPR. If FOR_BITS_P is true
583 return constant bits extracted from alignment information for
584 invariant addresses. */
585
586static prop_value_t
587get_value_for_expr (tree expr, bool for_bits_p)
6d0bf6d6 588{
589 prop_value_t val;
590
591 if (TREE_CODE (expr) == SSA_NAME)
b7e55469 592 {
593 val = *get_value (expr);
594 if (for_bits_p
595 && val.lattice_val == CONSTANT
596 && TREE_CODE (val.value) == ADDR_EXPR)
597 val = get_value_from_alignment (val.value);
598 }
599 else if (is_gimple_min_invariant (expr)
600 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
6d0bf6d6 601 {
602 val.lattice_val = CONSTANT;
603 val.value = expr;
b7e55469 604 val.mask = double_int_zero;
6d0bf6d6 605 canonicalize_float_value (&val);
606 }
b7e55469 607 else if (TREE_CODE (expr) == ADDR_EXPR)
608 val = get_value_from_alignment (expr);
6d0bf6d6 609 else
610 {
611 val.lattice_val = VARYING;
b7e55469 612 val.mask = double_int_minus_one;
6d0bf6d6 613 val.value = NULL_TREE;
614 }
6d0bf6d6 615 return val;
616}
617
88dbf20f 618/* Return the likely CCP lattice value for STMT.
4ee9c684 619
41511585 620 If STMT has no operands, then return CONSTANT.
4ee9c684 621
d61b9af3 622 Else if undefinedness of operands of STMT cause its value to be
623 undefined, then return UNDEFINED.
4ee9c684 624
41511585 625 Else if any operands of STMT are constants, then return CONSTANT.
4ee9c684 626
41511585 627 Else return VARYING. */
4ee9c684 628
88dbf20f 629static ccp_lattice_t
75a70cf9 630likely_value (gimple stmt)
41511585 631{
d61b9af3 632 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
41511585 633 tree use;
634 ssa_op_iter iter;
8edeb88b 635 unsigned i;
4ee9c684 636
590c3166 637 enum gimple_code code = gimple_code (stmt);
75a70cf9 638
639 /* This function appears to be called only for assignments, calls,
640 conditionals, and switches, due to the logic in visit_stmt. */
641 gcc_assert (code == GIMPLE_ASSIGN
642 || code == GIMPLE_CALL
643 || code == GIMPLE_COND
644 || code == GIMPLE_SWITCH);
88dbf20f 645
646 /* If the statement has volatile operands, it won't fold to a
647 constant value. */
75a70cf9 648 if (gimple_has_volatile_ops (stmt))
88dbf20f 649 return VARYING;
650
75a70cf9 651 /* Arrive here for more complex cases. */
bfa30570 652 has_constant_operand = false;
d61b9af3 653 has_undefined_operand = false;
654 all_undefined_operands = true;
8edeb88b 655 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
41511585 656 {
bfa30570 657 prop_value_t *val = get_value (use);
41511585 658
bfa30570 659 if (val->lattice_val == UNDEFINED)
d61b9af3 660 has_undefined_operand = true;
661 else
662 all_undefined_operands = false;
88dbf20f 663
41511585 664 if (val->lattice_val == CONSTANT)
bfa30570 665 has_constant_operand = true;
4ee9c684 666 }
41511585 667
dd277d48 668 /* There may be constants in regular rhs operands. For calls we
 669 have to ignore the lhs, fndecl and static chain; for other
 670 statements only the lhs. */
671 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
8edeb88b 672 i < gimple_num_ops (stmt); ++i)
673 {
674 tree op = gimple_op (stmt, i);
675 if (!op || TREE_CODE (op) == SSA_NAME)
676 continue;
677 if (is_gimple_min_invariant (op))
678 has_constant_operand = true;
679 }
680
87c0a9fc 681 if (has_constant_operand)
682 all_undefined_operands = false;
683
d61b9af3 684 /* If the operation combines operands like COMPLEX_EXPR make sure to
685 not mark the result UNDEFINED if only one part of the result is
686 undefined. */
75a70cf9 687 if (has_undefined_operand && all_undefined_operands)
d61b9af3 688 return UNDEFINED;
75a70cf9 689 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
d61b9af3 690 {
75a70cf9 691 switch (gimple_assign_rhs_code (stmt))
d61b9af3 692 {
693 /* Unary operators are handled with all_undefined_operands. */
694 case PLUS_EXPR:
695 case MINUS_EXPR:
d61b9af3 696 case POINTER_PLUS_EXPR:
d61b9af3 697 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
698 Not bitwise operators, one VARYING operand may specify the
699 result completely. Not logical operators for the same reason.
05a936a0 700 Not COMPLEX_EXPR as one VARYING operand makes the result partly
701 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
702 the undefined operand may be promoted. */
d61b9af3 703 return UNDEFINED;
704
705 default:
706 ;
707 }
708 }
 709 /* If there was an UNDEFINED operand but the result may not be
 710 UNDEFINED, fall back to VARYING even if there were CONSTANT operands. */
711 if (has_undefined_operand)
712 return VARYING;
713
8edeb88b 714 /* We do not consider virtual operands here -- load from read-only
715 memory may have only VARYING virtual operands, but still be
716 constant. */
bfa30570 717 if (has_constant_operand
8edeb88b 718 || gimple_references_memory_p (stmt))
88dbf20f 719 return CONSTANT;
720
bfa30570 721 return VARYING;
4ee9c684 722}
723
bfa30570 724/* Returns true if STMT cannot be constant. */
725
726static bool
75a70cf9 727surely_varying_stmt_p (gimple stmt)
bfa30570 728{
729 /* If the statement has operands that we cannot handle, it cannot be
730 constant. */
75a70cf9 731 if (gimple_has_volatile_ops (stmt))
bfa30570 732 return true;
733
f257af64 734 /* If it is a call that does not return a value, or whose
 735 callee is known but is not a builtin, it is varying. */
75a70cf9 736 if (is_gimple_call (stmt))
f257af64 737 {
738 tree fndecl;
739 if (!gimple_call_lhs (stmt)
740 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
5768aeb3 741 && !DECL_BUILT_IN (fndecl)))
f257af64 742 return true;
743 }
bfa30570 744
8edeb88b 745 /* Any other store operation is not interesting. */
dd277d48 746 else if (gimple_vdef (stmt))
8edeb88b 747 return true;
748
bfa30570 749 /* Anything other than assignments and conditional jumps is not
 750 interesting for CCP. */
75a70cf9 751 if (gimple_code (stmt) != GIMPLE_ASSIGN
f257af64 752 && gimple_code (stmt) != GIMPLE_COND
753 && gimple_code (stmt) != GIMPLE_SWITCH
754 && gimple_code (stmt) != GIMPLE_CALL)
bfa30570 755 return true;
756
757 return false;
758}
4ee9c684 759
41511585 760/* Initialize local data structures for CCP. */
4ee9c684 761
762static void
41511585 763ccp_initialize (void)
4ee9c684 764{
41511585 765 basic_block bb;
4ee9c684 766
43959b95 767 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
4ee9c684 768
41511585 769 /* Initialize simulation flags for PHI nodes and statements. */
770 FOR_EACH_BB (bb)
4ee9c684 771 {
75a70cf9 772 gimple_stmt_iterator i;
4ee9c684 773
75a70cf9 774 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
41511585 775 {
75a70cf9 776 gimple stmt = gsi_stmt (i);
2193544e 777 bool is_varying;
778
 779 /* If the statement is a control insn, we must simulate
 780 it at least once; otherwise its outgoing edges will
 781 never get added. */
782 if (stmt_ends_bb_p (stmt))
783 is_varying = false;
784 else
785 is_varying = surely_varying_stmt_p (stmt);
4ee9c684 786
bfa30570 787 if (is_varying)
41511585 788 {
88dbf20f 789 tree def;
790 ssa_op_iter iter;
791
792 /* If the statement will not produce a constant, mark
793 all its outputs VARYING. */
794 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
8edeb88b 795 set_value_varying (def);
41511585 796 }
75a70cf9 797 prop_set_simulate_again (stmt, !is_varying);
41511585 798 }
4ee9c684 799 }
800
75a70cf9 801 /* Now process PHI nodes. We never clear the simulate_again flag on
802 phi nodes, since we do not know which edges are executable yet,
803 except for phi nodes for virtual operands when we do not do store ccp. */
41511585 804 FOR_EACH_BB (bb)
4ee9c684 805 {
75a70cf9 806 gimple_stmt_iterator i;
41511585 807
75a70cf9 808 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
809 {
810 gimple phi = gsi_stmt (i);
811
61207d43 812 if (!is_gimple_reg (gimple_phi_result (phi)))
75a70cf9 813 prop_set_simulate_again (phi, false);
bfa30570 814 else
75a70cf9 815 prop_set_simulate_again (phi, true);
41511585 816 }
4ee9c684 817 }
41511585 818}
4ee9c684 819
43fb76c1 820/* Debug count support. Reset the values of SSA names to
 821 VARYING when the total number of SSA names analyzed
 822 exceeds the specified debug count. */
823
824static void
825do_dbg_cnt (void)
826{
827 unsigned i;
828 for (i = 0; i < num_ssa_names; i++)
829 {
830 if (!dbg_cnt (ccp))
831 {
832 const_val[i].lattice_val = VARYING;
b7e55469 833 const_val[i].mask = double_int_minus_one;
43fb76c1 834 const_val[i].value = NULL_TREE;
835 }
836 }
837}
838
4ee9c684 839
88dbf20f 840/* Do final substitution of propagated values, cleanup the flowgraph and
48e1416a 841 free allocated storage.
4ee9c684 842
33a34f1e 843 Return TRUE when something was optimized. */
844
845static bool
88dbf20f 846ccp_finalize (void)
4ee9c684 847{
43fb76c1 848 bool something_changed;
153c3b50 849 unsigned i;
43fb76c1 850
851 do_dbg_cnt ();
153c3b50 852
853 /* Derive alignment and misalignment information from partially
854 constant pointers in the lattice. */
855 for (i = 1; i < num_ssa_names; ++i)
856 {
857 tree name = ssa_name (i);
858 prop_value_t *val;
859 struct ptr_info_def *pi;
860 unsigned int tem, align;
861
862 if (!name
863 || !POINTER_TYPE_P (TREE_TYPE (name)))
864 continue;
865
866 val = get_value (name);
867 if (val->lattice_val != CONSTANT
868 || TREE_CODE (val->value) != INTEGER_CST)
869 continue;
870
871 /* Trailing constant bits specify the alignment, trailing value
872 bits the misalignment. */
873 tem = val->mask.low;
874 align = (tem & -tem);
875 if (align == 1)
876 continue;
877
878 pi = get_ptr_info (name);
879 pi->align = align;
880 pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
881 }
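  /* Illustrative sketch (an editor's example, not part of the original
     sources): a pointer whose lattice value has mask ...111000 and
     value 4 has its three low bits known, so align becomes 8 and
     misalign 4; the pointer is known to equal 4 modulo 8.  */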
882
88dbf20f 883 /* Perform substitutions based on the known constant values. */
14f101cf 884 something_changed = substitute_and_fold (get_constant_value,
885 ccp_fold_stmt, true);
4ee9c684 886
88dbf20f 887 free (const_val);
e004838d 888 const_val = NULL;
 33a34f1e 889 return something_changed;
4ee9c684 890}
891
892
88dbf20f 893/* Compute the meet operator between *VAL1 and *VAL2. Store the result
894 in VAL1.
895
896 any M UNDEFINED = any
88dbf20f 897 any M VARYING = VARYING
898 Ci M Cj = Ci if (i == j)
899 Ci M Cj = VARYING if (i != j)
bfa30570 900 */
4ee9c684 901
902static void
88dbf20f 903ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
4ee9c684 904{
88dbf20f 905 if (val1->lattice_val == UNDEFINED)
4ee9c684 906 {
88dbf20f 907 /* UNDEFINED M any = any */
908 *val1 = *val2;
41511585 909 }
88dbf20f 910 else if (val2->lattice_val == UNDEFINED)
92481a4d 911 {
88dbf20f 912 /* any M UNDEFINED = any
913 Nothing to do. VAL1 already contains the value we want. */
914 ;
92481a4d 915 }
88dbf20f 916 else if (val1->lattice_val == VARYING
917 || val2->lattice_val == VARYING)
41511585 918 {
88dbf20f 919 /* any M VARYING = VARYING. */
920 val1->lattice_val = VARYING;
b7e55469 921 val1->mask = double_int_minus_one;
88dbf20f 922 val1->value = NULL_TREE;
41511585 923 }
b7e55469 924 else if (val1->lattice_val == CONSTANT
925 && val2->lattice_val == CONSTANT
926 && TREE_CODE (val1->value) == INTEGER_CST
927 && TREE_CODE (val2->value) == INTEGER_CST)
928 {
929 /* Ci M Cj = Ci if (i == j)
930 Ci M Cj = VARYING if (i != j)
931
932 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
933 drop to varying. */
934 val1->mask
935 = double_int_ior (double_int_ior (val1->mask,
936 val2->mask),
937 double_int_xor (tree_to_double_int (val1->value),
938 tree_to_double_int (val2->value)));
939 if (double_int_minus_one_p (val1->mask))
940 {
941 val1->lattice_val = VARYING;
942 val1->value = NULL_TREE;
943 }
944 }
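  /* Illustrative sketch (an editor's example, not part of the original
     sources): meeting CONSTANT 4 (mask 0) with CONSTANT 6 (mask 0)
     above yields value 4 with mask 2; the operands differ only in
     bit 1, so exactly that bit becomes unknown.  */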
88dbf20f 945 else if (val1->lattice_val == CONSTANT
946 && val2->lattice_val == CONSTANT
61207d43 947 && simple_cst_equal (val1->value, val2->value) == 1)
41511585 948 {
88dbf20f 949 /* Ci M Cj = Ci if (i == j)
950 Ci M Cj = VARYING if (i != j)
951
b7e55469 952 VAL1 already contains the value we want for equivalent values. */
953 }
954 else if (val1->lattice_val == CONSTANT
955 && val2->lattice_val == CONSTANT
956 && (TREE_CODE (val1->value) == ADDR_EXPR
957 || TREE_CODE (val2->value) == ADDR_EXPR))
958 {
959 /* When not equal addresses are involved try meeting for
960 alignment. */
961 prop_value_t tem = *val2;
962 if (TREE_CODE (val1->value) == ADDR_EXPR)
963 *val1 = get_value_for_expr (val1->value, true);
964 if (TREE_CODE (val2->value) == ADDR_EXPR)
965 tem = get_value_for_expr (val2->value, true);
966 ccp_lattice_meet (val1, &tem);
41511585 967 }
968 else
969 {
88dbf20f 970 /* Any other combination is VARYING. */
971 val1->lattice_val = VARYING;
b7e55469 972 val1->mask = double_int_minus_one;
88dbf20f 973 val1->value = NULL_TREE;
41511585 974 }
4ee9c684 975}
976
977
41511585 978/* Loop through the PHI_NODE's parameters for BLOCK and compare their
979 lattice values to determine PHI_NODE's lattice value. The value of a
88dbf20f 980 PHI node is determined by calling ccp_lattice_meet with all the arguments
41511585 981 of the PHI node that are incoming via executable edges. */
4ee9c684 982
41511585 983static enum ssa_prop_result
75a70cf9 984ccp_visit_phi_node (gimple phi)
4ee9c684 985{
75a70cf9 986 unsigned i;
88dbf20f 987 prop_value_t *old_val, new_val;
4ee9c684 988
41511585 989 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 990 {
41511585 991 fprintf (dump_file, "\nVisiting PHI node: ");
75a70cf9 992 print_gimple_stmt (dump_file, phi, 0, dump_flags);
4ee9c684 993 }
4ee9c684 994
75a70cf9 995 old_val = get_value (gimple_phi_result (phi));
41511585 996 switch (old_val->lattice_val)
997 {
998 case VARYING:
88dbf20f 999 return SSA_PROP_VARYING;
4ee9c684 1000
41511585 1001 case CONSTANT:
1002 new_val = *old_val;
1003 break;
4ee9c684 1004
41511585 1005 case UNDEFINED:
41511585 1006 new_val.lattice_val = UNDEFINED;
88dbf20f 1007 new_val.value = NULL_TREE;
41511585 1008 break;
4ee9c684 1009
41511585 1010 default:
8c0963c4 1011 gcc_unreachable ();
41511585 1012 }
4ee9c684 1013
75a70cf9 1014 for (i = 0; i < gimple_phi_num_args (phi); i++)
41511585 1015 {
88dbf20f 1016 /* Compute the meet operator over all the PHI arguments flowing
1017 through executable edges. */
75a70cf9 1018 edge e = gimple_phi_arg_edge (phi, i);
4ee9c684 1019
41511585 1020 if (dump_file && (dump_flags & TDF_DETAILS))
1021 {
1022 fprintf (dump_file,
1023 "\n Argument #%d (%d -> %d %sexecutable)\n",
1024 i, e->src->index, e->dest->index,
1025 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1026 }
1027
 1028 /* If the incoming edge is executable, compute the meet operator for
1029 the existing value of the PHI node and the current PHI argument. */
1030 if (e->flags & EDGE_EXECUTABLE)
1031 {
75a70cf9 1032 tree arg = gimple_phi_arg (phi, i)->def;
b7e55469 1033 prop_value_t arg_val = get_value_for_expr (arg, false);
4ee9c684 1034
88dbf20f 1035 ccp_lattice_meet (&new_val, &arg_val);
4ee9c684 1036
41511585 1037 if (dump_file && (dump_flags & TDF_DETAILS))
1038 {
1039 fprintf (dump_file, "\t");
88dbf20f 1040 print_generic_expr (dump_file, arg, dump_flags);
1041 dump_lattice_value (dump_file, "\tValue: ", arg_val);
41511585 1042 fprintf (dump_file, "\n");
1043 }
4ee9c684 1044
41511585 1045 if (new_val.lattice_val == VARYING)
1046 break;
1047 }
1048 }
4ee9c684 1049
1050 if (dump_file && (dump_flags & TDF_DETAILS))
41511585 1051 {
1052 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1053 fprintf (dump_file, "\n\n");
1054 }
1055
bfa30570 1056 /* Make the transition to the new value. */
75a70cf9 1057 if (set_lattice_value (gimple_phi_result (phi), new_val))
41511585 1058 {
1059 if (new_val.lattice_val == VARYING)
1060 return SSA_PROP_VARYING;
1061 else
1062 return SSA_PROP_INTERESTING;
1063 }
1064 else
1065 return SSA_PROP_NOT_INTERESTING;
4ee9c684 1066}
1067
15d138c9 1068/* Return the constant value for OP or OP otherwise. */
00f4f705 1069
1070static tree
15d138c9 1071valueize_op (tree op)
00f4f705 1072{
00f4f705 1073 if (TREE_CODE (op) == SSA_NAME)
1074 {
15d138c9 1075 tree tem = get_constant_value (op);
1076 if (tem)
1077 return tem;
00f4f705 1078 }
1079 return op;
1080}
1081
41511585 1082/* CCP specific front-end to the non-destructive constant folding
1083 routines.
4ee9c684 1084
1085 Attempt to simplify the RHS of STMT knowing that one or more
1086 operands are constants.
1087
1088 If simplification is possible, return the simplified RHS,
75a70cf9 1089 otherwise return the original RHS or NULL_TREE. */
4ee9c684 1090
1091static tree
75a70cf9 1092ccp_fold (gimple stmt)
4ee9c684 1093{
389dd41b 1094 location_t loc = gimple_location (stmt);
75a70cf9 1095 switch (gimple_code (stmt))
88dbf20f 1096 {
75a70cf9 1097 case GIMPLE_ASSIGN:
1098 {
1099 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1100
1101 switch (get_gimple_rhs_class (subcode))
1102 {
1103 case GIMPLE_SINGLE_RHS:
1104 {
1105 tree rhs = gimple_assign_rhs1 (stmt);
1106 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
1107
1108 if (TREE_CODE (rhs) == SSA_NAME)
1109 {
1110 /* If the RHS is an SSA_NAME, return its known constant value,
1111 if any. */
15d138c9 1112 return get_constant_value (rhs);
75a70cf9 1113 }
1114 /* Handle propagating invariant addresses into address operations.
1115 The folding we do here matches that in tree-ssa-forwprop.c. */
1116 else if (TREE_CODE (rhs) == ADDR_EXPR)
1117 {
1118 tree *base;
1119 base = &TREE_OPERAND (rhs, 0);
1120 while (handled_component_p (*base))
1121 base = &TREE_OPERAND (*base, 0);
182cf5a9 1122 if (TREE_CODE (*base) == MEM_REF
75a70cf9 1123 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
1124 {
15d138c9 1125 tree val = get_constant_value (TREE_OPERAND (*base, 0));
1126 if (val
1127 && TREE_CODE (val) == ADDR_EXPR)
75a70cf9 1128 {
182cf5a9 1129 tree ret, save = *base;
1130 tree new_base;
1131 new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
15d138c9 1132 unshare_expr (val),
182cf5a9 1133 TREE_OPERAND (*base, 1));
75a70cf9 1134 /* We need to return a new tree, not modify the IL
1135 or share parts of it. So play some tricks to
1136 avoid manually building it. */
182cf5a9 1137 *base = new_base;
75a70cf9 1138 ret = unshare_expr (rhs);
1139 recompute_tree_invariant_for_addr_expr (ret);
1140 *base = save;
1141 return ret;
1142 }
1143 }
1144 }
388a0bc7 1145 else if (TREE_CODE (rhs) == CONSTRUCTOR
1146 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
1147 && (CONSTRUCTOR_NELTS (rhs)
1148 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
1149 {
1150 unsigned i;
1151 tree val, list;
1152
1153 list = NULL_TREE;
1154 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
1155 {
15d138c9 1156 val = valueize_op (val);
388a0bc7 1157 if (TREE_CODE (val) == INTEGER_CST
1158 || TREE_CODE (val) == REAL_CST
1159 || TREE_CODE (val) == FIXED_CST)
1160 list = tree_cons (NULL_TREE, val, list);
1161 else
1162 return NULL_TREE;
1163 }
1164
1165 return build_vector (TREE_TYPE (rhs), nreverse (list));
1166 }
4ee9c684 1167
75a70cf9 1168 if (kind == tcc_reference)
3e4be816 1169 {
0fefde02 1170 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
1171 || TREE_CODE (rhs) == REALPART_EXPR
1172 || TREE_CODE (rhs) == IMAGPART_EXPR)
3e4be816 1173 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1174 {
15d138c9 1175 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
1176 if (val)
389dd41b 1177 return fold_unary_loc (EXPR_LOCATION (rhs),
15d138c9 1178 TREE_CODE (rhs),
1179 TREE_TYPE (rhs), val);
3e4be816 1180 }
182cf5a9 1181 else if (TREE_CODE (rhs) == MEM_REF
8edeb88b 1182 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
1183 {
15d138c9 1184 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
1185 if (val
1186 && TREE_CODE (val) == ADDR_EXPR)
182cf5a9 1187 {
1188 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
15d138c9 1189 unshare_expr (val),
182cf5a9 1190 TREE_OPERAND (rhs, 1));
1191 if (tem)
1192 rhs = tem;
1193 }
8edeb88b 1194 }
3e4be816 1195 return fold_const_aggregate_ref (rhs);
1196 }
75a70cf9 1197 else if (kind == tcc_declaration)
1198 return get_symbol_constant_value (rhs);
1199 return rhs;
1200 }
48e1416a 1201
75a70cf9 1202 case GIMPLE_UNARY_RHS:
1203 {
1204 /* Handle unary operators that can appear in GIMPLE form.
1205 Note that we know the single operand must be a constant,
1206 so this should almost always return a simplified RHS. */
1207 tree lhs = gimple_assign_lhs (stmt);
15d138c9 1208 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
75a70cf9 1209
1210 /* Conversions are useless for CCP purposes if they are
1211 value-preserving. Thus the restrictions that
1212 useless_type_conversion_p places for pointer type conversions
1213 do not apply here. Substitution later will only substitute to
1214 allowed places. */
d9659041 1215 if (CONVERT_EXPR_CODE_P (subcode)
5768aeb3 1216 && POINTER_TYPE_P (TREE_TYPE (lhs))
182cf5a9 1217 && POINTER_TYPE_P (TREE_TYPE (op0)))
5768aeb3 1218 {
1219 tree tem;
182cf5a9 1220 /* Try to re-construct array references on-the-fly. */
5768aeb3 1221 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1222 TREE_TYPE (op0))
1223 && ((tem = maybe_fold_offset_to_address
389dd41b 1224 (loc,
e60a6f7b 1225 op0, integer_zero_node, TREE_TYPE (lhs)))
5768aeb3 1226 != NULL_TREE))
1227 return tem;
1228 return op0;
1229 }
75a70cf9 1230
48e1416a 1231 return
389dd41b 1232 fold_unary_ignore_overflow_loc (loc, subcode,
1233 gimple_expr_type (stmt), op0);
f1fb2997 1234 }
75a70cf9 1235
1236 case GIMPLE_BINARY_RHS:
1237 {
1238 /* Handle binary operators that can appear in GIMPLE form. */
15d138c9 1239 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1240 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
75a70cf9 1241
182cf5a9 1242 /* Translate &x + CST into an invariant form suitable for
1243 further propagation. */
5768aeb3 1244 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1245 && TREE_CODE (op0) == ADDR_EXPR
1246 && TREE_CODE (op1) == INTEGER_CST)
1247 {
182cf5a9 1248 tree off = fold_convert (ptr_type_node, op1);
1249 return build_fold_addr_expr
1250 (fold_build2 (MEM_REF,
1251 TREE_TYPE (TREE_TYPE (op0)),
1252 unshare_expr (op0), off));
5768aeb3 1253 }
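	    /* Illustrative sketch (an editor's example, not part of the
	       original sources): p_1 = &a p+ 4 is translated above into
	       the invariant address &MEM_REF [&a, 4], which later
	       substitution can propagate into dereferences of p_1.  */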
1254
389dd41b 1255 return fold_binary_loc (loc, subcode,
182cf5a9 1256 gimple_expr_type (stmt), op0, op1);
75a70cf9 1257 }
1258
00f4f705 1259 case GIMPLE_TERNARY_RHS:
1260 {
15d138c9 1261 /* Handle ternary operators that can appear in GIMPLE form. */
1262 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1263 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
1264 tree op2 = valueize_op (gimple_assign_rhs3 (stmt));
00f4f705 1265
1266 return fold_ternary_loc (loc, subcode,
1267 gimple_expr_type (stmt), op0, op1, op2);
1268 }
1269
75a70cf9 1270 default:
1271 gcc_unreachable ();
1272 }
1273 }
1274 break;
4ee9c684 1275
75a70cf9 1276 case GIMPLE_CALL:
f257af64 1277 {
15d138c9 1278 tree fn = valueize_op (gimple_call_fn (stmt));
f257af64 1279 if (TREE_CODE (fn) == ADDR_EXPR
8ef4f124 1280 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
f257af64 1281 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1282 {
1283 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1284 tree call, retval;
1285 unsigned i;
1286 for (i = 0; i < gimple_call_num_args (stmt); ++i)
15d138c9 1287 args[i] = valueize_op (gimple_call_arg (stmt, i));
389dd41b 1288 call = build_call_array_loc (loc,
1289 gimple_call_return_type (stmt),
1290 fn, gimple_call_num_args (stmt), args);
1291 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
f257af64 1292 if (retval)
1293 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1294 STRIP_NOPS (retval);
1295 return retval;
1296 }
1297 return NULL_TREE;
1298 }
4ee9c684 1299
75a70cf9 1300 case GIMPLE_COND:
1301 {
1302 /* Handle comparison operators that can appear in GIMPLE form. */
15d138c9 1303 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1304 tree op1 = valueize_op (gimple_cond_rhs (stmt));
75a70cf9 1305 enum tree_code code = gimple_cond_code (stmt);
389dd41b 1306 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
75a70cf9 1307 }
4ee9c684 1308
75a70cf9 1309 case GIMPLE_SWITCH:
1310 {
15d138c9 1311 /* Return the constant switch index. */
1312 return valueize_op (gimple_switch_index (stmt));
75a70cf9 1313 }
912f109f 1314
75a70cf9 1315 default:
1316 gcc_unreachable ();
4ee9c684 1317 }
4ee9c684 1318}
1319
98d92e3c 1320/* See if we can find a constructor defining the value of BASE.
4912473d 1321 When the constructor is found at a constant offset (such as when
 1322 BASE is array[40] and we know the constructor of array),
 1323 BIT_OFFSET is adjusted accordingly.
98d92e3c 1324
1325 As a special case, return error_mark_node when constructor
1326 is not explicitly available, but it is known to be zero
1327 such as 'static const int a;'. */
1328static tree
4912473d 1329get_base_constructor (tree base, HOST_WIDE_INT *bit_offset)
98d92e3c 1330{
4912473d 1331 HOST_WIDE_INT bit_offset2, size, max_size;
98d92e3c 1332 if (TREE_CODE (base) == MEM_REF)
1333 {
1334 if (!integer_zerop (TREE_OPERAND (base, 1)))
4912473d 1335 {
1336 if (!host_integerp (TREE_OPERAND (base, 1), 0))
1337 return NULL_TREE;
1338 *bit_offset += (mem_ref_offset (base).low
1339 * BITS_PER_UNIT);
1340 }
98d92e3c 1341
1342 base = get_constant_value (TREE_OPERAND (base, 0));
1343 if (!base || TREE_CODE (base) != ADDR_EXPR)
1344 return NULL_TREE;
1345 base = TREE_OPERAND (base, 0);
1346 }
1347
1348 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1349 DECL_INITIAL. If BASE is a nested reference into another
1350 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1351 the inner reference. */
1352 switch (TREE_CODE (base))
1353 {
1354 case VAR_DECL:
7ae8b539 1355 if (!const_value_known_p (base))
98d92e3c 1356 return NULL_TREE;
1357
1358 /* Fallthru. */
1359 case CONST_DECL:
1360 if (!DECL_INITIAL (base)
1361 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1362 return error_mark_node;
1363 return DECL_INITIAL (base);
98d92e3c 1364
1365 case ARRAY_REF:
1366 case COMPONENT_REF:
4912473d 1367 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
1368 if (max_size == -1 || size != max_size)
1369 return NULL_TREE;
1370 *bit_offset += bit_offset2;
1371 return get_base_constructor (base, bit_offset);
98d92e3c 1372
1373 case STRING_CST:
1374 case CONSTRUCTOR:
1375 return base;
98d92e3c 1376
1377 default:
1378 return NULL_TREE;
1379 }
1380}
1381
83f4b93b 1382/* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
 1383 to the memory at bit OFFSET.
 1384
 1385 We only do the simple job of folding byte accesses. */
1386
1387static tree
1388fold_string_cst_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
1389 unsigned HOST_WIDE_INT size)
1390{
1391 if (INTEGRAL_TYPE_P (type)
1392 && (TYPE_MODE (type)
1393 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1394 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1395 == MODE_INT)
1396 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1397 && size == BITS_PER_UNIT
1398 && !(offset % BITS_PER_UNIT))
1399 {
1400 offset /= BITS_PER_UNIT;
1401 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
1402 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
1403 [offset]));
1404 /* Folding
1405 const char a[20]="hello";
1406 return a[10];
1407
 1408 might lead to an offset greater than the string length. In this
 1409 case the value is either initialized to 0 or out of bounds; return
 1410 0 in both cases. */
1411 return build_zero_cst (type);
1412 }
1413 return NULL_TREE;
1414}
1415
 1416/* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
1417 SIZE to the memory at bit OFFSET. */
1418
1419static tree
1420fold_array_ctor_reference (tree type, tree ctor,
1421 unsigned HOST_WIDE_INT offset,
1422 unsigned HOST_WIDE_INT size)
1423{
1424 unsigned HOST_WIDE_INT cnt;
1425 tree cfield, cval;
1426 double_int low_bound, elt_size;
1427 double_int index, max_index;
1428 double_int access_index;
1429 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
1430 HOST_WIDE_INT inner_offset;
1431
1432 /* Compute low bound and elt size. */
1433 if (domain_type && TYPE_MIN_VALUE (domain_type))
1434 {
 1435 /* Static constructors for variably sized objects make no sense. */
1436 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
1437 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
1438 }
1439 else
1440 low_bound = double_int_zero;
 1441 /* Static constructors for variably sized objects make no sense. */
 1442 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
 1443 == INTEGER_CST);
1444 elt_size =
1445 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
1446
1447
1448 /* We can handle only constantly sized accesses that are known to not
1449 be larger than size of array element. */
1450 if (!TYPE_SIZE_UNIT (type)
1451 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1452 || double_int_cmp (elt_size,
1453 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
1454 return NULL_TREE;
1455
1456 /* Compute the array index we look for. */
1457 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
1458 elt_size, TRUNC_DIV_EXPR);
1459 access_index = double_int_add (access_index, low_bound);
1460
1461 /* And offset within the access. */
1462 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
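  /* Illustrative sketch (an editor's example, not part of the original
     sources): for an array of 4-byte ints with low bound 0, a 32-bit
     access at bit OFFSET 96 gives access_index 96 / 8 / 4 + 0 == 3
     and inner_offset 0, i.e. exactly element 3 of the constructor.  */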
1463
1464 /* See if the array field is large enough to span whole access. We do not
1465 care to fold accesses spanning multiple array indexes. */
1466 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
1467 return NULL_TREE;
1468
1469 index = double_int_sub (low_bound, double_int_one);
1470 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1471 {
 1472 /* An array constructor might explicitly set the index, specify a
 1473 range, or leave the index NULL, meaning it is the next index
 1474 after the previous one. */
1475 if (cfield)
1476 {
1477 if (TREE_CODE (cfield) == INTEGER_CST)
1478 max_index = index = tree_to_double_int (cfield);
1479 else
1480 {
1481 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
1482 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
1483 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
1484 }
1485 }
1486 else
1487 max_index = index = double_int_add (index, double_int_one);
1488
 1489 /* Do we have a match? */
1490 if (double_int_cmp (access_index, index, 1) >= 0
1491 && double_int_cmp (access_index, max_index, 1) <= 0)
1492 return fold_ctor_reference (type, cval, inner_offset, size);
1493 }
 1494 /* When memory is not explicitly mentioned in the constructor,
 1495 it is 0 (or out of range). */
1496 return build_zero_cst (type);
1497}
1498
 1499/* CTOR is a CONSTRUCTOR of an aggregate or vector.
1500 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
1501
1502static tree
1503fold_nonarray_ctor_reference (tree type, tree ctor,
1504 unsigned HOST_WIDE_INT offset,
1505 unsigned HOST_WIDE_INT size)
1506{
1507 unsigned HOST_WIDE_INT cnt;
1508 tree cfield, cval;
1509
1510 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
1511 cval)
1512 {
1513 tree byte_offset = DECL_FIELD_OFFSET (cfield);
1514 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
1515 tree field_size = DECL_SIZE (cfield);
1516 double_int bitoffset;
1517 double_int byte_offset_cst = tree_to_double_int (byte_offset);
1518 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
1519 double_int bitoffset_end;
1520
4938e520 1521 /* Variable sized objects in static constructors make no sense,
 1522 but field_size can be NULL for flexible array members. */
83f4b93b 1523 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
1524 && TREE_CODE (byte_offset) == INTEGER_CST
4938e520 1525 && (field_size != NULL_TREE
1526 ? TREE_CODE (field_size) == INTEGER_CST
1527 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
83f4b93b 1528
1529 /* Compute bit offset of the field. */
1530 bitoffset = double_int_add (tree_to_double_int (field_offset),
1531 double_int_mul (byte_offset_cst,
1532 bits_per_unit_cst));
1533 /* Compute bit offset where the field ends. */
4938e520 1534 if (field_size != NULL_TREE)
1535 bitoffset_end = double_int_add (bitoffset,
1536 tree_to_double_int (field_size));
1537 else
1538 bitoffset_end = double_int_zero;
83f4b93b 1539
 1540 /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)? */
1541 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
4938e520 1542 && (field_size == NULL_TREE
1543 || double_int_cmp (uhwi_to_double_int (offset),
1544 bitoffset_end, 0) < 0))
83f4b93b 1545 {
1546 double_int access_end = double_int_add (uhwi_to_double_int (offset),
1547 uhwi_to_double_int (size));
1548 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
1549 bitoffset);
 1550 /* We do have overlap. Now see if the field is large enough to
 1551 cover the access. Give up for accesses spanning multiple
 1552 fields. */
1553 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
1554 return NULL_TREE;
1555 return fold_ctor_reference (type, cval,
1556 double_int_to_uhwi (inner_offset), size);
1557 }
1558 }
 1559 /* When memory is not explicitly mentioned in the constructor, it is 0. */
1560 return build_zero_cst (type);
1561}
1562
 1563/* CTOR is a value initializing memory; fold a reference of type TYPE and size SIZE
1564 to the memory at bit OFFSET. */
1565
1566static tree
1567fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
1568 unsigned HOST_WIDE_INT size)
1569{
1570 tree ret;
1571
 1572 /* We found the field with an exact match. */
1573 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
1574 && !offset)
1575 return canonicalize_constructor_val (ctor);
1576
 1577 /* We are at the end of the walk; see if we can view-convert the
 1578 result. */
1579 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
1580 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
1581 && operand_equal_p (TYPE_SIZE (type),
1582 TYPE_SIZE (TREE_TYPE (ctor)), 0))
1583 {
1584 ret = canonicalize_constructor_val (ctor);
1585 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
1586 if (ret)
1587 STRIP_NOPS (ret);
1588 return ret;
1589 }
1590 if (TREE_CODE (ctor) == STRING_CST)
1591 return fold_string_cst_ctor_reference (type, ctor, offset, size);
1592 if (TREE_CODE (ctor) == CONSTRUCTOR)
1593 {
1594
1595 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
1596 return fold_array_ctor_reference (type, ctor, offset, size);
1597 else
1598 return fold_nonarray_ctor_reference (type, ctor, offset, size);
1599 }
1600
1601 return NULL_TREE;
1602}
1603
8782adcf 1604/* Return the tree representing the element referenced by T if T is an
1605 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1606 NULL_TREE otherwise. */
1607
e004838d 1608tree
8782adcf 1609fold_const_aggregate_ref (tree t)
1610{
83f4b93b 1611 tree ctor, idx, base;
1612 HOST_WIDE_INT offset, size, max_size;
15d138c9 1613 tree tem;
8782adcf 1614
8edeb88b 1615 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1616 return get_symbol_constant_value (t);
1617
15ba153a 1618 tem = fold_read_from_constant_string (t);
1619 if (tem)
1620 return tem;
1621
8782adcf 1622 switch (TREE_CODE (t))
1623 {
1624 case ARRAY_REF:
83f4b93b 1625 case ARRAY_RANGE_REF:
1626 /* Constant indexes are handled well by get_base_constructor.
1627 Only special case variable offsets.
1628 FIXME: This code can't handle nested references with variable indexes
1629 (they will be handled only by iteration of ccp). Perhaps we can bring
1630 get_ref_base_and_extent here and make it use get_constant_value. */
1631 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
1632 && (idx = get_constant_value (TREE_OPERAND (t, 1)))
1633 && host_integerp (idx, 0))
1634 {
1635 tree low_bound, unit_size;
8782adcf 1636
83f4b93b 1637 /* If the resulting bit-offset is constant, track it. */
1638 if ((low_bound = array_ref_low_bound (t),
1639 host_integerp (low_bound, 0))
1640 && (unit_size = array_ref_element_size (t),
1641 host_integerp (unit_size, 1)))
1642 {
1643 offset = TREE_INT_CST_LOW (idx);
1644 offset -= TREE_INT_CST_LOW (low_bound);
1645 offset *= TREE_INT_CST_LOW (unit_size);
1646 offset *= BITS_PER_UNIT;
1647
1648 base = TREE_OPERAND (t, 0);
4912473d 1649 ctor = get_base_constructor (base, &offset);
83f4b93b 1650 /* Empty constructor. Always fold to 0. */
1651 if (ctor == error_mark_node)
1652 return build_zero_cst (TREE_TYPE (t));
 1653 /* Out-of-bounds array access. The value is undefined, but don't fold. */
1654 if (offset < 0)
1655 return NULL_TREE;
 1656 /* We cannot determine the constructor. */
1657 if (!ctor)
1658 return NULL_TREE;
1659 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
1660 TREE_INT_CST_LOW (unit_size)
1661 * BITS_PER_UNIT);
1662 }
1663 }
1664 /* Fallthru. */
1665
1666 case COMPONENT_REF:
1667 case BIT_FIELD_REF:
1668 case TARGET_MEM_REF:
1669 case MEM_REF:
1670 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
4912473d 1671 ctor = get_base_constructor (base, &offset);
04236c3a 1672
83f4b93b 1673 /* Empty constructor. Always fold to 0. */
98d92e3c 1674 if (ctor == error_mark_node)
1675 return build_zero_cst (TREE_TYPE (t));
 83f4b93b 1676 /* We do not know the precise address. */
1677 if (max_size == -1 || max_size != size)
1678 return NULL_TREE;
 1679	      /* We cannot determine the ctor.  */
1680 if (!ctor)
8782adcf 1681 return NULL_TREE;
1682
83f4b93b 1683	      /* Out-of-bounds array access.  The value is undefined, but don't fold.  */
1684 if (offset < 0)
98d92e3c 1685 return NULL_TREE;
8782adcf 1686
83f4b93b 1687 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
8782adcf 1688
908cb59d 1689 case REALPART_EXPR:
1690 case IMAGPART_EXPR:
1691 {
1692 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1693 if (c && TREE_CODE (c) == COMPLEX_CST)
389dd41b 1694 return fold_build1_loc (EXPR_LOCATION (t),
1695 TREE_CODE (t), TREE_TYPE (t), c);
908cb59d 1696 break;
1697 }
04236c3a 1698
8782adcf 1699 default:
1700 break;
1701 }
1702
1703 return NULL_TREE;
1704}
75a70cf9 1705
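/* A standalone sketch (hypothetical helper, not used by this pass) of
   the ARRAY_REF bit-offset arithmetic above: the constant index is
   rebased against the array's low bound and scaled to bits.  */

static long
ccp_example_array_bit_offset (long idx, long low_bound,
			      long unit_size, int bits_per_unit)
{
  /* E.g. idx 5, low bound 0, 4-byte elements, 8-bit units:
     (5 - 0) * 4 * 8 == 160 bits into the constructor.  */
  return (idx - low_bound) * unit_size * bits_per_unit;
}
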
b7e55469 1706/* Apply the operation CODE in type TYPE to the value, mask pair
1707 RVAL and RMASK representing a value of type RTYPE and set
1708 the value, mask pair *VAL and *MASK to the result. */
1709
1710static void
1711bit_value_unop_1 (enum tree_code code, tree type,
1712 double_int *val, double_int *mask,
1713 tree rtype, double_int rval, double_int rmask)
1714{
1715 switch (code)
1716 {
1717 case BIT_NOT_EXPR:
1718 *mask = rmask;
1719 *val = double_int_not (rval);
1720 break;
1721
1722 case NEGATE_EXPR:
1723 {
1724 double_int temv, temm;
1725 /* Return ~rval + 1. */
1726 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1727 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1728 type, temv, temm,
1729 type, double_int_one, double_int_zero);
1730 break;
1731 }
1732
1733 CASE_CONVERT:
1734 {
1735 bool uns;
1736
1737 /* First extend mask and value according to the original type. */
1738 uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
1739 ? 0 : TYPE_UNSIGNED (rtype));
1740 *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
1741 *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);
1742
1743 /* Then extend mask and value according to the target type. */
1744 uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1745 ? 0 : TYPE_UNSIGNED (type));
1746 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1747 *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
1748 break;
1749 }
1750
1751 default:
1752 *mask = double_int_minus_one;
1753 break;
1754 }
1755}
1756
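/* A standalone illustration (hypothetical, not used by the pass) of the
   NEGATE_EXPR identity used above: negation is bitwise complement plus
   one, which is why the value/mask pair of -X is derived from ~X.  */

static unsigned char
ccp_example_negate (unsigned char x)
{
  /* E.g. x == 1 yields 0xff, the 8-bit two's complement of 1.  */
  return (unsigned char) (~x + 1);
}
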
1757/* Apply the operation CODE in type TYPE to the value, mask pairs
 1758	   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1759 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1760
1761static void
1762bit_value_binop_1 (enum tree_code code, tree type,
1763 double_int *val, double_int *mask,
1764 tree r1type, double_int r1val, double_int r1mask,
1765 tree r2type, double_int r2val, double_int r2mask)
1766{
1767 bool uns = (TREE_CODE (type) == INTEGER_TYPE
1768 && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
 1769	  /* Assume we'll get a constant result; start from an all-varying mask,
 1770	     narrowed by the cases below.  Standalone 8-bit sketches of this algebra follow the function.  */
1771 *mask = double_int_minus_one;
1772 switch (code)
1773 {
1774 case BIT_AND_EXPR:
 1775	      /* A result bit is unknown only where some input bit is unknown and
 1776	 	 no known clear bit forces it: (m1 | m2) & ((v1 | m1) & (v2 | m2)).  */
1777 *mask = double_int_and (double_int_ior (r1mask, r2mask),
1778 double_int_and (double_int_ior (r1val, r1mask),
1779 double_int_ior (r2val, r2mask)));
1780 *val = double_int_and (r1val, r2val);
1781 break;
1782
1783 case BIT_IOR_EXPR:
 1784	      /* A result bit is unknown only where some input bit is unknown and
 1785	 	 no known set bit forces it: (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
1786 *mask = double_int_and_not
1787 (double_int_ior (r1mask, r2mask),
1788 double_int_ior (double_int_and_not (r1val, r1mask),
1789 double_int_and_not (r2val, r2mask)));
1790 *val = double_int_ior (r1val, r2val);
1791 break;
1792
1793 case BIT_XOR_EXPR:
1794 /* m1 | m2 */
1795 *mask = double_int_ior (r1mask, r2mask);
1796 *val = double_int_xor (r1val, r2val);
1797 break;
1798
1799 case LROTATE_EXPR:
1800 case RROTATE_EXPR:
1801 if (double_int_zero_p (r2mask))
1802 {
1803 HOST_WIDE_INT shift = r2val.low;
1804 if (code == RROTATE_EXPR)
1805 shift = -shift;
1806 *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
1807 *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
1808 }
1809 break;
1810
1811 case LSHIFT_EXPR:
1812 case RSHIFT_EXPR:
 1813	      /* ??? We could handle a partially known shift count if we knew
 1814	 	 its sign.  That way we could tell that (x << (y | 8)) & 255
 1815	 	 is zero.  */
1816 if (double_int_zero_p (r2mask))
1817 {
1818 HOST_WIDE_INT shift = r2val.low;
1819 if (code == RSHIFT_EXPR)
1820 shift = -shift;
 1821	 	  /* We need to know whether this is a left or a right shift
 1822	 	     to properly shift in zeros for left shifts and unsigned
 1823	 	     right shifts, and the sign bit for signed right shifts.
 1824	 	     For signed right shifts we shift in varying in case
 1825	 	     the sign bit was varying.  */
1826 if (shift > 0)
1827 {
1828 *mask = double_int_lshift (r1mask, shift,
1829 TYPE_PRECISION (type), false);
1830 *val = double_int_lshift (r1val, shift,
1831 TYPE_PRECISION (type), false);
1832 }
1833 else if (shift < 0)
1834 {
1835 shift = -shift;
1836 *mask = double_int_rshift (r1mask, shift,
1837 TYPE_PRECISION (type), !uns);
1838 *val = double_int_rshift (r1val, shift,
1839 TYPE_PRECISION (type), !uns);
1840 }
1841 else
1842 {
1843 *mask = r1mask;
1844 *val = r1val;
1845 }
1846 }
1847 break;
1848
1849 case PLUS_EXPR:
1850 case POINTER_PLUS_EXPR:
1851 {
1852 double_int lo, hi;
1853 /* Do the addition with unknown bits set to zero, to give carry-ins of
1854 zero wherever possible. */
1855 lo = double_int_add (double_int_and_not (r1val, r1mask),
1856 double_int_and_not (r2val, r2mask));
1857 lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
1858 /* Do the addition with unknown bits set to one, to give carry-ins of
1859 one wherever possible. */
1860 hi = double_int_add (double_int_ior (r1val, r1mask),
1861 double_int_ior (r2val, r2mask));
1862 hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
1863 /* Each bit in the result is known if (a) the corresponding bits in
1864 both inputs are known, and (b) the carry-in to that bit position
1865 is known. We can check condition (b) by seeing if we got the same
1866 result with minimised carries as with maximised carries. */
1867 *mask = double_int_ior (double_int_ior (r1mask, r2mask),
1868 double_int_xor (lo, hi));
1869 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1870 /* It shouldn't matter whether we choose lo or hi here. */
1871 *val = lo;
1872 break;
1873 }
1874
1875 case MINUS_EXPR:
1876 {
1877 double_int temv, temm;
1878 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1879 r2type, r2val, r2mask);
1880 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1881 r1type, r1val, r1mask,
1882 r2type, temv, temm);
1883 break;
1884 }
1885
1886 case MULT_EXPR:
1887 {
 1888	 	/* Just track trailing zeros in both operands; the product has
 1889	 	   at least the sum of both counts as trailing zeros.  */
1890 int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
1891 int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
1892 if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
1893 {
1894 *mask = double_int_zero;
1895 *val = double_int_zero;
1896 }
1897 else if (r1tz + r2tz > 0)
1898 {
1899 *mask = double_int_not (double_int_mask (r1tz + r2tz));
1900 *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
1901 *val = double_int_zero;
1902 }
1903 break;
1904 }
1905
1906 case EQ_EXPR:
1907 case NE_EXPR:
1908 {
1909 double_int m = double_int_ior (r1mask, r2mask);
1910 if (!double_int_equal_p (double_int_and_not (r1val, m),
1911 double_int_and_not (r2val, m)))
1912 {
1913 *mask = double_int_zero;
1914 *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
1915 }
1916 else
1917 {
1918 /* We know the result of a comparison is always one or zero. */
1919 *mask = double_int_one;
1920 *val = double_int_zero;
1921 }
1922 break;
1923 }
1924
1925 case GE_EXPR:
1926 case GT_EXPR:
1927 {
1928 double_int tem = r1val;
1929 r1val = r2val;
1930 r2val = tem;
1931 tem = r1mask;
1932 r1mask = r2mask;
1933 r2mask = tem;
1934 code = swap_tree_comparison (code);
1935 }
1936 /* Fallthru. */
1937 case LT_EXPR:
1938 case LE_EXPR:
1939 {
1940 int minmax, maxmin;
 1941	 	/* If the most significant bits are not known, we know nothing.  */
1942 if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
1943 break;
1944
 1945	 	/* If we know the most significant bits, we know the value ranges
 1946	 	   obtained by treating the varying bits as all zeros or all ones.
 1947	 	   Do a cross comparison of the max/min pairs; see the sketch after this function.  */
1948 maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
1949 double_int_and_not (r2val, r2mask), uns);
1950 minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
1951 double_int_ior (r2val, r2mask), uns);
1952 if (maxmin < 0) /* r1 is less than r2. */
1953 {
1954 *mask = double_int_zero;
1955 *val = double_int_one;
1956 }
 1957	 	else if (minmax > 0)  /* r1 is not less than or equal to r2.  */
1958 {
1959 *mask = double_int_zero;
1960 *val = double_int_zero;
1961 }
1962 else if (maxmin == minmax) /* r1 and r2 are equal. */
1963 {
 1964	 	    /* This should probably never happen, as we would have
 1965	 	       folded it during fully-constant value folding.  */
1966 *mask = double_int_zero;
1967 *val = (code == LE_EXPR ? double_int_one : double_int_zero);
1968 }
1969 else
1970 {
1971 /* We know the result of a comparison is always one or zero. */
1972 *mask = double_int_one;
1973 *val = double_int_zero;
1974 }
1975 break;
1976 }
1977
1978 default:;
1979 }
1980}
1981
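/* Standalone sketches (hypothetical helpers, not used by the pass) of
   the value/mask algebra above, on unsigned 8-bit operands.  Bits set
   in a MASK are unknown ("varying"); the remaining bits are given by
   the corresponding VAL.  */

static void
ccp_example_bit_and (unsigned char v1, unsigned char m1,
		     unsigned char v2, unsigned char m2,
		     unsigned char *val, unsigned char *mask)
{
  /* E.g. (v1, m1) = (0x00, 0x0f) AND (v2, m2) = (0xff, 0x00) gives
     mask 0x0f and val 0x00, i.e. "0000xxxx" in binary.  */
  *mask = (m1 | m2) & ((v1 | m1) & (v2 | m2));
  *val = v1 & v2;
}

static void
ccp_example_plus (unsigned char v1, unsigned char m1,
		  unsigned char v2, unsigned char m2,
		  unsigned char *val, unsigned char *mask)
{
  /* Add once with unknown bits as zeros (minimal carries) and once
     with unknown bits as ones (maximal carries); result bits that
     differ between the two sums depend on an unknown carry-in and are
     unknown, as are bits unknown in either input.  */
  unsigned char lo = (v1 & (unsigned char) ~m1) + (v2 & (unsigned char) ~m2);
  unsigned char hi = (v1 | m1) + (v2 | m2);
  *mask = m1 | m2 | (lo ^ hi);
  *val = lo;
}

static int
ccp_example_less (unsigned char v1, unsigned char m1,
		  unsigned char v2, unsigned char m2)
{
  /* Cross-compare the extremes obtained by treating unknown bits as
     all ones or all zeros: if even the largest possible first operand
     is below the smallest possible second, the result is certainly 1;
     in the opposite case it is certainly 0.  Returns -1 when unknown.
     (The pass additionally handles LE and the degenerate equal case.)  */
  if ((v1 | m1) < (v2 & (unsigned char) ~m2))
    return 1;
  if ((v1 & (unsigned char) ~m1) > (v2 | m2))
    return 0;
  return -1;
}
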
1982/* Return the propagation value when applying the operation CODE to
1983 the value RHS yielding type TYPE. */
1984
1985static prop_value_t
1986bit_value_unop (enum tree_code code, tree type, tree rhs)
1987{
1988 prop_value_t rval = get_value_for_expr (rhs, true);
1989 double_int value, mask;
1990 prop_value_t val;
1991 gcc_assert ((rval.lattice_val == CONSTANT
1992 && TREE_CODE (rval.value) == INTEGER_CST)
1993 || double_int_minus_one_p (rval.mask));
1994 bit_value_unop_1 (code, type, &value, &mask,
1995 TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
1996 if (!double_int_minus_one_p (mask))
1997 {
1998 val.lattice_val = CONSTANT;
1999 val.mask = mask;
2000 /* ??? Delay building trees here. */
2001 val.value = double_int_to_tree (type, value);
2002 }
2003 else
2004 {
2005 val.lattice_val = VARYING;
2006 val.value = NULL_TREE;
2007 val.mask = double_int_minus_one;
2008 }
2009 return val;
2010}
2011
2012/* Return the propagation value when applying the operation CODE to
2013 the values RHS1 and RHS2 yielding type TYPE. */
2014
2015static prop_value_t
2016bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
2017{
2018 prop_value_t r1val = get_value_for_expr (rhs1, true);
2019 prop_value_t r2val = get_value_for_expr (rhs2, true);
2020 double_int value, mask;
2021 prop_value_t val;
2022 gcc_assert ((r1val.lattice_val == CONSTANT
2023 && TREE_CODE (r1val.value) == INTEGER_CST)
2024 || double_int_minus_one_p (r1val.mask));
2025 gcc_assert ((r2val.lattice_val == CONSTANT
2026 && TREE_CODE (r2val.value) == INTEGER_CST)
2027 || double_int_minus_one_p (r2val.mask));
2028 bit_value_binop_1 (code, type, &value, &mask,
2029 TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
2030 TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
2031 if (!double_int_minus_one_p (mask))
2032 {
2033 val.lattice_val = CONSTANT;
2034 val.mask = mask;
2035 /* ??? Delay building trees here. */
2036 val.value = double_int_to_tree (type, value);
2037 }
2038 else
2039 {
2040 val.lattice_val = VARYING;
2041 val.value = NULL_TREE;
2042 val.mask = double_int_minus_one;
2043 }
2044 return val;
2045}
2046
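/* For instance (illustrative values), a CONSTANT lattice entry with
   VALUE 0x10 and MASK 0x0f produced above stands for "some constant
   whose low four bits are unknown and whose remaining bits are those
   of 0x10", i.e. any value in [0x10, 0x1f]; only an all-ones MASK
   degrades the result to VARYING.  */
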
75a70cf9 2047/* Evaluate statement STMT.
2048 Valid only for assignments, calls, conditionals, and switches. */
4ee9c684 2049
88dbf20f 2050static prop_value_t
75a70cf9 2051evaluate_stmt (gimple stmt)
4ee9c684 2052{
88dbf20f 2053 prop_value_t val;
4f61cce6 2054 tree simplified = NULL_TREE;
88dbf20f 2055 ccp_lattice_t likelyvalue = likely_value (stmt);
b7e55469 2056 bool is_constant = false;
88dbf20f 2057
b7e55469 2058 if (dump_file && (dump_flags & TDF_DETAILS))
2059 {
2060 fprintf (dump_file, "which is likely ");
2061 switch (likelyvalue)
2062 {
2063 case CONSTANT:
2064 fprintf (dump_file, "CONSTANT");
2065 break;
2066 case UNDEFINED:
2067 fprintf (dump_file, "UNDEFINED");
2068 break;
2069 case VARYING:
2070 fprintf (dump_file, "VARYING");
2071 break;
2072 default:;
2073 }
2074 fprintf (dump_file, "\n");
2075 }
add6ee5e 2076
4ee9c684 2077 /* If the statement is likely to have a CONSTANT result, then try
2078 to fold the statement to determine the constant value. */
75a70cf9 2079 /* FIXME. This is the only place that we call ccp_fold.
2080 Since likely_value never returns CONSTANT for calls, we will
 2081	     not attempt to fold them, including builtins that might profit from folding.  */
4ee9c684 2082 if (likelyvalue == CONSTANT)
b7e55469 2083 {
2084 fold_defer_overflow_warnings ();
2085 simplified = ccp_fold (stmt);
2086 is_constant = simplified && is_gimple_min_invariant (simplified);
2087 fold_undefer_overflow_warnings (is_constant, stmt, 0);
2088 if (is_constant)
2089 {
2090 /* The statement produced a constant value. */
2091 val.lattice_val = CONSTANT;
2092 val.value = simplified;
2093 val.mask = double_int_zero;
2094 }
2095 }
4ee9c684 2096 /* If the statement is likely to have a VARYING result, then do not
2097 bother folding the statement. */
04236c3a 2098 else if (likelyvalue == VARYING)
75a70cf9 2099 {
590c3166 2100 enum gimple_code code = gimple_code (stmt);
75a70cf9 2101 if (code == GIMPLE_ASSIGN)
2102 {
2103 enum tree_code subcode = gimple_assign_rhs_code (stmt);
48e1416a 2104
75a70cf9 2105 /* Other cases cannot satisfy is_gimple_min_invariant
2106 without folding. */
2107 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
2108 simplified = gimple_assign_rhs1 (stmt);
2109 }
2110 else if (code == GIMPLE_SWITCH)
2111 simplified = gimple_switch_index (stmt);
2112 else
a65c4d64 2113 /* These cannot satisfy is_gimple_min_invariant without folding. */
2114 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
b7e55469 2115 is_constant = simplified && is_gimple_min_invariant (simplified);
2116 if (is_constant)
2117 {
2118 /* The statement produced a constant value. */
2119 val.lattice_val = CONSTANT;
2120 val.value = simplified;
2121 val.mask = double_int_zero;
2122 }
75a70cf9 2123 }
4ee9c684 2124
b7e55469 2125 /* Resort to simplification for bitwise tracking. */
2126 if (flag_tree_bit_ccp
2127 && likelyvalue == CONSTANT
2128 && !is_constant)
912f109f 2129 {
b7e55469 2130 enum gimple_code code = gimple_code (stmt);
153c3b50 2131 tree fndecl;
b7e55469 2132 val.lattice_val = VARYING;
2133 val.value = NULL_TREE;
2134 val.mask = double_int_minus_one;
2135 if (code == GIMPLE_ASSIGN)
912f109f 2136 {
b7e55469 2137 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2138 tree rhs1 = gimple_assign_rhs1 (stmt);
2139 switch (get_gimple_rhs_class (subcode))
2140 {
2141 case GIMPLE_SINGLE_RHS:
2142 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2143 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2144 val = get_value_for_expr (rhs1, true);
2145 break;
2146
2147 case GIMPLE_UNARY_RHS:
2148 if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2149 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2150 && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
2151 || POINTER_TYPE_P (gimple_expr_type (stmt))))
2152 val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
2153 break;
2154
2155 case GIMPLE_BINARY_RHS:
2156 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2157 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2158 {
2159 tree rhs2 = gimple_assign_rhs2 (stmt);
2160 val = bit_value_binop (subcode,
2161 TREE_TYPE (rhs1), rhs1, rhs2);
2162 }
2163 break;
2164
2165 default:;
2166 }
912f109f 2167 }
b7e55469 2168 else if (code == GIMPLE_COND)
2169 {
2170 enum tree_code code = gimple_cond_code (stmt);
2171 tree rhs1 = gimple_cond_lhs (stmt);
2172 tree rhs2 = gimple_cond_rhs (stmt);
2173 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2174 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
2175 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
2176 }
153c3b50 2177 else if (code == GIMPLE_CALL
2178 && (fndecl = gimple_call_fndecl (stmt))
2179 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2180 {
2181 switch (DECL_FUNCTION_CODE (fndecl))
2182 {
2183 case BUILT_IN_MALLOC:
2184 case BUILT_IN_REALLOC:
2185 case BUILT_IN_CALLOC:
2186 val.lattice_val = CONSTANT;
2187 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
2188 val.mask = shwi_to_double_int
2189 (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
2190 / BITS_PER_UNIT - 1));
2191 break;
2192
2193 case BUILT_IN_ALLOCA:
2194 val.lattice_val = CONSTANT;
2195 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
2196 val.mask = shwi_to_double_int
2197 (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
2198 / BITS_PER_UNIT - 1));
2199 break;
2200
2201 default:;
2202 }
2203 }
b7e55469 2204 is_constant = (val.lattice_val == CONSTANT);
912f109f 2205 }
2206
b7e55469 2207 if (!is_constant)
4ee9c684 2208 {
2209 /* The statement produced a nonconstant value. If the statement
88dbf20f 2210 had UNDEFINED operands, then the result of the statement
2211 should be UNDEFINED. Otherwise, the statement is VARYING. */
bfa30570 2212 if (likelyvalue == UNDEFINED)
b7e55469 2213 {
2214 val.lattice_val = likelyvalue;
2215 val.mask = double_int_zero;
2216 }
b765fa12 2217 else
b7e55469 2218 {
2219 val.lattice_val = VARYING;
2220 val.mask = double_int_minus_one;
2221 }
b765fa12 2222
88dbf20f 2223 val.value = NULL_TREE;
4ee9c684 2224 }
41511585 2225
2226 return val;
4ee9c684 2227}
2228
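/* A standalone sketch (hypothetical helper, not used by the pass) of
   the alignment masks computed above for malloc/alloca results: a
   pointer known to be ALIGN_BYTES-aligned has its low bits known to
   be zero, while all higher bits remain unknown.  */

static void
ccp_example_aligned_pointer (unsigned long align_bytes,
			     unsigned long *val, unsigned long *mask)
{
  /* E.g. align_bytes == 16 gives val 0 and mask ~0xfUL: the low four
     bits are known zeros, everything above them varies.  */
  *val = 0;
  *mask = ~(align_bytes - 1);
}
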
6688f8ec 2229/* Fold the stmt at *GSI with CCP specific information that propagating
2230 and regular folding does not catch. */
2231
2232static bool
2233ccp_fold_stmt (gimple_stmt_iterator *gsi)
2234{
2235 gimple stmt = gsi_stmt (*gsi);
6688f8ec 2236
94144e68 2237 switch (gimple_code (stmt))
2238 {
2239 case GIMPLE_COND:
2240 {
2241 prop_value_t val;
2242 /* Statement evaluation will handle type mismatches in constants
2243 more gracefully than the final propagation. This allows us to
 2244	 	   fold more conditionals here; an example follows the function.  */
2245 val = evaluate_stmt (stmt);
2246 if (val.lattice_val != CONSTANT
b7e55469 2247 || !double_int_zero_p (val.mask))
94144e68 2248 return false;
2249
b7e55469 2250 if (dump_file)
2251 {
2252 fprintf (dump_file, "Folding predicate ");
2253 print_gimple_expr (dump_file, stmt, 0, 0);
2254 fprintf (dump_file, " to ");
2255 print_generic_expr (dump_file, val.value, 0);
2256 fprintf (dump_file, "\n");
2257 }
2258
94144e68 2259 if (integer_zerop (val.value))
2260 gimple_cond_make_false (stmt);
2261 else
2262 gimple_cond_make_true (stmt);
6688f8ec 2263
94144e68 2264 return true;
2265 }
6688f8ec 2266
94144e68 2267 case GIMPLE_CALL:
2268 {
2269 tree lhs = gimple_call_lhs (stmt);
15d138c9 2270 tree val;
94144e68 2271 tree argt;
e16f4c39 2272 tree callee;
94144e68 2273 bool changed = false;
2274 unsigned i;
2275
 2276	 	/* If the call was folded into a constant, make sure it goes
 2277	 	   away even if we cannot propagate into all uses because of
 2278	 	   type issues.  */
2279 if (lhs
2280 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2281 && (val = get_constant_value (lhs)))
94144e68 2282 {
15d138c9 2283 tree new_rhs = unshare_expr (val);
338cce8f 2284 bool res;
94144e68 2285 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2286 TREE_TYPE (new_rhs)))
2287 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
338cce8f 2288 res = update_call_from_tree (gsi, new_rhs);
2289 gcc_assert (res);
94144e68 2290 return true;
2291 }
2292
 2293	 	/* Propagate into the call arguments.  Compared to replace_uses_in,
 2294	 	   this can use the argument slot types for type verification
 2295	 	   instead of the current argument type.  We can also safely
 2296	 	   drop qualifiers here, as we are dealing with constants anyway.  */
2297 argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
2298 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2299 ++i, argt = TREE_CHAIN (argt))
2300 {
2301 tree arg = gimple_call_arg (stmt, i);
2302 if (TREE_CODE (arg) == SSA_NAME
15d138c9 2303 && (val = get_constant_value (arg))
94144e68 2304 && useless_type_conversion_p
2305 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
15d138c9 2306 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
94144e68 2307 {
15d138c9 2308 gimple_call_set_arg (stmt, i, unshare_expr (val));
94144e68 2309 changed = true;
2310 }
2311 }
e16f4c39 2312
2313 callee = gimple_call_fn (stmt);
2314 if (TREE_CODE (callee) == OBJ_TYPE_REF
2315 && TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == SSA_NAME)
ede94584 2316 {
e16f4c39 2317 tree expr = OBJ_TYPE_REF_EXPR (callee);
2318 OBJ_TYPE_REF_EXPR (callee) = valueize_op (expr);
2319 if (TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == ADDR_EXPR)
2320 {
2321 tree t;
2322 t = gimple_fold_obj_type_ref (callee, NULL_TREE);
2323 if (t)
2324 {
2325 gimple_call_set_fn (stmt, t);
2326 changed = true;
2327 }
2328 }
2329 OBJ_TYPE_REF_EXPR (callee) = expr;
ede94584 2330 }
94144e68 2331
2332 return changed;
2333 }
6688f8ec 2334
6872bf3c 2335 case GIMPLE_ASSIGN:
2336 {
2337 tree lhs = gimple_assign_lhs (stmt);
15d138c9 2338 tree val;
6872bf3c 2339
 2340	 	/* If we have a load that turned out to be constant, replace it,
 2341	 	   as we cannot propagate into all uses in all cases.  */
2342 if (gimple_assign_single_p (stmt)
2343 && TREE_CODE (lhs) == SSA_NAME
15d138c9 2344 && (val = get_constant_value (lhs)))
6872bf3c 2345 {
15d138c9 2346 tree rhs = unshare_expr (val);
6872bf3c 2347 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
182cf5a9 2348 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
6872bf3c 2349 gimple_assign_set_rhs_from_tree (gsi, rhs);
2350 return true;
2351 }
2352
2353 return false;
2354 }
2355
94144e68 2356 default:
2357 return false;
2358 }
6688f8ec 2359}
2360
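/* For example (illustrative GIMPLE), if x_1 is known to be the
   constant 0 with a zero mask, the predicate

     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   evaluates to 0 above and is folded via gimple_cond_make_false, so
   control always flows to <bb 4>.  */
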
41511585 2361/* Visit the assignment statement STMT. Set the value of its LHS to the
88dbf20f 2362 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2363 creates virtual definitions, set the value of each new name to that
75a70cf9 2364 of the RHS (if we can derive a constant out of the RHS).
2365 Value-returning call statements also perform an assignment, and
2366 are handled here. */
4ee9c684 2367
41511585 2368static enum ssa_prop_result
75a70cf9 2369visit_assignment (gimple stmt, tree *output_p)
4ee9c684 2370{
88dbf20f 2371 prop_value_t val;
88dbf20f 2372 enum ssa_prop_result retval;
4ee9c684 2373
75a70cf9 2374 tree lhs = gimple_get_lhs (stmt);
4ee9c684 2375
75a70cf9 2376 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
2377 || gimple_call_lhs (stmt) != NULL_TREE);
2378
15d138c9 2379 if (gimple_assign_single_p (stmt)
2380 && gimple_assign_rhs_code (stmt) == SSA_NAME)
2381 /* For a simple copy operation, we copy the lattice values. */
2382 val = *get_value (gimple_assign_rhs1 (stmt));
41511585 2383 else
75a70cf9 2384 /* Evaluate the statement, which could be
2385 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
04236c3a 2386 val = evaluate_stmt (stmt);
4ee9c684 2387
88dbf20f 2388 retval = SSA_PROP_NOT_INTERESTING;
4ee9c684 2389
41511585 2390 /* Set the lattice value of the statement's output. */
88dbf20f 2391 if (TREE_CODE (lhs) == SSA_NAME)
4ee9c684 2392 {
88dbf20f 2393 /* If STMT is an assignment to an SSA_NAME, we only have one
2394 value to set. */
2395 if (set_lattice_value (lhs, val))
2396 {
2397 *output_p = lhs;
2398 if (val.lattice_val == VARYING)
2399 retval = SSA_PROP_VARYING;
2400 else
2401 retval = SSA_PROP_INTERESTING;
2402 }
4ee9c684 2403 }
88dbf20f 2404
2405 return retval;
4ee9c684 2406}
2407
4ee9c684 2408
41511585 2409/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2410 if it can determine which edge will be taken. Otherwise, return
2411 SSA_PROP_VARYING. */
2412
2413static enum ssa_prop_result
75a70cf9 2414visit_cond_stmt (gimple stmt, edge *taken_edge_p)
4ee9c684 2415{
88dbf20f 2416 prop_value_t val;
41511585 2417 basic_block block;
2418
75a70cf9 2419 block = gimple_bb (stmt);
41511585 2420 val = evaluate_stmt (stmt);
b7e55469 2421 if (val.lattice_val != CONSTANT
2422 || !double_int_zero_p (val.mask))
2423 return SSA_PROP_VARYING;
41511585 2424
2425 /* Find which edge out of the conditional block will be taken and add it
2426 to the worklist. If no single edge can be determined statically,
2427 return SSA_PROP_VARYING to feed all the outgoing edges to the
2428 propagation engine. */
b7e55469 2429 *taken_edge_p = find_taken_edge (block, val.value);
41511585 2430 if (*taken_edge_p)
2431 return SSA_PROP_INTERESTING;
2432 else
2433 return SSA_PROP_VARYING;
4ee9c684 2434}
2435
4ee9c684 2436
41511585 2437/* Evaluate statement STMT. If the statement produces an output value and
2438 its evaluation changes the lattice value of its output, return
2439 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2440 output value.
48e1416a 2441
41511585 2442 If STMT is a conditional branch and we can determine its truth
2443 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2444 value, return SSA_PROP_VARYING. */
4ee9c684 2445
41511585 2446static enum ssa_prop_result
75a70cf9 2447ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
41511585 2448{
41511585 2449 tree def;
2450 ssa_op_iter iter;
4ee9c684 2451
41511585 2452 if (dump_file && (dump_flags & TDF_DETAILS))
4ee9c684 2453 {
88dbf20f 2454 fprintf (dump_file, "\nVisiting statement:\n");
75a70cf9 2455 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2456 }
4ee9c684 2457
75a70cf9 2458 switch (gimple_code (stmt))
4ee9c684 2459 {
75a70cf9 2460 case GIMPLE_ASSIGN:
2461 /* If the statement is an assignment that produces a single
2462 output value, evaluate its RHS to see if the lattice value of
2463 its output has changed. */
2464 return visit_assignment (stmt, output_p);
2465
2466 case GIMPLE_CALL:
2467 /* A value-returning call also performs an assignment. */
2468 if (gimple_call_lhs (stmt) != NULL_TREE)
2469 return visit_assignment (stmt, output_p);
2470 break;
2471
2472 case GIMPLE_COND:
2473 case GIMPLE_SWITCH:
2474 /* If STMT is a conditional branch, see if we can determine
2475 which branch will be taken. */
2476 /* FIXME. It appears that we should be able to optimize
2477 computed GOTOs here as well. */
2478 return visit_cond_stmt (stmt, taken_edge_p);
2479
2480 default:
2481 break;
4ee9c684 2482 }
4ee9c684 2483
41511585 2484 /* Any other kind of statement is not interesting for constant
2485 propagation and, therefore, not worth simulating. */
41511585 2486 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
4ee9c684 2488
41511585 2489 /* Definitions made by statements other than assignments to
2490 SSA_NAMEs represent unknown modifications to their outputs.
2491 Mark them VARYING. */
88dbf20f 2492 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2493 {
b7e55469 2494 prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
88dbf20f 2495 set_lattice_value (def, v);
2496 }
4ee9c684 2497
41511585 2498 return SSA_PROP_VARYING;
2499}
4ee9c684 2500
4ee9c684 2501
88dbf20f 2502/* Main entry point for SSA Conditional Constant Propagation. */
41511585 2503
33a34f1e 2504static unsigned int
61207d43 2505do_ssa_ccp (void)
41511585 2506{
2507 ccp_initialize ();
2508 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
33a34f1e 2509 if (ccp_finalize ())
eb9161e7 2510 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
33a34f1e 2511 else
2512 return 0;
4ee9c684 2513}
2514
5664499b 2515
2516static bool
41511585 2517gate_ccp (void)
5664499b 2518{
41511585 2519 return flag_tree_ccp != 0;
5664499b 2520}
2521
4ee9c684 2522
48e1416a 2523struct gimple_opt_pass pass_ccp =
41511585 2524{
20099e35 2525 {
2526 GIMPLE_PASS,
41511585 2527 "ccp", /* name */
2528 gate_ccp, /* gate */
88dbf20f 2529 do_ssa_ccp, /* execute */
41511585 2530 NULL, /* sub */
2531 NULL, /* next */
2532 0, /* static_pass_number */
2533 TV_TREE_CCP, /* tv_id */
49290934 2534 PROP_cfg | PROP_ssa, /* properties_required */
41511585 2535 0, /* properties_provided */
b6246c40 2536 0, /* properties_destroyed */
41511585 2537 0, /* todo_flags_start */
33a34f1e 2538 TODO_dump_func | TODO_verify_ssa
20099e35 2539 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
2540 }
41511585 2541};
4ee9c684 2542
4ee9c684 2543
75a70cf9 2544
bdd0e199 2545/* Try to optimize out __builtin_stack_restore. Optimize it out
2546 if there is another __builtin_stack_restore in the same basic
2547 block and no calls or ASM_EXPRs are in between, or if this block's
2548 only outgoing edge is to EXIT_BLOCK and there are no calls or
 2549	   ASM_EXPRs after this __builtin_stack_restore.  A source-level sketch of the pattern follows the function.  */
2550
2551static tree
75a70cf9 2552optimize_stack_restore (gimple_stmt_iterator i)
bdd0e199 2553{
6ea999da 2554 tree callee;
2555 gimple stmt;
75a70cf9 2556
2557 basic_block bb = gsi_bb (i);
2558 gimple call = gsi_stmt (i);
bdd0e199 2559
75a70cf9 2560 if (gimple_code (call) != GIMPLE_CALL
2561 || gimple_call_num_args (call) != 1
2562 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2563 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
bdd0e199 2564 return NULL_TREE;
2565
75a70cf9 2566 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
bdd0e199 2567 {
75a70cf9 2568 stmt = gsi_stmt (i);
2569 if (gimple_code (stmt) == GIMPLE_ASM)
bdd0e199 2570 return NULL_TREE;
75a70cf9 2571 if (gimple_code (stmt) != GIMPLE_CALL)
bdd0e199 2572 continue;
2573
75a70cf9 2574 callee = gimple_call_fndecl (stmt);
c40a6f90 2575 if (!callee
2576 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2577 /* All regular builtins are ok, just obviously not alloca. */
2578 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
bdd0e199 2579 return NULL_TREE;
2580
2581 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
6ea999da 2582 goto second_stack_restore;
bdd0e199 2583 }
2584
6ea999da 2585 if (!gsi_end_p (i))
bdd0e199 2586 return NULL_TREE;
2587
6ea999da 2588	  /* Allow a single successor only when it is the exit block, or no successors at all.  */
2589 switch (EDGE_COUNT (bb->succs))
2590 {
2591 case 0:
2592 break;
2593 case 1:
2594 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2595 return NULL_TREE;
2596 break;
2597 default:
2598 return NULL_TREE;
2599 }
2600 second_stack_restore:
bdd0e199 2601
6ea999da 2602 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2603 If there are multiple uses, then the last one should remove the call.
2604 In any case, whether the call to __builtin_stack_save can be removed
2605 or not is irrelevant to removing the call to __builtin_stack_restore. */
2606 if (has_single_use (gimple_call_arg (call, 0)))
2607 {
2608 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2609 if (is_gimple_call (stack_save))
2610 {
2611 callee = gimple_call_fndecl (stack_save);
2612 if (callee
2613 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2614 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2615 {
2616 gimple_stmt_iterator stack_save_gsi;
2617 tree rhs;
bdd0e199 2618
6ea999da 2619 stack_save_gsi = gsi_for_stmt (stack_save);
2620 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2621 update_call_from_tree (&stack_save_gsi, rhs);
2622 }
2623 }
2624 }
bdd0e199 2625
75a70cf9 2626 /* No effect, so the statement will be deleted. */
bdd0e199 2627 return integer_zero_node;
2628}
75a70cf9 2629
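/* A hypothetical source-level sketch of the pattern targeted above
   (GCC emits such save/restore pairs for variable-length arrays; the
   builtins are spelled out directly here only for illustration): the
   first restore is redundant because another restore follows in the
   same block with no call or asm in between.  */

static void
ccp_example_redundant_restore (void)
{
  void *sp = __builtin_stack_save ();
  __builtin_stack_restore (sp);	/* Redundant; optimized out.  */
  __builtin_stack_restore (sp);	/* The last restore remains.  */
}
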
8a58ed0a 2630/* If the va_list type is a simple pointer and nothing special is needed,
 2631	   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
 2632	   optimize __builtin_va_end (&ap) out as a no-op, and turn __builtin_va_copy
 2633	   into a simple pointer assignment.  A sketch of the rewrites follows the function.  */
2634
2635static tree
75a70cf9 2636optimize_stdarg_builtin (gimple call)
8a58ed0a 2637{
5f57a8b1 2638 tree callee, lhs, rhs, cfun_va_list;
8a58ed0a 2639 bool va_list_simple_ptr;
389dd41b 2640 location_t loc = gimple_location (call);
8a58ed0a 2641
75a70cf9 2642 if (gimple_code (call) != GIMPLE_CALL)
8a58ed0a 2643 return NULL_TREE;
2644
75a70cf9 2645 callee = gimple_call_fndecl (call);
5f57a8b1 2646
2647 cfun_va_list = targetm.fn_abi_va_list (callee);
2648 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2649 && (TREE_TYPE (cfun_va_list) == void_type_node
2650 || TREE_TYPE (cfun_va_list) == char_type_node);
2651
8a58ed0a 2652 switch (DECL_FUNCTION_CODE (callee))
2653 {
2654 case BUILT_IN_VA_START:
2655 if (!va_list_simple_ptr
2656 || targetm.expand_builtin_va_start != NULL
75a70cf9 2657 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
8a58ed0a 2658 return NULL_TREE;
2659
75a70cf9 2660 if (gimple_call_num_args (call) != 2)
8a58ed0a 2661 return NULL_TREE;
2662
75a70cf9 2663 lhs = gimple_call_arg (call, 0);
8a58ed0a 2664 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2665 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2666 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2667 return NULL_TREE;
48e1416a 2668
389dd41b 2669 lhs = build_fold_indirect_ref_loc (loc, lhs);
2670 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
75a70cf9 2671 1, integer_zero_node);
389dd41b 2672 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2673 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2674
2675 case BUILT_IN_VA_COPY:
2676 if (!va_list_simple_ptr)
2677 return NULL_TREE;
2678
75a70cf9 2679 if (gimple_call_num_args (call) != 2)
8a58ed0a 2680 return NULL_TREE;
2681
75a70cf9 2682 lhs = gimple_call_arg (call, 0);
8a58ed0a 2683 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2684 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
5f57a8b1 2685 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2686 return NULL_TREE;
2687
389dd41b 2688 lhs = build_fold_indirect_ref_loc (loc, lhs);
75a70cf9 2689 rhs = gimple_call_arg (call, 1);
8a58ed0a 2690 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
5f57a8b1 2691 != TYPE_MAIN_VARIANT (cfun_va_list))
8a58ed0a 2692 return NULL_TREE;
2693
389dd41b 2694 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
8a58ed0a 2695 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2696
2697 case BUILT_IN_VA_END:
75a70cf9 2698 /* No effect, so the statement will be deleted. */
8a58ed0a 2699 return integer_zero_node;
2700
2701 default:
2702 gcc_unreachable ();
2703 }
2704}
75a70cf9 2705
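/* For example (illustrative only, for targets whose va_list is a plain
   char or void pointer), the rewrites above amount to:

     va_start (ap, last);   =>   ap = __builtin_next_arg (0);
     va_copy (ap2, ap);     =>   ap2 = ap;
     va_end (ap);           =>   (removed as a no-op)  */
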
4ee9c684 2706/* A simple pass that attempts to fold all builtin functions. This pass
2707 is run after we've propagated as many constants as we can. */
2708
2a1990e9 2709static unsigned int
4ee9c684 2710execute_fold_all_builtins (void)
2711{
b36237eb 2712 bool cfg_changed = false;
4ee9c684 2713 basic_block bb;
b1b7c0c4 2714 unsigned int todoflags = 0;
48e1416a 2715
4ee9c684 2716 FOR_EACH_BB (bb)
2717 {
75a70cf9 2718 gimple_stmt_iterator i;
2719 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4ee9c684 2720 {
75a70cf9 2721 gimple stmt, old_stmt;
4ee9c684 2722 tree callee, result;
0a39fd54 2723 enum built_in_function fcode;
4ee9c684 2724
75a70cf9 2725 stmt = gsi_stmt (i);
2726
2727 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2728 {
75a70cf9 2729 gsi_next (&i);
0a39fd54 2730 continue;
2731 }
75a70cf9 2732 callee = gimple_call_fndecl (stmt);
4ee9c684 2733 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
0a39fd54 2734 {
75a70cf9 2735 gsi_next (&i);
0a39fd54 2736 continue;
2737 }
2738 fcode = DECL_FUNCTION_CODE (callee);
4ee9c684 2739
2d18b16d 2740 result = gimple_fold_builtin (stmt);
5a4b7e1e 2741
2742 if (result)
75a70cf9 2743 gimple_remove_stmt_histograms (cfun, stmt);
5a4b7e1e 2744
4ee9c684 2745 if (!result)
2746 switch (DECL_FUNCTION_CODE (callee))
2747 {
2748 case BUILT_IN_CONSTANT_P:
2749 /* Resolve __builtin_constant_p. If it hasn't been
2750 folded to integer_one_node by now, it's fairly
2751 certain that the value simply isn't constant. */
75a70cf9 2752 result = integer_zero_node;
4ee9c684 2753 break;
2754
bdd0e199 2755 case BUILT_IN_STACK_RESTORE:
75a70cf9 2756 result = optimize_stack_restore (i);
8a58ed0a 2757 if (result)
2758 break;
75a70cf9 2759 gsi_next (&i);
8a58ed0a 2760 continue;
2761
2762 case BUILT_IN_VA_START:
2763 case BUILT_IN_VA_END:
2764 case BUILT_IN_VA_COPY:
2765 /* These shouldn't be folded before pass_stdarg. */
75a70cf9 2766 result = optimize_stdarg_builtin (stmt);
bdd0e199 2767 if (result)
2768 break;
2769 /* FALLTHRU */
2770
4ee9c684 2771 default:
75a70cf9 2772 gsi_next (&i);
4ee9c684 2773 continue;
2774 }
2775
2776 if (dump_file && (dump_flags & TDF_DETAILS))
2777 {
2778 fprintf (dump_file, "Simplified\n ");
75a70cf9 2779 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2780 }
2781
75a70cf9 2782 old_stmt = stmt;
75a70cf9 2783 if (!update_call_from_tree (&i, result))
0fefde02 2784 {
2785 gimplify_and_update_call_from_tree (&i, result);
2786 todoflags |= TODO_update_address_taken;
2787 }
de6ed584 2788
75a70cf9 2789 stmt = gsi_stmt (i);
4c5fd53c 2790 update_stmt (stmt);
de6ed584 2791
75a70cf9 2792 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2793 && gimple_purge_dead_eh_edges (bb))
b36237eb 2794 cfg_changed = true;
4ee9c684 2795
2796 if (dump_file && (dump_flags & TDF_DETAILS))
2797 {
2798 fprintf (dump_file, "to\n ");
75a70cf9 2799 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
4ee9c684 2800 fprintf (dump_file, "\n");
2801 }
0a39fd54 2802
 2803	 	  /* Retry the same statement if it changed into another
 2804	 	     builtin; there might be new opportunities now.  */
75a70cf9 2805 if (gimple_code (stmt) != GIMPLE_CALL)
0a39fd54 2806 {
75a70cf9 2807 gsi_next (&i);
0a39fd54 2808 continue;
2809 }
75a70cf9 2810 callee = gimple_call_fndecl (stmt);
0a39fd54 2811 if (!callee
75a70cf9 2812 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
0a39fd54 2813 || DECL_FUNCTION_CODE (callee) == fcode)
75a70cf9 2814 gsi_next (&i);
4ee9c684 2815 }
2816 }
48e1416a 2817
b36237eb 2818 /* Delete unreachable blocks. */
b1b7c0c4 2819 if (cfg_changed)
2820 todoflags |= TODO_cleanup_cfg;
48e1416a 2821
b1b7c0c4 2822 return todoflags;
4ee9c684 2823}
2824
41511585 2825
48e1416a 2826struct gimple_opt_pass pass_fold_builtins =
4ee9c684 2827{
20099e35 2828 {
2829 GIMPLE_PASS,
4ee9c684 2830 "fab", /* name */
2831 NULL, /* gate */
2832 execute_fold_all_builtins, /* execute */
2833 NULL, /* sub */
2834 NULL, /* next */
2835 0, /* static_pass_number */
0b1615c1 2836 TV_NONE, /* tv_id */
49290934 2837 PROP_cfg | PROP_ssa, /* properties_required */
4ee9c684 2838 0, /* properties_provided */
2839 0, /* properties_destroyed */
2840 0, /* todo_flags_start */
909e5ecb 2841 TODO_dump_func
2842 | TODO_verify_ssa
20099e35 2843 | TODO_update_ssa /* todo_flags_finish */
2844 }
4ee9c684 2845};