/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
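
/* Illustrative sketch (an editorial addition, not part of the original
   sources): given the GIMPLE fragment

	x_1 = 4;
	y_2 = x_1 + 1;
	if (y_2 > 10) ...

   the simulation first records x_1 as CONSTANT 4, visiting y_2's
   definition then yields CONSTANT 5, and the conditional folds to
   false, so only the outgoing false edge is marked executable and
   downstream PHI nodes ignore arguments flowing in through the dead
   true edge.  */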

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
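
/* Illustrative sketch (an editorial addition, not part of the original
   sources): a value tracked as value == 0x8 with mask == 0x3 has its
   two low bits unknown and all other bits known, so the lattice
   element describes exactly the runtime values 0x8, 0x9, 0xa and 0xb,
   and X & ~mask == value & ~mask holds for each of them.  */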

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}


/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
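
/* Illustrative sketch (an editorial addition, not part of the original
   sources): moving from (value == 0x5, mask == 0x0) to (value == 0x5,
   mask == 0x2) is a valid transition, because both agree on every bit
   that is still known (0x5 & ~0x2 == 0x5 & ~0x2).  A step to
   (value == 0x6, mask == 0x2) would be rejected, since bit 0 flips
   from known-set to known-clear (0x5 & ~0x2 != 0x6 & ~0x2).  */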

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constant values here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */
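
/* Illustrative sketch (an editorial addition, not part of the original
   sources): for EXPR == &a[1], with a a 16-byte aligned array of
   4-byte integers, the base contributes value == 0 with all mask bits
   below the alignment cleared (the low four bits are known zero), and
   folding in the constant byte offset of the element yields
   value == 4 with the low four bits still known.  */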

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
			      &bitsize, &bitpos, &offset,
			      &mode, &align, &align, false);
  if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
			   TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
	   && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
		> BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
		   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
		    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr),
			 shwi_to_double_int (bitpos / BITS_PER_UNIT),
			 double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
	val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
	val.value = NULL_TREE;
    }
  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr), value_to_double_int (oval),
			 oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	}
      else
	{
	  val.lattice_val = CONSTANT;
	  val.value = double_int_to_tree (TREE_TYPE (expr), value);
	}
    }

  return val;
}

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call
     to a non-builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* Control statements must always be simulated at least
	     once; otherwise their outgoing edges would never get
	     added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
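      /* Illustrative sketch (an editorial addition, not part of the
	 original sources): with val->value == 4 and val->mask.low ==
	 0xfffffff8, tem & -tem isolates the lowest set mask bit, so
	 align == 8 and misalign == 4, i.e. the pointer is known to be
	 4 modulo 8.  */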
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj          = Ci         if (i == j)
		Ci   M Cj          = VARYING    if (i != j)
   */
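
/* Illustrative sketch (an editorial addition, not part of the original
   sources): for two INTEGER_CSTs the meet below also merges masks, so
   meeting (value == 0x5, mask == 0x0) with (value == 0x7, mask == 0x0)
   ors in the XOR of the values and yields (value == 0x5, mask == 0x2):
   bit 1 becomes unknown while the other bits stay known, instead of
   dropping straight to VARYING.  */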

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When unequal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_constant_value (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  The folding we do here matches that in
		 tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor at a constant offset (such as when
   BASE is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset)
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      base = get_constant_value (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}

/* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
				unsigned HOST_WIDE_INT offset,
				unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
					  [offset]));
      /* Folding
	   const char a[20]="hello";
	   return a[10];

	 might lead to offset greater than string length.  In this case we
	 know value is either initialized to 0 or out of bounds.  Return 0
	 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}

/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));


  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);

      /* Do we have a match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}

/* CTOR is a CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
				  double_int_mul (byte_offset_cst,
						  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = double_int_add (bitoffset,
					tree_to_double_int (field_size));
      else
	bitoffset_end = double_int_zero;

      /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)?  */
      if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
	  && (field_size == NULL_TREE
	      || double_int_cmp (uhwi_to_double_int (offset),
				 bitoffset_end, 0) < 0))
	{
	  double_int access_end = double_int_add (uhwi_to_double_int (offset),
						  uhwi_to_double_int (size));
	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
						    bitoffset);
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      double_int_to_uhwi (inner_offset), size);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}

/* CTOR is a value initializing memory; fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size);
    }

  return NULL_TREE;
}
1603
ae3df618
SB
1604/* Return the tree representing the element referenced by T if T is an
1605 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1606 NULL_TREE otherwise. */
1607
ed97ddc6 1608tree
ae3df618
SB
1609fold_const_aggregate_ref (tree t)
1610{
697c3575
JH
1611 tree ctor, idx, base;
1612 HOST_WIDE_INT offset, size, max_size;
84d77ca6 1613 tree tem;
ae3df618 1614
e8114fba
RG
1615 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1616 return get_symbol_constant_value (t);
1617
17f39a39
JH
1618 tem = fold_read_from_constant_string (t);
1619 if (tem)
1620 return tem;
1621
ae3df618
SB
1622 switch (TREE_CODE (t))
1623 {
1624 case ARRAY_REF:
697c3575
JH
1625 case ARRAY_RANGE_REF:
1626 /* Constant indexes are handled well by get_base_constructor.
1627 Only special case variable offsets.
1628 FIXME: This code can't handle nested references with variable indexes
1629 (they will be handled only by iteration of ccp). Perhaps we can bring
1630 get_ref_base_and_extent here and make it use get_constant_value. */
1631 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
1632 && (idx = get_constant_value (TREE_OPERAND (t, 1)))
1633 && host_integerp (idx, 0))
1634 {
1635 tree low_bound, unit_size;
ae3df618 1636
697c3575
JH
1637 /* If the resulting bit-offset is constant, track it. */
1638 if ((low_bound = array_ref_low_bound (t),
1639 host_integerp (low_bound, 0))
1640 && (unit_size = array_ref_element_size (t),
1641 host_integerp (unit_size, 1)))
1642 {
1643 offset = TREE_INT_CST_LOW (idx);
1644 offset -= TREE_INT_CST_LOW (low_bound);
1645 offset *= TREE_INT_CST_LOW (unit_size);
1646 offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out-of-bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We cannot determine the ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out-of-bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */
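/* In the value/mask encoding used below, a set mask bit means the
   corresponding value bit is unknown; e.g. value 0100 with mask 0011
   stands for any of 0100, 0101, 0110 and 0111.  A mask of all ones
   therefore encodes a completely unknown (VARYING) value.  */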

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known
	 not-set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)).  */
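      /* E.g. for r1 = 11xx (value 1100, mask 0011) and r2 = 1010 this
	 gives mask = 0011 & (1111 & 1010) = 0010 and value
	 = 1100 & 1010 = 1000, i.e. the partially known result 10x0.  */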
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We could handle partially known shift counts if we knew
	 their sign.  That way we could tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know whether we are doing a left or a right shift
	     to properly shift in zeros for left shifts and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins
	   of zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins
	   of one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
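	/* E.g. for x100 + 0001 we get lo = 0100 + 0001 = 0101 and
	   hi = 1100 + 0001 = 1101, so lo ^ hi = 1000 and only the
	   unknown bit itself is unknown: the result is x101.  */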
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
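	/* E.g. a known multiple of four (two trailing zeros) times a
	   known even number (one trailing zero) is a known multiple
	   of eight: the low three result bits are known zeros.  */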
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
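	/* If the operands differ in a bit that is known in both, the
	   comparison is decided; e.g. 1x10 and 0x10 differ in known
	   bit 3, so EQ_EXPR folds to 0 and NE_EXPR folds to 1 even
	   though bit 2 is unknown.  */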
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* If we know the most significant bits we know the value
	   ranges by treating the varying bits as either zero or one.
	   Do a cross comparison of the max/min pairs.  */
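	/* E.g. for unsigned operands 00xx and 01xx we have
	   max (r1) = 0011 < min (r2) = 0100, so maxmin < 0 and
	   r1 < r2 is known to be true.  */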
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less than or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
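	      /* The result is known to be MALLOC_ABI_ALIGNMENT-aligned,
		 so its low bits are known zeros: the value is 0 and the
		 mask ~(alignment-in-bytes - 1), e.g. ~15 for a 16-byte
		 alignment, leaves only the bits above the alignment
		 unknown.  */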
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}

/* Fold the statement at *GSI with CCP-specific information that
   propagation and regular folding do not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	tree callee;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We can also safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	callee = gimple_call_fn (stmt);
	if (TREE_CODE (callee) == OBJ_TYPE_REF
	    && TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == SSA_NAME)
	  {
	    tree expr = OBJ_TYPE_REF_EXPR (callee);
	    OBJ_TYPE_REF_EXPR (callee) = valueize_op (expr);
	    if (gimple_fold_call (gsi, false))
	      changed = true;
	    OBJ_TYPE_REF_EXPR (callee) = expr;
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}


/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}


/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}


/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};


/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
2544static tree
726a989a 2545optimize_stack_restore (gimple_stmt_iterator i)
cb8e078d 2546{
ff9d1adc
RH
2547 tree callee;
2548 gimple stmt;
726a989a
RB
2549
2550 basic_block bb = gsi_bb (i);
2551 gimple call = gsi_stmt (i);
cb8e078d 2552
726a989a
RB
2553 if (gimple_code (call) != GIMPLE_CALL
2554 || gimple_call_num_args (call) != 1
2555 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2556 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
cb8e078d
JJ
2557 return NULL_TREE;
2558
726a989a 2559 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
cb8e078d 2560 {
726a989a
RB
2561 stmt = gsi_stmt (i);
2562 if (gimple_code (stmt) == GIMPLE_ASM)
cb8e078d 2563 return NULL_TREE;
726a989a 2564 if (gimple_code (stmt) != GIMPLE_CALL)
cb8e078d
JJ
2565 continue;
2566
726a989a 2567 callee = gimple_call_fndecl (stmt);
12f9ddbc
RG
2568 if (!callee
2569 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2570 /* All regular builtins are ok, just obviously not alloca. */
2571 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
cb8e078d
JJ
2572 return NULL_TREE;
2573
2574 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
ff9d1adc 2575 goto second_stack_restore;
cb8e078d
JJ
2576 }
2577
ff9d1adc 2578 if (!gsi_end_p (i))
cb8e078d
JJ
2579 return NULL_TREE;
2580
ff9d1adc
RH
2581 /* Allow one successor of the exit block, or zero successors. */
2582 switch (EDGE_COUNT (bb->succs))
2583 {
2584 case 0:
2585 break;
2586 case 1:
2587 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
2588 return NULL_TREE;
2589 break;
2590 default:
2591 return NULL_TREE;
2592 }
2593 second_stack_restore:
cb8e078d 2594
ff9d1adc
RH
2595 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2596 If there are multiple uses, then the last one should remove the call.
2597 In any case, whether the call to __builtin_stack_save can be removed
2598 or not is irrelevant to removing the call to __builtin_stack_restore. */
2599 if (has_single_use (gimple_call_arg (call, 0)))
2600 {
2601 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2602 if (is_gimple_call (stack_save))
2603 {
2604 callee = gimple_call_fndecl (stack_save);
2605 if (callee
2606 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2607 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2608 {
2609 gimple_stmt_iterator stack_save_gsi;
2610 tree rhs;
cb8e078d 2611
ff9d1adc
RH
2612 stack_save_gsi = gsi_for_stmt (stack_save);
2613 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2614 update_call_from_tree (&stack_save_gsi, rhs);
2615 }
2616 }
2617 }
cb8e078d 2618
726a989a 2619 /* No effect, so the statement will be deleted. */
cb8e078d
JJ
2620 return integer_zero_node;
2621}
726a989a 2622
d7bd8aeb
JJ
2623/* If va_list type is a simple pointer and nothing special is needed,
2624 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2625 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
2626 pointer assignment. */
2627
2628static tree
726a989a 2629optimize_stdarg_builtin (gimple call)
d7bd8aeb 2630{
35cbb299 2631 tree callee, lhs, rhs, cfun_va_list;
d7bd8aeb 2632 bool va_list_simple_ptr;
db3927fb 2633 location_t loc = gimple_location (call);
d7bd8aeb 2634
726a989a 2635 if (gimple_code (call) != GIMPLE_CALL)
d7bd8aeb
JJ
2636 return NULL_TREE;
2637
726a989a 2638 callee = gimple_call_fndecl (call);
35cbb299
KT
2639
2640 cfun_va_list = targetm.fn_abi_va_list (callee);
2641 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2642 && (TREE_TYPE (cfun_va_list) == void_type_node
2643 || TREE_TYPE (cfun_va_list) == char_type_node);
2644
d7bd8aeb
JJ
2645 switch (DECL_FUNCTION_CODE (callee))
2646 {
2647 case BUILT_IN_VA_START:
2648 if (!va_list_simple_ptr
2649 || targetm.expand_builtin_va_start != NULL
726a989a 2650 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
d7bd8aeb
JJ
2651 return NULL_TREE;
2652
726a989a 2653 if (gimple_call_num_args (call) != 2)
d7bd8aeb
JJ
2654 return NULL_TREE;
2655
726a989a 2656 lhs = gimple_call_arg (call, 0);
d7bd8aeb
JJ
2657 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2658 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
35cbb299 2659 != TYPE_MAIN_VARIANT (cfun_va_list))
d7bd8aeb 2660 return NULL_TREE;
b8698a0f 2661
db3927fb
AH
2662 lhs = build_fold_indirect_ref_loc (loc, lhs);
2663 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
726a989a 2664 1, integer_zero_node);
db3927fb 2665 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
d7bd8aeb
JJ
2666 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2667
2668 case BUILT_IN_VA_COPY:
2669 if (!va_list_simple_ptr)
2670 return NULL_TREE;
2671
726a989a 2672 if (gimple_call_num_args (call) != 2)
d7bd8aeb
JJ
2673 return NULL_TREE;
2674
726a989a 2675 lhs = gimple_call_arg (call, 0);
d7bd8aeb
JJ
2676 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2677 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
35cbb299 2678 != TYPE_MAIN_VARIANT (cfun_va_list))
d7bd8aeb
JJ
2679 return NULL_TREE;
2680
db3927fb 2681 lhs = build_fold_indirect_ref_loc (loc, lhs);
726a989a 2682 rhs = gimple_call_arg (call, 1);
d7bd8aeb 2683 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
35cbb299 2684 != TYPE_MAIN_VARIANT (cfun_va_list))
d7bd8aeb
JJ
2685 return NULL_TREE;
2686
db3927fb 2687 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
d7bd8aeb
JJ
2688 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2689
2690 case BUILT_IN_VA_END:
726a989a 2691 /* No effect, so the statement will be deleted. */
d7bd8aeb
JJ
2692 return integer_zero_node;
2693
2694 default:
2695 gcc_unreachable ();
2696 }
2697}
726a989a 2698
6de9cd9a
DN
2699/* A simple pass that attempts to fold all builtin functions. This pass
2700 is run after we've propagated as many constants as we can. */
2701
c2924966 2702static unsigned int
6de9cd9a
DN
2703execute_fold_all_builtins (void)
2704{
a7d6ba24 2705 bool cfg_changed = false;
6de9cd9a 2706 basic_block bb;
7b0e48fb 2707 unsigned int todoflags = 0;
b8698a0f 2708
6de9cd9a
DN
2709 FOR_EACH_BB (bb)
2710 {
726a989a
RB
2711 gimple_stmt_iterator i;
2712 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
6de9cd9a 2713 {
726a989a 2714 gimple stmt, old_stmt;
6de9cd9a 2715 tree callee, result;
10a0d495 2716 enum built_in_function fcode;
6de9cd9a 2717
726a989a
RB
2718 stmt = gsi_stmt (i);
2719
2720 if (gimple_code (stmt) != GIMPLE_CALL)
10a0d495 2721 {
726a989a 2722 gsi_next (&i);
10a0d495
JJ
2723 continue;
2724 }
726a989a 2725 callee = gimple_call_fndecl (stmt);
6de9cd9a 2726 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
10a0d495 2727 {
726a989a 2728 gsi_next (&i);
10a0d495
JJ
2729 continue;
2730 }
2731 fcode = DECL_FUNCTION_CODE (callee);
6de9cd9a 2732
cbdd87d4 2733 result = gimple_fold_builtin (stmt);
53a8f709
UB
2734
2735 if (result)
726a989a 2736 gimple_remove_stmt_histograms (cfun, stmt);
53a8f709 2737
6de9cd9a
DN
2738 if (!result)
2739 switch (DECL_FUNCTION_CODE (callee))
2740 {
2741 case BUILT_IN_CONSTANT_P:
2742 /* Resolve __builtin_constant_p. If it hasn't been
2743 folded to integer_one_node by now, it's fairly
2744 certain that the value simply isn't constant. */
726a989a 2745 result = integer_zero_node;
6de9cd9a
DN
2746 break;
2747
cb8e078d 2748 case BUILT_IN_STACK_RESTORE:
726a989a 2749 result = optimize_stack_restore (i);
d7bd8aeb
JJ
2750 if (result)
2751 break;
726a989a 2752 gsi_next (&i);
d7bd8aeb
JJ
2753 continue;
2754
2755 case BUILT_IN_VA_START:
2756 case BUILT_IN_VA_END:
2757 case BUILT_IN_VA_COPY:
2758 /* These shouldn't be folded before pass_stdarg. */
726a989a 2759 result = optimize_stdarg_builtin (stmt);
cb8e078d
JJ
2760 if (result)
2761 break;
2762 /* FALLTHRU */
2763
6de9cd9a 2764 default:
726a989a 2765 gsi_next (&i);
6de9cd9a
DN
2766 continue;
2767 }
2768
2769 if (dump_file && (dump_flags & TDF_DETAILS))
2770 {
2771 fprintf (dump_file, "Simplified\n ");
726a989a 2772 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
6de9cd9a
DN
2773 }
2774
726a989a 2775 old_stmt = stmt;
726a989a 2776 if (!update_call_from_tree (&i, result))
4bad83f5
RG
2777 {
2778 gimplify_and_update_call_from_tree (&i, result);
2779 todoflags |= TODO_update_address_taken;
2780 }
cfaab3a9 2781
726a989a 2782 stmt = gsi_stmt (i);
cff4e50d 2783 update_stmt (stmt);
cfaab3a9 2784
726a989a
RB
2785 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2786 && gimple_purge_dead_eh_edges (bb))
a7d6ba24 2787 cfg_changed = true;
6de9cd9a
DN
2788
2789 if (dump_file && (dump_flags & TDF_DETAILS))
2790 {
2791 fprintf (dump_file, "to\n ");
726a989a 2792 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
6de9cd9a
DN
2793 fprintf (dump_file, "\n");
2794 }
10a0d495
JJ
2795
2796 /* Retry the same statement if it changed into another
2797 builtin, there might be new opportunities now. */
726a989a 2798 if (gimple_code (stmt) != GIMPLE_CALL)
10a0d495 2799 {
726a989a 2800 gsi_next (&i);
10a0d495
JJ
2801 continue;
2802 }
726a989a 2803 callee = gimple_call_fndecl (stmt);
10a0d495 2804 if (!callee
726a989a 2805 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
10a0d495 2806 || DECL_FUNCTION_CODE (callee) == fcode)
726a989a 2807 gsi_next (&i);
6de9cd9a
DN
2808 }
2809 }
b8698a0f 2810
a7d6ba24 2811 /* Delete unreachable blocks. */
7b0e48fb
DB
2812 if (cfg_changed)
2813 todoflags |= TODO_cleanup_cfg;
b8698a0f 2814
7b0e48fb 2815 return todoflags;
6de9cd9a
DN
2816}
2817
750628d8 2818
b8698a0f 2819struct gimple_opt_pass pass_fold_builtins =
6de9cd9a 2820{
8ddbbcae
JH
2821 {
2822 GIMPLE_PASS,
6de9cd9a
DN
2823 "fab", /* name */
2824 NULL, /* gate */
2825 execute_fold_all_builtins, /* execute */
2826 NULL, /* sub */
2827 NULL, /* next */
2828 0, /* static_pass_number */
7072a650 2829 TV_NONE, /* tv_id */
7faade0f 2830 PROP_cfg | PROP_ssa, /* properties_required */
6de9cd9a
DN
2831 0, /* properties_provided */
2832 0, /* properties_destroyed */
2833 0, /* todo_flags_start */
b28b1600
JJ
2834 TODO_dump_func
2835 | TODO_verify_ssa
8ddbbcae
JH
2836 | TODO_update_ssa /* todo_flags_finish */
2837 }
6de9cd9a 2838};