/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Conditional constant propagation.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "errors.h"
#include "ggc.h"
#include "tree.h"
#include "langhooks.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "expr.h"
#include "flags.h"


/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED = 0,
  UNDEFINED,
  CONSTANT,
  VARYING
} latticevalue;

/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
   been deemed VARYING and shouldn't be simulated again.  */
#define DONT_SIMULATE_AGAIN(T)	TREE_VISITED (T)

/* Main structure for CCP.  Contains the lattice value and, if it's a
   constant, the constant value.  */
typedef struct
{
  latticevalue lattice_val;
  tree const_val;
} value;

/* A bitmap to keep track of executable blocks in the CFG.  */
static sbitmap executable_blocks;

/* Array of control flow edges on the worklist.  */
static GTY(()) varray_type cfg_blocks = NULL;

static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
static int cfg_blocks_head;

static sbitmap bb_in_list;

/* This is used to track the current value of each variable.  */
static value *value_vector;

/* Worklist of SSA edges which will need reexamination as their definition
   has changed.  SSA edges are def-use edges in the SSA web.  For each
   edge, we store the definition statement or PHI node D.  The destination
   nodes that need to be visited are accessed using immediate_uses (D).  */
static GTY(()) varray_type ssa_edges;

/* Identical to SSA_EDGES.  For performance reasons, the list of SSA
   edges is split into two.  One contains all SSA edges that need to be
   reexamined because their lattice value changed to varying (this
   worklist), and the other contains all other SSA edges to be
   reexamined (ssa_edges).

   Since most values in the program are varying, the ideal situation
   is to move them to that lattice value as quickly as possible.
   Thus, it doesn't make sense to process any other type of lattice
   value until all varying values are propagated fully, which is one
   thing using the varying worklist achieves.  In addition, if you
   don't use a separate worklist for varying edges, you end up with
   situations where lattice values move from
   undefined->constant->varying instead of undefined->varying.  */
static GTY(()) varray_type varying_ssa_edges;

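/* For instance (an illustrative sketch, not from the original sources):
   given

	x_1 = PHI <0, y_2>

   where y_2 is later discovered to be VARYING, draining
   VARYING_SSA_EDGES first moves x_1 directly from UNDEFINED to
   VARYING.  Without the separate worklist, the meet with the constant
   0 could move x_1 to CONSTANT first, forcing one more reevaluation
   once y_2's VARYING value arrives.  */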

static void initialize (void);
static void finalize (void);
static void visit_phi_node (tree);
static tree ccp_fold (tree);
static value cp_lattice_meet (value, value);
static void visit_stmt (tree);
static void visit_cond_stmt (tree);
static void visit_assignment (tree);
static void add_var_to_ssa_edges_worklist (tree, value);
static void add_outgoing_control_edges (basic_block);
static void add_control_edge (edge);
static void def_to_varying (tree);
static void set_lattice_value (tree, value);
static void simulate_block (basic_block);
static void simulate_stmt (tree);
static void substitute_and_fold (void);
static value evaluate_stmt (tree);
static void dump_lattice_value (FILE *, const char *, value);
static bool replace_uses_in (tree, bool *);
static latticevalue likely_value (tree);
static tree get_rhs (tree);
static bool set_rhs (tree *, tree);
static value *get_value (tree);
static value get_default_value (tree);
static tree ccp_fold_builtin (tree, tree);
static bool get_strlen (tree, tree *, bitmap);
static inline bool cfg_blocks_empty_p (void);
static void cfg_blocks_add (basic_block);
static basic_block cfg_blocks_get (void);
static bool need_imm_uses_for (tree var);

/* Process an SSA edge worklist.  WORKLIST is the SSA edge worklist to
   drain.  This pops statements off the given WORKLIST and processes
   them until there are no more statements on WORKLIST.  */

static void
process_ssa_edge_worklist (varray_type *worklist)
{
  /* Drain the entire worklist.  */
  while (VARRAY_ACTIVE_SIZE (*worklist) > 0)
    {
      /* Pull the statement to simulate off the worklist.  */
      tree stmt = VARRAY_TOP_TREE (*worklist);
      stmt_ann_t ann = stmt_ann (stmt);
      VARRAY_POP (*worklist);

      /* visit_stmt can "cancel" reevaluation of some statements.
	 If it does, then in_ccp_worklist will be zero.  */
      if (ann->in_ccp_worklist)
	{
	  ann->in_ccp_worklist = 0;
	  simulate_stmt (stmt);
	}
    }
}

/* Main entry point for SSA Conditional Constant Propagation.

   On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
   the propagation of ADDR_EXPR expressions into pointer dereferences and need
   to be renamed into SSA.  */

static void
tree_ssa_ccp (void)
{
  initialize ();

  /* Iterate until the worklists are empty.  */
  while (!cfg_blocks_empty_p ()
	 || VARRAY_ACTIVE_SIZE (ssa_edges) > 0
	 || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0)
    {
      if (!cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	}

      /* In order to move things to varying as quickly as possible,
	 process the VARYING_SSA_EDGES worklist first.  */
      process_ssa_edge_worklist (&varying_ssa_edges);

      /* Now process the SSA_EDGES worklist.  */
      process_ssa_edge_worklist (&ssa_edges);
    }

  /* Now perform substitutions based on the known constant values.  */
  substitute_and_fold ();

  /* Now cleanup any unreachable code.  */
  cleanup_tree_cfg ();

  /* Free allocated memory.  */
  finalize ();

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_referenced_vars (dump_file);
      fprintf (dump_file, "\n\n");
    }
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  tree_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts			/* todo_flags_finish */
};


/* Get the constant value associated with variable VAR.  */

static value *
get_value (tree var)
{
  value *val;

#if defined ENABLE_CHECKING
  if (TREE_CODE (var) != SSA_NAME)
    abort ();
#endif

  val = &value_vector[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  tree phi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
    visit_phi_node (phi);

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (!TEST_BIT (executable_blocks, block->index))
    {
      block_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;

      /* Note that we have simulated this block.  */
      SET_BIT (executable_blocks, block->index);

      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
	visit_stmt (bsi_stmt (j));

      /* We can not predict when abnormal edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      for (e = block->succ; e; e = e->succ_next)
	{
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      add_control_edge (e);
	    }
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}


/* Simulate statement USE_STMT, which was reached by following def-use
   edges from some definition.  */

static void
simulate_stmt (tree use_stmt)
{
  basic_block use_bb = bb_for_stmt (use_stmt);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
      print_generic_stmt (dump_file, use_stmt, dump_flags);
    }

  if (TREE_CODE (use_stmt) == PHI_NODE)
    {
      /* PHI nodes are always visited, regardless of whether or not the
	 destination block is executable.  */
      visit_phi_node (use_stmt);
    }
  else if (TEST_BIT (executable_blocks, use_bb->index))
    {
      /* Otherwise, visit the statement containing the use reached by
	 DEF, only if the destination block is marked executable.  */
      visit_stmt (use_stmt);
    }
}


/* Perform final substitution and folding.  After this pass the program
   should still be in SSA form.  */

static void
substitute_and_fold (void)
{
  basic_block bb;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
	     "\nSubstituting constants and folding statements\n\n");

  /* Substitute constants in every statement of every basic block.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree phi;

      /* Propagate our known constants into PHI nodes.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int i;

	  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
	    {
	      value *new_val;
	      use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
	      tree orig = USE_FROM_PTR (orig_p);

	      if (! SSA_VAR_P (orig))
		break;

	      new_val = get_value (orig);
	      if (new_val->lattice_val == CONSTANT
		  && may_propagate_copy (orig, new_val->const_val))
		SET_USE (orig_p, new_val->const_val);
	    }
	}

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  bool replaced_address;
	  tree stmt = bsi_stmt (i);

	  /* Skip statements that have been folded already.  */
	  if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
	    continue;

	  /* Replace the statement with its folded version and mark it
	     folded.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	    }

	  if (replace_uses_in (stmt, &replaced_address))
	    {
	      bool changed = fold_stmt (bsi_stmt_ptr (i));
	      stmt = bsi_stmt (i);
	      modify_stmt (stmt);
	      /* If we folded a builtin function, we'll likely
		 need to rename VDEFs.  */
	      if (replaced_address || changed)
		{
		  mark_new_vars_to_rename (stmt, vars_to_rename);
		  if (maybe_clean_eh_stmt (stmt))
		    tree_purge_dead_eh_edges (bb);
		}
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, " with ");
	      print_generic_stmt (dump_file, stmt, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	}
    }
}


/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling cp_lattice_meet() with all the
   arguments of the PHI node that are incoming via executable edges.  */

static void
visit_phi_node (tree phi)
{
  bool short_circuit = 0;
  value phi_val, *curr_val;
  int i;

  /* If the PHI node has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (phi))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  curr_val = get_value (PHI_RESULT (phi));
  switch (curr_val->lattice_val)
    {
    case VARYING:
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\n   Shortcircuit.  Default of VARYING.");
      short_circuit = 1;
      break;

    case CONSTANT:
      phi_val = *curr_val;
      break;

    case UNDEFINED:
    case UNINITIALIZED:
      phi_val.lattice_val = UNDEFINED;
      phi_val.const_val = NULL_TREE;
      break;

    default:
      abort ();
    }

  /* If the variable is volatile or the variable is never referenced in a
     real operand, then consider the PHI node VARYING.  */
  if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
    {
      phi_val.lattice_val = VARYING;
      phi_val.const_val = NULL;
    }
  else
    for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      {
	/* Compute the meet operator over all the PHI arguments.  */
	edge e = PHI_ARG_EDGE (phi, i);

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fprintf (dump_file,
		     "\n    Argument #%d (%d -> %d %sexecutable)\n",
		     i, e->src->index, e->dest->index,
		     (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	  }

	/* If the incoming edge is executable, compute the meet operator for
	   the existing value of the PHI node and the current PHI argument.  */
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    tree rdef = PHI_ARG_DEF (phi, i);
	    value *rdef_val, val;

	    if (is_gimple_min_invariant (rdef))
	      {
		val.lattice_val = CONSTANT;
		val.const_val = rdef;
		rdef_val = &val;
	      }
	    else
	      rdef_val = get_value (rdef);

	    phi_val = cp_lattice_meet (phi_val, *rdef_val);

	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "\t");
		print_generic_expr (dump_file, rdef, dump_flags);
		dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
		fprintf (dump_file, "\n");
	      }

	    if (phi_val.lattice_val == VARYING)
	      break;
	  }
      }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", phi_val);
      fprintf (dump_file, "\n\n");
    }

  set_lattice_value (PHI_RESULT (phi), phi_val);
  if (phi_val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (phi) = 1;
}


/* Compute the meet operator between VAL1 and VAL2:

		any M UNDEFINED = any
		any M VARYING   = VARYING
		Ci  M Cj        = Ci      if (i == j)
		Ci  M Cj        = VARYING if (i != j)  */

static value
cp_lattice_meet (value val1, value val2)
{
  value result;

  /* any M UNDEFINED = any.  */
  if (val1.lattice_val == UNDEFINED)
    return val2;
  else if (val2.lattice_val == UNDEFINED)
    return val1;

  /* any M VARYING = VARYING.  */
  if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
      return result;
    }

  /* Ci M Cj = Ci if (i == j)
     Ci M Cj = VARYING if (i != j)  */
  if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
    {
      result.lattice_val = CONSTANT;
      result.const_val = val1.const_val;
    }
  else
    {
      result.lattice_val = VARYING;
      result.const_val = NULL_TREE;
    }

  return result;
}

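/* A minimal usage sketch (hypothetical values, illustration only):

     value a, b;
     a.lattice_val = CONSTANT;  a.const_val = build_int_2 (4, 0);
     b.lattice_val = UNDEFINED; b.const_val = NULL_TREE;
     cp_lattice_meet (a, b);     => CONSTANT 4  (any M UNDEFINED = any)
     cp_lattice_meet (a, a);     => CONSTANT 4  (Ci M Cj, i == j)

   Meeting CONSTANT 4 with CONSTANT 5 would instead yield VARYING,
   per the Ci M Cj rule above.  */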

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, do the following:

   - If the statement is an assignment, add all the SSA edges starting at
     this definition.

   - If the statement is a conditional branch:
	. If the statement evaluates to non-constant, add all edges to
	  worklist.
	. If the statement is constant, add the edge executed as the
	  result of the branch.  */

static void
visit_stmt (tree stmt)
{
  size_t i;
  stmt_ann_t ann;
  def_optype defs;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;

  /* If the statement has already been deemed to be VARYING, don't simulate
     it again.  */
  if (DONT_SIMULATE_AGAIN (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement: ");
      print_generic_stmt (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  ann = stmt_ann (stmt);

  /* If this statement is already in the worklist then "cancel" it.  The
     reevaluation implied by the worklist entry will produce the same
     value we generate here and thus reevaluating it again from the
     worklist is pointless.  */
  if (ann->in_ccp_worklist)
    ann->in_ccp_worklist = 0;

  /* Now examine the statement.  If the statement is an assignment that
     produces a single output value, evaluate its RHS to see if the lattice
     value of its output has changed.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
    visit_assignment (stmt);

  /* Definitions made by statements other than assignments to SSA_NAMEs
     represent unknown modifications to their outputs.  Mark them VARYING.  */
  else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      for (i = 0; i < NUM_DEFS (defs); i++)
	{
	  tree def = DEF_OP (defs, i);
	  def_to_varying (def);
	}
    }

  /* If STMT is a conditional branch, see if we can determine which branch
     will be taken.  */
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    visit_cond_stmt (stmt);

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;

      /* If STMT is a computed goto, then mark all the output edges
	 executable.  */
      if (computed_goto_p (stmt))
	add_outgoing_control_edges (bb_for_stmt (stmt));
    }

  /* Mark all V_MAY_DEF operands VARYING.  */
  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    def_to_varying (V_MAY_DEF_RESULT (v_may_defs, i));

  /* Mark all V_MUST_DEF operands VARYING.  */
  v_must_defs = V_MUST_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
    def_to_varying (V_MUST_DEF_OP (v_must_defs, i));
}


/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS.  */

static void
visit_assignment (tree stmt)
{
  value val;
  tree lhs, rhs;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
    {
      /* Volatile variables are always VARYING.  */
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }
  else if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      value *nval = get_value (rhs);
      val = *nval;
    }
  else
    {
      /* Evaluate the statement.  */
      val = evaluate_stmt (stmt);
    }

  /* FIXME: Hack.  If this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree lhs = TREE_OPERAND (stmt, 0);
    if (val.lattice_val == CONSTANT
	&& TREE_CODE (lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
      {
	tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);

	if (w && is_gimple_min_invariant (w))
	  val.const_val = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.const_val = NULL;
	  }
      }
  }

  /* Set the lattice value of the statement's output.  */
  set_lattice_value (lhs, val);
  if (val.lattice_val == VARYING)
    DONT_SIMULATE_AGAIN (stmt) = 1;
}


/* Visit the conditional statement STMT.  If it evaluates to a constant value,
   mark outgoing edges appropriately.  */

static void
visit_cond_stmt (tree stmt)
{
  edge e;
  value val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically, add
     all outgoing edges from BLOCK.  */
  e = find_taken_edge (block, val.const_val);
  if (e)
    add_control_edge (e);
  else
    {
      DONT_SIMULATE_AGAIN (stmt) = 1;
      add_outgoing_control_edges (block);
    }
}

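/* For example (illustrative only):

     x_1 = 0;
     if (x_1 == 0) goto L1; else goto L2;

   Here evaluate_stmt reduces the predicate to a constant true, so
   find_taken_edge returns the edge to L1 and only that edge is added
   to the CFG worklist.  The edge to L2 is never marked EDGE_EXECUTABLE
   and its destination can later be removed by cleanup_tree_cfg.  */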

/* Add all the edges coming out of BB to the control flow worklist.  */

static void
add_outgoing_control_edges (basic_block bb)
{
  edge e;

  for (e = bb->succ; e; e = e->succ_next)
    add_control_edge (e);
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR)
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  /* If the block is already in the list, we're done.  */
  if (TEST_BIT (bb_in_list, bb->index))
    return;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
	     e->src->index, e->dest->index);
}


/* CCP specific front-end to the non-destructive constant folding routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  int kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  /* If the RHS is just a variable, then that variable must now have
     a constant value that we can return directly.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    return get_value (rhs)->const_val;

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == '1')
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  value *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0)->const_val;
	}

      retval = nondestructive_fold_unary_to_constant (code,
						      TREE_TYPE (rhs),
						      op0);

      /* If we folded, but did not create an invariant, then we can not
	 use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
	return NULL;

      /* If we could not fold the expression, but the arguments are all
	 constants and gimple values, then build and return the new
	 expression.

	 In some cases the new expression is still something we can
	 use as a replacement for an argument.  This happens with
	 NOP conversions of types for example.

	 In other cases the new expression can not be used as a
	 replacement for an argument (as it would create non-gimple
	 code).  But the new expression can still be used to derive
	 other constants.  */
      if (! retval && is_gimple_min_invariant (op0))
	return build1 (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == '2'
	   || kind == '<'
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  value *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->const_val;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  value *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->const_val;
	}

      retval = nondestructive_fold_binary_to_constant (code,
						       TREE_TYPE (rhs),
						       op0, op1);

      /* If we folded, but did not create an invariant, then we can not
	 use this expression.  */
      if (retval && ! is_gimple_min_invariant (retval))
	return NULL;

      /* If we could not fold the expression, but the arguments are all
	 constants and gimple values, then build and return the new
	 expression.

	 In some cases the new expression is still something we can
	 use as a replacement for an argument.  This happens with
	 NOP conversions of types for example.

	 In other cases the new expression can not be used as a
	 replacement for an argument (as it would create non-gimple
	 code).  But the new expression can still be used to derive
	 other constants.  */
      if (! retval
	  && is_gimple_min_invariant (op0)
	  && is_gimple_min_invariant (op1))
	return build (code, TREE_TYPE (rhs), op0, op1);
    }

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
	   && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
	       == FUNCTION_DECL)
	   && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
    {
      use_optype uses = STMT_USE_OPS (stmt);
      if (NUM_USES (uses) != 0)
	{
	  tree *orig;
	  size_t i;

	  /* Preserve the original values of every operand.  */
	  orig = xmalloc (sizeof (tree) * NUM_USES (uses));
	  for (i = 0; i < NUM_USES (uses); i++)
	    orig[i] = USE_OP (uses, i);

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL);
	  retval = fold_builtin (rhs);

	  /* Restore operands to their original form.  */
	  for (i = 0; i < NUM_USES (uses); i++)
	    SET_USE_OP (uses, i, orig[i]);
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    {
      if (TREE_TYPE (retval) != TREE_TYPE (rhs))
	retval = fold_convert (TREE_TYPE (rhs), retval);

      if (TREE_TYPE (retval) == TREE_TYPE (rhs))
	return retval;
    }

  /* No simplification was possible.  */
  return rhs;
}

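/* For example (illustrative only): given

     a_2 = 3;
     b_3 = a_2 + 1;

   when visiting the second statement, a_2 is CONSTANT 3 in the
   lattice, so op0 is replaced by 3 and
   nondestructive_fold_binary_to_constant folds 3 + 1 into the
   invariant 4, which becomes b_3's CONSTANT lattice value.  */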

/* Evaluate statement STMT.  */

static value
evaluate_stmt (tree stmt)
{
  value val;
  tree simplified;
  latticevalue likelyvalue = likely_value (stmt);

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);
  /* Otherwise the statement is likely to have an UNDEFINED value and
     there will be nothing to do.  */
  else
    simplified = NULL_TREE;

  if (simplified && is_gimple_min_invariant (simplified))
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.const_val = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had undefined operands, then the result of the statement should
	 be undefined.  Else the result of the statement is VARYING.  */
      val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
      val.const_val = NULL_TREE;
    }

  return val;
}


/* Debugging dumps.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, value val)
{
  switch (val.lattice_val)
    {
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.const_val, dump_flags);
      break;
    default:
      abort ();
    }
}

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

#if defined ENABLE_CHECKING
  if (var_size < field_size)
    abort ();
#endif

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT) 1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
			fold_convert (TREE_TYPE (var), build_int_2 (mask, 0)));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
			fold_convert (TREE_TYPE (var), build_int_2 (mask, 0)));
    }

  return fold (wide_val);
}

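/* Worked example (illustrative): widening a signed 3-bit field value
   into an 8-bit variable.  For VAL == 0b110 (-2 in 3 bits) the sign
   bit (bit 2) is set, so the sign-extension branch builds the mask
   0b11111000 (the upper 8 - 3 = 5 bits) and folds VAL | mask into
   0b11111110, which is -2 in 8 bits.  For VAL == 0b010 the zero
   extension branch masks with 0b00000111 and returns 2.  */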

/* Function indicating whether we ought to include information for VAR
   when calculating immediate uses.  */

static bool
need_imm_uses_for (tree var)
{
  return get_value (var)->lattice_val != VARYING;
}


/* Initialize local data structures and worklists for CCP.  */

static void
initialize (void)
{
  edge e;
  basic_block bb;
  sbitmap virtual_var;

  /* Worklists of SSA edges.  */
  VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
  VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");

  executable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (executable_blocks);

  bb_in_list = sbitmap_alloc (last_basic_block);
  sbitmap_zero (bb_in_list);

  value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
  memset (value_vector, 0, num_ssa_names * sizeof (value));

  /* 1 if ssa variable is used in a virtual variable context.  */
  virtual_var = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (virtual_var);

  /* Initialize default values and simulation flags for PHI nodes, statements
     and edges.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree stmt;
      stmt_ann_t ann;
      def_optype defs;
      v_may_def_optype v_may_defs;
      v_must_def_optype v_must_defs;
      size_t x;
      int vary;

      /* Get the default value for each definition.  */
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  vary = 0;
	  stmt = bsi_stmt (i);
	  get_stmt_operands (stmt);
	  ann = stmt_ann (stmt);
	  defs = DEF_OPS (ann);
	  for (x = 0; x < NUM_DEFS (defs); x++)
	    {
	      tree def = DEF_OP (defs, x);
	      if (get_value (def)->lattice_val == VARYING)
		vary = 1;
	    }
	  DONT_SIMULATE_AGAIN (stmt) = vary;

	  /* Mark all V_MAY_DEF operands VARYING.  */
	  v_may_defs = V_MAY_DEF_OPS (ann);
	  for (x = 0; x < NUM_V_MAY_DEFS (v_may_defs); x++)
	    {
	      tree res = V_MAY_DEF_RESULT (v_may_defs, x);
	      get_value (res)->lattice_val = VARYING;
	      SET_BIT (virtual_var, SSA_NAME_VERSION (res));
	    }

	  /* Mark all V_MUST_DEF operands VARYING.  */
	  v_must_defs = V_MUST_DEF_OPS (ann);
	  for (x = 0; x < NUM_V_MUST_DEFS (v_must_defs); x++)
	    {
	      tree v_must_def = V_MUST_DEF_OP (v_must_defs, x);
	      get_value (v_must_def)->lattice_val = VARYING;
	      SET_BIT (virtual_var, SSA_NAME_VERSION (v_must_def));
	    }
	}

      for (e = bb->succ; e; e = e->succ_next)
	e->flags &= ~EDGE_EXECUTABLE;
    }

  /* Now process PHI nodes.  */
  FOR_EACH_BB (bb)
    {
      tree phi, var;
      int x;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  value *val;

	  val = get_value (PHI_RESULT (phi));
	  if (val->lattice_val != VARYING)
	    {
	      for (x = 0; x < PHI_NUM_ARGS (phi); x++)
		{
		  var = PHI_ARG_DEF (phi, x);

		  /* If one argument is virtual, the result is virtual, and
		     therefore varying.  */
		  if (TREE_CODE (var) == SSA_NAME)
		    {
		      if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
			{
			  val->lattice_val = VARYING;
			  SET_BIT (virtual_var,
				   SSA_NAME_VERSION (PHI_RESULT (phi)));
			  break;
			}
		    }
		}
	    }
	  DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
	}
    }

  sbitmap_free (virtual_var);

  /* Compute immediate uses for variables we care about.  */
  compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
    {
      if (e->dest != EXIT_BLOCK_PTR)
	{
	  e->flags |= EDGE_EXECUTABLE;
	  cfg_blocks_add (e->dest);
	}
    }
}


/* Free allocated storage.  */

static void
finalize (void)
{
  ssa_edges = NULL;
  varying_ssa_edges = NULL;
  cfg_blocks = NULL;
  free (value_vector);
  sbitmap_free (bb_in_list);
  sbitmap_free (executable_blocks);
  free_df ();
}

/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return (cfg_blocks_num == 0);
}

/* Add a basic block to the worklist.  */

static void
cfg_blocks_add (basic_block bb)
{
  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
    return;

  if (TEST_BIT (bb_in_list, bb->index))
    return;

  if (cfg_blocks_empty_p ())
    {
      cfg_blocks_tail = cfg_blocks_head = 0;
      cfg_blocks_num = 1;
    }
  else
    {
      cfg_blocks_num++;
      if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
	{
	  /* We have to grow the array now.  Adjust the queue to occupy the
	     full space of the original array.  */
	  cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
	  cfg_blocks_head = 0;
	  VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
	}
      else
	cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
    }

  VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
  SET_BIT (bb_in_list, bb->index);
}

/* Remove a block from the worklist.  */

static basic_block
cfg_blocks_get (void)
{
  basic_block bb;

  bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);

#ifdef ENABLE_CHECKING
  if (cfg_blocks_empty_p () || !bb)
    abort ();
#endif

  cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
  --cfg_blocks_num;
  RESET_BIT (bb_in_list, bb->index);

  return bb;
}

/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the ssa_edges or varying_ssa_edges worklist.  */

static void
add_var_to_ssa_edges_worklist (tree var, value val)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  dataflow_t df = get_immediate_uses (stmt);
  int num_uses = num_immediate_uses (df);
  int i;

  for (i = 0; i < num_uses; i++)
    {
      tree use = immediate_use (df, i);

      if (!DONT_SIMULATE_AGAIN (use))
	{
	  stmt_ann_t ann = stmt_ann (use);
	  if (ann->in_ccp_worklist == 0)
	    {
	      ann->in_ccp_worklist = 1;
	      if (val.lattice_val == VARYING)
		VARRAY_PUSH_TREE (varying_ssa_edges, use);
	      else
		VARRAY_PUSH_TREE (ssa_edges, use);
	    }
	}
    }
}

/* Set the lattice value for the variable VAR to VARYING.  */

static void
def_to_varying (tree var)
{
  value val;
  val.lattice_val = VARYING;
  val.const_val = NULL_TREE;
  set_lattice_value (var, val);
}

/* Set the lattice value for variable VAR to VAL.  */

static void
set_lattice_value (tree var, value val)
{
  value *old = get_value (var);

#ifdef ENABLE_CHECKING
  if (val.lattice_val == UNDEFINED)
    {
      /* CONSTANT->UNDEFINED is never a valid state transition.  */
      if (old->lattice_val == CONSTANT)
	abort ();

      /* VARYING->UNDEFINED is generally not a valid state transition,
	 except for values which are initialized to VARYING.  */
      if (old->lattice_val == VARYING
	  && get_default_value (var).lattice_val != VARYING)
	abort ();
    }
  else if (val.lattice_val == CONSTANT)
    {
      /* VARYING -> CONSTANT is an invalid state transition, except
	 for objects which start off in a VARYING state.  */
      if (old->lattice_val == VARYING
	  && get_default_value (var).lattice_val != VARYING)
	abort ();
    }
#endif

  /* If the constant for VAR has changed, then this VAR is really varying.  */
  if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT
      && !simple_cst_equal (old->const_val, val.const_val))
    {
      val.lattice_val = VARYING;
      val.const_val = NULL_TREE;
    }

  if (old->lattice_val != val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file,
			      "Lattice value changed to ", val);
	  fprintf (dump_file, ".  Adding definition to SSA edges.\n");
	}

      add_var_to_ssa_edges_worklist (var, val);
      *old = val;
    }
}

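/* E.g. (illustrative): if x_1 was recorded as CONSTANT 3 and a later
   visit computes CONSTANT 4 for it, the value is forced to VARYING
   rather than oscillating between constants.  Lattice values can thus
   only move downward (UNDEFINED -> CONSTANT -> VARYING), which is
   what guarantees the simulation terminates.  */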
/* Replace USE references in statement STMT with their immediate reaching
   definition.  Return true if at least one reference was replaced.  If
   REPLACED_ADDRESSES_P is given, it will be set to true if an address
   constant was replaced.  */

static bool
replace_uses_in (tree stmt, bool *replaced_addresses_p)
{
  bool replaced = false;
  use_optype uses;
  size_t i;

  if (replaced_addresses_p)
    *replaced_addresses_p = false;

  get_stmt_operands (stmt);

  uses = STMT_USE_OPS (stmt);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      use_operand_p use = USE_OP_PTR (uses, i);
      value *val = get_value (USE_FROM_PTR (use));

      if (val->lattice_val == CONSTANT)
	{
	  SET_USE (use, val->const_val);
	  replaced = true;
	  if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
	      && replaced_addresses_p)
	    *replaced_addresses_p = true;
	}
    }

  return replaced;
}

/* Return the likely latticevalue for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if any operands of STMT are undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static latticevalue
likely_value (tree stmt)
{
  use_optype uses;
  size_t i;
  int found_constant = 0;
  stmt_ann_t ann;

  /* If the statement makes aliased loads or has volatile operands, it
     won't fold to a constant value.  */
  ann = stmt_ann (stmt);
  if (ann->makes_aliased_loads || ann->has_volatile_ops)
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  This may be overly conservative,
     in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  get_stmt_operands (stmt);

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    {
      tree use = USE_OP (uses, i);
      value *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	return UNDEFINED;

      if (val->lattice_val == CONSTANT)
	found_constant = 1;
    }

  return ((found_constant || !uses) ? CONSTANT : VARYING);
}

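/* For example (illustrative): in "y_3 = x_1 + z_2", if x_1 is
   UNDEFINED the statement is deemed likely UNDEFINED no matter what
   z_2 is; if instead x_1 is CONSTANT 5 and z_2 is VARYING, the
   statement is deemed likely CONSTANT and handed to ccp_fold, which
   may still fail and leave the result VARYING.  */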
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
{
  tree min_idx, idx, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!lang_hooks.types_compatible_p (orig_type, elt_type))
    return NULL_TREE;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = integer_zero_node;
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;

      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 1,
				   TREE_INT_CST_LOW (offset),
				   TREE_INT_CST_HIGH (offset),
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_2_wide (lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = integer_zero_node;
  if (TYPE_DOMAIN (array_type))
    {
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
	min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
      else
	min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      idx = fold_convert (TYPE_DOMAIN (array_type), idx);
      elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  return build (ARRAY_REF, orig_type, base, idx, min_idx,
		size_int (tree_low_cst (elt_size, 1)
			  / (TYPE_ALIGN (elt_type) / BITS_PER_UNIT)));
}

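/* Worked example (illustrative): for "int a[10]" and the expression
   *(&a[1] + 8), BASE is the ARRAY_REF a[1] and OFFSET is 8.  The
   ARRAY_REF contributes ELT_OFFSET 1 and leaves BASE as a; the byte
   offset 8 divides exactly by the element size 4, giving index 2;
   adding ELT_OFFSET yields a[3].  Assumes a 4-byte int.  */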
/* A subroutine of fold_stmt_r.  Attempts to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
/* ??? This doesn't handle class inheritance.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (lang_hooks.types_compatible_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;
      if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);
      if (cmp < 0)
	{
	  /* Don't care about offsets into the middle of scalars.  */
	  if (!AGGREGATE_TYPE_P (field_type))
	    continue;

	  /* Check for array at the end of the struct.  This is often
	     used for flexible array members.  We should be able to
	     turn this into an array access anyway.  */
	  if (TREE_CODE (field_type) == ARRAY_TYPE)
	    tail_array_field = f;

	  /* Check the end of the field against the offset.  */
	  if (!DECL_SIZE_UNIT (f)
	      || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	    continue;
	  t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
	  if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	    continue;

	  /* If we matched, then set offset to the displacement into
	     this field.  */
	  offset = t;
	}

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      else if (lang_hooks.types_compatible_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about type-punning of scalars.  */
      else if (!AGGREGATE_TYPE_P (field_type))
	return NULL_TREE;

      goto found;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);

 found:
  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a PLUS_EXPR.  */
  if (TREE_CODE (base) == PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Try folding *(&B+O) to B[X].  */
      t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
      if (t)
	return t;

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
					      TREE_TYPE (expr), false);
      if (t)
	return t;

      /* Fold *&B to B.  We can only do this if EXPR is the same type
	 as BASE.  We can't do this if EXPR is the element type of an array
	 and BASE is the array.  */
      if (integer_zerop (offset)
	  && lang_hooks.types_compatible_p (TREE_TYPE (base),
					    TREE_TYPE (expr)))
	return base;
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we abort in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
						  base, offset,
						  TREE_TYPE (expr), true);
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

1796/* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1797
1798 A quaint feature extant in our address arithmetic is that there
1799 can be hidden type changes here. The type of the result need
1800 not be the same as the type of the input pointer.
1801
1802 What we're after here is an expression of the form
1803 (T *)(&array + const)
1804 where the cast doesn't actually exist, but is implicit in the
1805 type of the PLUS_EXPR. We'd like to turn this into
1806 &array[x]
1807 which may be able to propagate further. */
1808
1809static tree
1810maybe_fold_stmt_addition (tree expr)
1811{
1812 tree op0 = TREE_OPERAND (expr, 0);
1813 tree op1 = TREE_OPERAND (expr, 1);
1814 tree ptr_type = TREE_TYPE (expr);
1815 tree ptd_type;
1816 tree t;
1817 bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
1818
1819 /* We're only interested in pointer arithmetic. */
1820 if (!POINTER_TYPE_P (ptr_type))
1821 return NULL_TREE;
1822 /* Canonicalize the integral operand to op1. */
1823 if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
1824 {
1825 if (subtract)
1826 return NULL_TREE;
1827 t = op0, op0 = op1, op1 = t;
1828 }
1829 /* It had better be a constant. */
1830 if (TREE_CODE (op1) != INTEGER_CST)
1831 return NULL_TREE;
1832 /* The first operand should be an ADDR_EXPR. */
1833 if (TREE_CODE (op0) != ADDR_EXPR)
1834 return NULL_TREE;
1835 op0 = TREE_OPERAND (op0, 0);
1836
1837 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1838 the offset into it. */
1839 while (TREE_CODE (op0) == ARRAY_REF)
1840 {
1841 tree array_obj = TREE_OPERAND (op0, 0);
1842 tree array_idx = TREE_OPERAND (op0, 1);
1843 tree elt_type = TREE_TYPE (op0);
1844 tree elt_size = TYPE_SIZE_UNIT (elt_type);
1845 tree min_idx;
1846
1847 if (TREE_CODE (array_idx) != INTEGER_CST)
1848 break;
1849 if (TREE_CODE (elt_size) != INTEGER_CST)
1850 break;
1851
1852 /* Un-bias the index by the min index of the array type. */
1853 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
1854 if (min_idx)
1855 {
1856 min_idx = TYPE_MIN_VALUE (min_idx);
1857 if (min_idx)
1858 {
44de5aeb
RK
1859 if (TREE_CODE (min_idx) != INTEGER_CST)
1860 break;
1861
6de9cd9a
DN
1862 array_idx = convert (TREE_TYPE (min_idx), array_idx);
1863 if (!integer_zerop (min_idx))
1864 array_idx = int_const_binop (MINUS_EXPR, array_idx,
1865 min_idx, 0);
1866 }
1867 }
1868
1869 /* Convert the index to a byte offset. */
1870 array_idx = convert (sizetype, array_idx);
1871 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
1872
1873 /* Update the operands for the next round, or for folding. */
1874	 /* If we're manipulating unsigned types, then folding into negative
1875	    values can produce incorrect results, particularly if the type
1876	    is smaller than the width of the pointer.  */
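	 /* E.g., if OP1 is an unsigned constant larger than the scaled
	    index, the MINUS_EXPR below would wrap to a huge positive
	    offset instead of the negative one intended, so give up.  */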
1877 if (subtract
1878 && TYPE_UNSIGNED (TREE_TYPE (op1))
1879 && tree_int_cst_lt (array_idx, op1))
1880	 return NULL_TREE;
1881 op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
1882 array_idx, op1, 0);
1883 subtract = false;
1884 op0 = array_obj;
1885 }
1886
1887 /* If we weren't able to fold the subtraction into another array reference,
1888 canonicalize the integer for passing to the array and component ref
1889 simplification functions. */
1890 if (subtract)
1891 {
1892 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
1893	 return NULL_TREE;
1894 op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
1895 /* ??? In theory fold should always produce another integer. */
1896 if (TREE_CODE (op1) != INTEGER_CST)
1897	 return NULL_TREE;
1898 }
1899
1900 ptd_type = TREE_TYPE (ptr_type);
1901
1902 /* At which point we can try some of the same things as for indirects. */
1903 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
1904 if (!t)
1905 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
1906 ptd_type, false);
1907 if (t)
1908 t = build1 (ADDR_EXPR, ptr_type, t);
1909
1910 return t;
1911}
1912
1913/* Subroutine of fold_stmt called via walk_tree. We perform several
1914 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
1915
1916static tree
1917fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
1918{
1919 bool *changed_p = data;
1920 tree expr = *expr_p, t;
1921
1922 /* ??? It'd be nice if walk_tree had a pre-order option. */
1923 switch (TREE_CODE (expr))
1924 {
1925 case INDIRECT_REF:
1926 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1927 if (t)
1928 return t;
1929 *walk_subtrees = 0;
1930
1931 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
1932 integer_zero_node);
1933 break;
1934
1935 /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
1936 We'd only want to bother decomposing an existing ARRAY_REF if
1937 the base array is found to have another offset contained within.
1938 Otherwise we'd be wasting time. */
1939
1940 case ADDR_EXPR:
1941 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1942 if (t)
1943 return t;
1944 *walk_subtrees = 0;
1945
1946	 /* Set TREE_INVARIANT so that the value is properly considered
1947	    constant, and so gets propagated as expected.  */
1948 if (*changed_p)
1949 recompute_tree_invarant_for_addr_expr (expr);
1950 return NULL_TREE;
1951
1952 case PLUS_EXPR:
1953 case MINUS_EXPR:
1954 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1955 if (t)
1956 return t;
1957 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
1958 if (t)
1959 return t;
1960 *walk_subtrees = 0;
1961
1962 t = maybe_fold_stmt_addition (expr);
1963 break;
1964
1965 case COMPONENT_REF:
1966 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1967 if (t)
1968 return t;
1969 *walk_subtrees = 0;
1970
1971 /* Make sure the FIELD_DECL is actually a field in the type on
1972	 the lhs. With IMA (inter-module analysis) it is possible that it came
1973 from another, equivalent type at this point. We have
1974 already checked the equivalence in this case.
1975 Match on type plus offset, to allow for unnamed fields.
1976 We won't necessarily get the corresponding field for
1977 unions; this is believed to be harmless. */
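	 Sketch: two units compiled together may each declare
	 'struct S { int i; };', yielding distinct but equivalent
	 FIELD_DECLs for 'i'; matching on type and offset below picks
	 the field belonging to the type actually on the lhs.  */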
1978
1979 if ((current_file_decl && TREE_CHAIN (current_file_decl))
1980 && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1)) !=
1981 TREE_TYPE (TREE_OPERAND (expr, 0))))
1982 {
1983 tree f;
1984 tree orig_field = TREE_OPERAND (expr, 1);
1985 tree orig_type = TREE_TYPE (orig_field);
1986 for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1987 f; f = TREE_CHAIN (f))
1988 {
1989 if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type)
1990 && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f),
1991 DECL_FIELD_BIT_OFFSET (orig_field))
1992 == 0
1993 && tree_int_cst_compare (DECL_FIELD_OFFSET (f),
1994 DECL_FIELD_OFFSET (orig_field))
1995 == 0)
1996 {
1997 TREE_OPERAND (expr, 1) = f;
1998 break;
1999 }
2000 }
2001	 /* Fall through is an error; it will be detected in tree-sra. */
2002 }
2003 break;
2004
2005 default:
2006 return NULL_TREE;
2007 }
2008
2009 if (t)
2010 {
2011 *expr_p = t;
2012 *changed_p = true;
2013 }
2014
2015 return NULL_TREE;
2016}
2017
2018/* Fold the statement pointed to by STMT_P. In some cases, this function may
2019 replace the whole statement with a new one. Returns true iff folding
2020 makes any changes. */
2021
2022bool
2023fold_stmt (tree *stmt_p)
2024{
2025 tree rhs, result, stmt;
2026 bool changed = false;
2027
2028 stmt = *stmt_p;
2029
2030 /* If we replaced constants and the statement makes pointer dereferences,
2031 then we may need to fold instances of *&VAR into VAR, etc. */
2032 if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
2033 {
2034 *stmt_p
2035 = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
2036 NULL);
2037 return true;
2038 }
2039
2040 rhs = get_rhs (stmt);
2041 if (!rhs)
2042 return changed;
2043 result = NULL_TREE;
2044
2045 if (TREE_CODE (rhs) == CALL_EXPR)
2046 {
2047 tree callee;
2048
2049 /* Check for builtins that CCP can handle using information not
2050 available in the generic fold routines. */
2051 callee = get_callee_fndecl (rhs);
2052 if (callee && DECL_BUILT_IN (callee))
2053 result = ccp_fold_builtin (stmt, rhs);
2054 else
2055 {
2056 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2057 here are when we've propagated the address of a decl into the
2058 object slot. */
2059 /* ??? Should perhaps do this in fold proper. However, doing it
2060 there requires that we create a new CALL_EXPR, and that requires
2061 copying EH region info to the new node. Easier to just do it
2062 here where we can just smash the call operand. */
2063 callee = TREE_OPERAND (rhs, 0);
2064 if (TREE_CODE (callee) == OBJ_TYPE_REF
2065 && lang_hooks.fold_obj_type_ref
2066 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2067 && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
2068 {
2069 tree t;
2070
2071	 /* ??? Caution: Broken ADDR_EXPR semantics mean that
2072 looking at the type of the operand of the addr_expr
2073 can yield an array type. See silly exception in
2074 check_pointer_types_r. */
2075
2076 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2077 t = lang_hooks.fold_obj_type_ref (callee, t);
2078 if (t)
2079 {
2080 TREE_OPERAND (rhs, 0) = t;
2081 changed = true;
2082 }
2083 }
2084 }
6de9cd9a
DN
2085 }
2086
2087 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
2088 if (result == NULL_TREE)
2089 result = fold (rhs);
2090
2091 /* Strip away useless type conversions: both the NON_LVALUE_EXPR that
2092 may have been added by fold, and "useless" type conversions that might
2093 now be apparent due to propagation. */
2094 STRIP_USELESS_TYPE_CONVERSION (result);
2095
2096 if (result != rhs)
2097 changed |= set_rhs (stmt_p, result);
2098
2099 return changed;
2100}
2101
2102/* Get the main expression from statement STMT. */
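/* E.g., for 'x = a + b' this returns 'a + b'; for 'if (a < b)' it
   returns the condition 'a < b'; for statements with no embedded
   expression, the statement itself is returned.  */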
2103
2104static tree
2105get_rhs (tree stmt)
2106{
2107 enum tree_code code = TREE_CODE (stmt);
2108
2109 switch (code)
2110 {
2111 case RETURN_EXPR:
2112 stmt = TREE_OPERAND (stmt, 0);
2113 if (stmt)
2114 return get_rhs (stmt);
2115 else
2116 return NULL;
2117
2118 case MODIFY_EXPR:
2119 return TREE_OPERAND (stmt, 1);
2120
2121 case COND_EXPR:
2122 return COND_EXPR_COND (stmt);
2123 case SWITCH_EXPR:
2124 return SWITCH_COND (stmt);
2125 case GOTO_EXPR:
2126 return GOTO_DESTINATION (stmt);
2127 case LABEL_EXPR:
2128 return LABEL_EXPR_LABEL (stmt);
2129
2130 default:
2131 return stmt;
2132 }
2133}
2134
2135
2136/* Set the main expression of *STMT_P to EXPR.  Return false if EXPR is
   not a valid gimple replacement, true otherwise.  */
2137
2138static bool
2139set_rhs (tree *stmt_p, tree expr)
2140{
2141 tree stmt = *stmt_p, op;
2142 enum tree_code code = TREE_CODE (expr);
2143 stmt_ann_t ann;
2144
2145 /* Verify the constant-folded result is valid gimple. */
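 /* E.g., 'a_1 + b_2' is an acceptable RHS, while '(a_1 + b_2) + c_3'
    is not, since its first operand is not a gimple value.  */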
2146 if (TREE_CODE_CLASS (code) == '2')
2147 {
2148 if (!is_gimple_val (TREE_OPERAND (expr, 0))
2149 || !is_gimple_val (TREE_OPERAND (expr, 1)))
2150 return false;
2151 }
2152 else if (TREE_CODE_CLASS (code) == '1')
2153 {
2154 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
2155 return false;
2156 }
2157
2158 switch (TREE_CODE (stmt))
2159 {
2160 case RETURN_EXPR:
2161 op = TREE_OPERAND (stmt, 0);
2162 if (TREE_CODE (op) != MODIFY_EXPR)
2163 {
2164 TREE_OPERAND (stmt, 0) = expr;
2165 break;
2166 }
2167 stmt = op;
2168 /* FALLTHRU */
2169
2170 case MODIFY_EXPR:
2171 TREE_OPERAND (stmt, 1) = expr;
2172 break;
2173
2174 case COND_EXPR:
2175 COND_EXPR_COND (stmt) = expr;
2176 break;
2177 case SWITCH_EXPR:
2178 SWITCH_COND (stmt) = expr;
2179 break;
2180 case GOTO_EXPR:
2181 GOTO_DESTINATION (stmt) = expr;
2182 break;
2183 case LABEL_EXPR:
2184 LABEL_EXPR_LABEL (stmt) = expr;
2185 break;
2186
2187 default:
2188 /* Replace the whole statement with EXPR. If EXPR has no side
2189 effects, then replace *STMT_P with an empty statement. */
2190 ann = stmt_ann (stmt);
2191 *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
2192 (*stmt_p)->common.ann = (tree_ann_t) ann;
2193
2194 if (TREE_SIDE_EFFECTS (expr))
2195 {
2196 def_optype defs;
2197 v_may_def_optype v_may_defs;
2198 v_must_def_optype v_must_defs;
2199 size_t i;
2200
2201 /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
2202 replacement. */
2203 defs = DEF_OPS (ann);
2204 for (i = 0; i < NUM_DEFS (defs); i++)
2205 {
2206 tree var = DEF_OP (defs, i);
2207 if (TREE_CODE (var) == SSA_NAME)
2208 SSA_NAME_DEF_STMT (var) = *stmt_p;
2209 }
2210
2211 v_may_defs = V_MAY_DEF_OPS (ann);
2212 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
2213 {
2214 tree var = V_MAY_DEF_RESULT (v_may_defs, i);
2215 if (TREE_CODE (var) == SSA_NAME)
2216 SSA_NAME_DEF_STMT (var) = *stmt_p;
2217 }
2218
2219 v_must_defs = V_MUST_DEF_OPS (ann);
2220 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
2221	 {
2222	 tree var = V_MUST_DEF_OP (v_must_defs, i);
2223 if (TREE_CODE (var) == SSA_NAME)
2224 SSA_NAME_DEF_STMT (var) = *stmt_p;
2225 }
2226 }
2227 break;
2228 }
2229
2230 return true;
2231}
2232
2233
2234/* Return a default value for variable VAR using the following rules:
2235
2236 1- Global and static variables are considered VARYING, unless they are
2237 declared const.
2238
2239 2- Function arguments are considered VARYING.
2240
2241 3- Any other value is considered UNDEFINED. This is useful when
2242 considering PHI nodes. PHI arguments that are undefined do not
2243 change the constant value of the PHI node, which allows for more
2244 constants to be propagated. */
2245
2246static value
2247get_default_value (tree var)
2248{
2249 value val;
2250 tree sym;
2251
2252 if (TREE_CODE (var) == SSA_NAME)
2253 sym = SSA_NAME_VAR (var);
2254 else
2255 {
2256#ifdef ENABLE_CHECKING
2257 if (!DECL_P (var))
2258 abort ();
2259#endif
2260 sym = var;
2261 }
2262
2263 val.lattice_val = UNDEFINED;
2264 val.const_val = NULL_TREE;
2265
2266 if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
2267 {
2268 /* Function arguments and volatile variables are considered VARYING. */
2269 val.lattice_val = VARYING;
2270 }
2271 else if (decl_function_context (sym) != current_function_decl
2272 || TREE_STATIC (sym))
2273 {
2274 /* Globals and static variables are considered VARYING, unless they
2275 are declared 'const'. */
2276 val.lattice_val = VARYING;
2277
2278 if (TREE_READONLY (sym)
2279 && DECL_INITIAL (sym)
2280 && is_gimple_min_invariant (DECL_INITIAL (sym)))
2281 {
2282 val.lattice_val = CONSTANT;
2283 val.const_val = DECL_INITIAL (sym);
2284 }
2285 }
2286 else
2287 {
2288 enum tree_code code;
2289 tree stmt = SSA_NAME_DEF_STMT (var);
2290
2291 if (!IS_EMPTY_STMT (stmt))
2292 {
2293 code = TREE_CODE (stmt);
2294 if (code != MODIFY_EXPR && code != PHI_NODE)
2295 val.lattice_val = VARYING;
2296 }
2297 }
2298
2299 return val;
2300}
2301
2302
2303/* Fold builtin call FN in statement STMT.  If it cannot be simplified,
2304 return NULL_TREE.  Otherwise, return the simplified replacement. */
2305
2306static tree
2307ccp_fold_builtin (tree stmt, tree fn)
2308{
2309 tree result, strlen_val[2];
2310 tree arglist = TREE_OPERAND (fn, 1), a;
2311 tree callee = get_callee_fndecl (fn);
2312 bitmap visited;
2313 int strlen_arg, i;
2314
2315 /* Ignore MD builtins. */
2316 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2317 return NULL_TREE;
2318
2319 /* First try the generic builtin folder. If that succeeds, return the
2320 result directly. */
2321 result = fold_builtin (fn);
2322 if (result)
2323 return result;
2324
2325 /* If the builtin could not be folded, and it has no argument list,
2326 we're done. */
2327 if (!arglist)
2328 return NULL_TREE;
2329
2330 /* Limit the work only for builtins we know how to simplify. */
2331 switch (DECL_FUNCTION_CODE (callee))
2332 {
2333 case BUILT_IN_STRLEN:
2334 case BUILT_IN_FPUTS:
2335 case BUILT_IN_FPUTS_UNLOCKED:
2336 strlen_arg = 1;
2337 break;
2338 case BUILT_IN_STRCPY:
2339 case BUILT_IN_STRNCPY:
2340 strlen_arg = 2;
2341 break;
2342 default:
2343 return NULL_TREE;
2344 }
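 /* STRLEN_ARG is a bitmask selecting the arguments whose string
    length we need: bit 0 for the first argument, bit 1 for the
    second.  E.g. for strcpy only the source (second) argument
    matters, hence the mask of 2; the loop below shifts the mask
    right as it walks the argument list.  */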
2345
2346 /* Try to use the dataflow information gathered by the CCP process. */
2347 visited = BITMAP_XMALLOC ();
2348
2349 memset (strlen_val, 0, sizeof (strlen_val));
2350 for (i = 0, a = arglist;
2351 strlen_arg;
2352 i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
2353 if (strlen_arg & 1)
2354 {
2355 bitmap_clear (visited);
2356 if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
2357 strlen_val[i] = NULL_TREE;
2358 }
2359
2360 BITMAP_XFREE (visited);
2361
2362 /* FIXME. All this code looks dangerous in the sense that it might
2363 create non-gimple expressions. */
2364 switch (DECL_FUNCTION_CODE (callee))
2365 {
2366 case BUILT_IN_STRLEN:
2367 /* Convert from the internal "sizetype" type to "size_t". */
2368 if (strlen_val[0]
2369 && size_type_node)
2370 {
2371 tree new = convert (size_type_node, strlen_val[0]);
2372
2373 /* If the result is not a valid gimple value, or not a cast
2374 of a valid gimple value, then we cannot use the result. */
2375 if (is_gimple_val (new)
2376 || (is_gimple_cast (new)
2377 && is_gimple_val (TREE_OPERAND (new, 0))))
2378 return new;
2379 else
2380 return NULL_TREE;
2381 }
2382 return strlen_val[0];
2383 case BUILT_IN_STRCPY:
2384 if (strlen_val[1]
2385	 && is_gimple_val (strlen_val[1]))
2386	 return simplify_builtin_strcpy (arglist, strlen_val[1]);
	 /* Do not fall through: if we could not simplify, the cases
	    below would be handed a strcpy argument list.  */
	 break;
2387 case BUILT_IN_STRNCPY:
2388 if (strlen_val[1]
2389	 && is_gimple_val (strlen_val[1]))
2390	 return simplify_builtin_strncpy (arglist, strlen_val[1]);
	 break;
2391 case BUILT_IN_FPUTS:
2392 return simplify_builtin_fputs (arglist,
2393 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2394 strlen_val[0]);
2395 case BUILT_IN_FPUTS_UNLOCKED:
2396 return simplify_builtin_fputs (arglist,
2397 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2398 strlen_val[0]);
2399
2400 default:
2401 abort ();
2402 }
2403
2404 return NULL_TREE;
2405}
2406
2407
2408/* Return the string length of ARG in LENGTH. If ARG is an SSA name variable,
2409 follow its use-def chains. If LENGTH is not NULL and its value is not
2410 equal to the length we determine, or if we are unable to determine the
2411 length, return false. VISITED is a bitmap of visited variables. */
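/* Sketch, in GIMPLE-like pseudo-code: for

     s_1 = &"hello"[0];
     n_2 = strlen (s_1);

   following the use-def chain from S_1 reaches the string constant
   and yields a length of 5.  */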
2412
2413static bool
2414get_strlen (tree arg, tree *length, bitmap visited)
2415{
2416 tree var, def_stmt, val;
2417
2418 if (TREE_CODE (arg) != SSA_NAME)
2419 {
2420 val = c_strlen (arg, 1);
2421 if (!val)
2422 return false;
2423
2424 if (*length && simple_cst_equal (val, *length) != 1)
2425 return false;
2426
2427 *length = val;
2428 return true;
2429 }
2430
2431 /* If we were already here, break the infinite cycle. */
2432 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2433 return true;
2434 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2435
2436 var = arg;
2437 def_stmt = SSA_NAME_DEF_STMT (var);
2438
2439 switch (TREE_CODE (def_stmt))
2440 {
2441 case MODIFY_EXPR:
2442 {
2443 tree len, rhs;
2444
2445 /* The RHS of the statement defining VAR must either have a
2446 constant length or come from another SSA_NAME with a constant
2447 length. */
2448 rhs = TREE_OPERAND (def_stmt, 1);
2449 STRIP_NOPS (rhs);
2450 if (TREE_CODE (rhs) == SSA_NAME)
2451 return get_strlen (rhs, length, visited);
2452
2453 /* See if the RHS is a constant length. */
2454 len = c_strlen (rhs, 1);
2455 if (len)
2456 {
2457 if (*length && simple_cst_equal (len, *length) != 1)
2458 return false;
2459
2460 *length = len;
2461 return true;
2462 }
2463
2464 break;
2465 }
2466
2467 case PHI_NODE:
2468 {
2469 /* All the arguments of the PHI node must have the same constant
2470 length. */
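	/* E.g., for a PHI whose arguments are (addresses of) "foo"
	   and "bar", both have length 3, so 3 is returned; differing
	   lengths make us fail.  */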
2471 int i;
2472
2473 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2474 {
2475 tree arg = PHI_ARG_DEF (def_stmt, i);
2476
2477 /* If this PHI has itself as an argument, we cannot
2478 determine the string length of this argument. However,
2479 if we can find a constant string length for the other
2480 PHI args then we can still be sure that this is a
2481 constant string length. So be optimistic and just
2482 continue with the next argument. */
2483 if (arg == PHI_RESULT (def_stmt))
2484 continue;
2485
2486 if (!get_strlen (arg, length, visited))
2487 return false;
2488 }
2489
2490 return true;
2491 }
2492
2493 default:
2494 break;
2495 }
2496
2497
2498 return false;
2499}
2500
2501\f
2502/* A simple pass that attempts to fold all builtin functions. This pass
2503 is run after we've propagated as many constants as we can. */
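/* E.g., once CCP has turned the argument of a strlen call into the
   address of a string constant, the call can be replaced here with
   the constant length.  */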
2504
2505static void
2506execute_fold_all_builtins (void)
2507{
2508 basic_block bb;
2509 FOR_EACH_BB (bb)
2510 {
2511 block_stmt_iterator i;
2512 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
2513 {
2514 tree *stmtp = bsi_stmt_ptr (i);
2515 tree call = get_rhs (*stmtp);
2516 tree callee, result;
2517
2518 if (!call || TREE_CODE (call) != CALL_EXPR)
2519 continue;
2520 callee = get_callee_fndecl (call);
2521 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2522 continue;
2523
2524 result = ccp_fold_builtin (*stmtp, call);
2525 if (!result)
2526 switch (DECL_FUNCTION_CODE (callee))
2527 {
2528 case BUILT_IN_CONSTANT_P:
2529 /* Resolve __builtin_constant_p. If it hasn't been
2530 folded to integer_one_node by now, it's fairly
2531 certain that the value simply isn't constant. */
2532 result = integer_zero_node;
2533 break;
2534
2535 default:
2536 continue;
2537 }
2538
2539 if (dump_file && (dump_flags & TDF_DETAILS))
2540 {
2541 fprintf (dump_file, "Simplified\n ");
2542 print_generic_stmt (dump_file, *stmtp, dump_flags);
2543 }
2544
2545 if (set_rhs (stmtp, result))
2546 modify_stmt (*stmtp);
2547
2548 if (dump_file && (dump_flags & TDF_DETAILS))
2549 {
2550 fprintf (dump_file, "to\n ");
2551 print_generic_stmt (dump_file, *stmtp, dump_flags);
2552 fprintf (dump_file, "\n");
2553 }
2554 }
2555 }
2556}
2557
2558struct tree_opt_pass pass_fold_builtins =
2559{
2560 "fab", /* name */
2561 NULL, /* gate */
2562 execute_fold_all_builtins, /* execute */
2563 NULL, /* sub */
2564 NULL, /* next */
2565 0, /* static_pass_number */
2566 0, /* tv_id */
2567 PROP_cfg | PROP_ssa, /* properties_required */
2568 0, /* properties_provided */
2569 0, /* properties_destroyed */
2570 0, /* todo_flags_start */
2571 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
2572};
2573
2574
2575#include "gt-tree-ssa-ccp.h"