gcc/tree-ssa-ccp.c
4ee9c684 1/* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it
9under the terms of the GNU General Public License as published by the
10Free Software Foundation; either version 2, or (at your option) any
11later version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT
14ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
22
23/* Conditional constant propagation.
24
25 References:
26
27 Constant propagation with conditional branches,
28 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
29
30 Building an Optimizing Compiler,
31 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
32
33 Advanced Compiler Design and Implementation,
34 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
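
/* An illustrative sketch of the transformation (annotation, not part
   of the original source): given

     x = 4;
     if (x > 0)
       y = x + 1;
     else
       y = foo ();
     return y;

   the pass propagates x == 4 into the predicate, marks the else edge
   not executable, substitutes the constant 5 for y, and leaves the
   dead branch to be removed by cleanup_tree_cfg. */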
35
36#include "config.h"
37#include "system.h"
38#include "coretypes.h"
39#include "tm.h"
40#include "errors.h"
41#include "ggc.h"
42#include "tree.h"
43#include "langhooks.h"
44
45/* These RTL headers are needed for basic-block.h. */
46#include "rtl.h"
47#include "tm_p.h"
48#include "hard-reg-set.h"
49#include "basic-block.h"
50
51#include "diagnostic.h"
52#include "tree-inline.h"
53#include "tree-flow.h"
88bce636 54#include "tree-gimple.h"
4ee9c684 55#include "tree-dump.h"
56#include "tree-pass.h"
57#include "timevar.h"
58#include "expr.h"
59#include "flags.h"
60
61
62/* Possible lattice values. */
63typedef enum
64{
65 UNINITIALIZED = 0,
66 UNDEFINED,
67 CONSTANT,
68 VARYING
69} latticevalue;
70
71/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
72 been deemed VARYING and shouldn't be simulated again. */
73#define DONT_SIMULATE_AGAIN(T) TREE_VISITED (T)
74
75/* Main structure for CCP. Contains the lattice value and, if it's a
76 constant, the constant value. */
77typedef struct
78{
79 latticevalue lattice_val;
80 tree const_val;
81} value;
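
/* For illustration (annotation, not part of the original source):
   after simulating x_1 = 3, the entry for x_1 holds { CONSTANT, 3 };
   an SSA name that has not been examined yet holds
   { UNINITIALIZED, NULL } until get_value lazily replaces it with the
   variable's default value. */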
82
83/* A bitmap to keep track of executable blocks in the CFG. */
84static sbitmap executable_blocks;
85
86/* Array of control flow edges on the worklist. */
87static GTY(()) varray_type cfg_blocks = NULL;
88
89static unsigned int cfg_blocks_num = 0;
90static int cfg_blocks_tail;
91static int cfg_blocks_head;
92
93static sbitmap bb_in_list;
94
95/* This is used to track the current value of each variable. */
96static value *value_vector;
97
98/* Worklist of SSA edges which will need reexamination as their definition
99 has changed. SSA edges are def-use edges in the SSA web. For each
100 edge, we store the definition statement or PHI node D. The destination
01406fc0 101 nodes that need to be visited are accessed using immediate_uses
102 (D). */
4ee9c684 103static GTY(()) varray_type ssa_edges;
104
01406fc0 105/* Identical to SSA_EDGES. For performance reasons, the list of SSA
106 edges is split into two. One contains all SSA edges who need to be
107 reexamined because their lattice value changed to varying (this
108 worklist), and the other contains all other SSA edges to be
109 reexamined (ssa_edges).
110
111 Since most values in the program are varying, the ideal situation
112 is to move them to that lattice value as quickly as possible.
113 Thus, it doesn't make sense to process any other type of lattice
114 value until all varying values are propagated fully, which is one
115 thing using the varying worklist achieves. In addition, if you
116 don't use a separate worklist for varying edges, you end up with
117 situations where lattice values move from
118 undefined->constant->varying instead of undefined->varying.
119*/
120static GTY(()) varray_type varying_ssa_edges;
121
122
4ee9c684 123static void initialize (void);
124static void finalize (void);
125static void visit_phi_node (tree);
126static tree ccp_fold (tree);
127static value cp_lattice_meet (value, value);
128static void visit_stmt (tree);
129static void visit_cond_stmt (tree);
130static void visit_assignment (tree);
01406fc0 131static void add_var_to_ssa_edges_worklist (tree, value);
4ee9c684 132static void add_outgoing_control_edges (basic_block);
133static void add_control_edge (edge);
134static void def_to_varying (tree);
135static void set_lattice_value (tree, value);
136static void simulate_block (basic_block);
137static void simulate_stmt (tree);
138static void substitute_and_fold (void);
139static value evaluate_stmt (tree);
140static void dump_lattice_value (FILE *, const char *, value);
141static bool replace_uses_in (tree, bool *);
142static latticevalue likely_value (tree);
143static tree get_rhs (tree);
144static void set_rhs (tree *, tree);
145static value *get_value (tree);
146static value get_default_value (tree);
147static tree ccp_fold_builtin (tree, tree);
148static bool get_strlen (tree, tree *, bitmap);
149static inline bool cfg_blocks_empty_p (void);
150static void cfg_blocks_add (basic_block);
151static basic_block cfg_blocks_get (void);
152static bool need_imm_uses_for (tree var);
153
01406fc0 154/* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to
155 drain. This pops statements off the given WORKLIST and processes
156 them until there are no more statements on WORKLIST. */
157
158static void
159process_ssa_edge_worklist (varray_type *worklist)
160{
161 /* Drain the entire worklist. */
162 while (VARRAY_ACTIVE_SIZE (*worklist) > 0)
163 {
164 /* Pull the statement to simulate off the worklist. */
165 tree stmt = VARRAY_TOP_TREE (*worklist);
166 stmt_ann_t ann = stmt_ann (stmt);
167 VARRAY_POP (*worklist);
168
169 /* visit_stmt can "cancel" reevaluation of some statements.
170 If it does, then in_ccp_worklist will be zero. */
171 if (ann->in_ccp_worklist)
172 {
173 ann->in_ccp_worklist = 0;
174 simulate_stmt (stmt);
175 }
176 }
177}
178
4ee9c684 179/* Main entry point for SSA Conditional Constant Propagation. This
 180 pass takes no arguments; it operates on the current function.
 181
 182 On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
 183 the propagation of ADDR_EXPR expressions into pointer dereferences and need
 184 to be renamed into SSA.
 185
 186 Debugging information is written to DUMP_FILE, under the control of
 187 DUMP_FLAGS. */
188
189static void
190tree_ssa_ccp (void)
191{
192 initialize ();
193
194 /* Iterate until the worklists are empty. */
01406fc0 195 while (!cfg_blocks_empty_p ()
196 || VARRAY_ACTIVE_SIZE (ssa_edges) > 0
197 || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0)
4ee9c684 198 {
199 if (!cfg_blocks_empty_p ())
200 {
201 /* Pull the next block to simulate off the worklist. */
202 basic_block dest_block = cfg_blocks_get ();
203 simulate_block (dest_block);
204 }
205
01406fc0 206 /* In order to move things to varying as quickly as
 207 possible, process the VARYING_SSA_EDGES worklist first. */
208 process_ssa_edge_worklist (&varying_ssa_edges);
209
210 /* Now process the SSA_EDGES worklist. */
211 process_ssa_edge_worklist (&ssa_edges);
4ee9c684 212 }
213
214 /* Now perform substitutions based on the known constant values. */
215 substitute_and_fold ();
216
217 /* Now cleanup any unreachable code. */
218 cleanup_tree_cfg ();
219
220 /* Free allocated memory. */
221 finalize ();
222
223 /* Debugging dumps. */
224 if (dump_file && (dump_flags & TDF_DETAILS))
225 {
226 dump_referenced_vars (dump_file);
227 fprintf (dump_file, "\n\n");
228 }
229}
230
231static bool
232gate_ccp (void)
233{
234 return flag_tree_ccp != 0;
235}
236
237struct tree_opt_pass pass_ccp =
238{
239 "ccp", /* name */
240 gate_ccp, /* gate */
241 tree_ssa_ccp, /* execute */
242 NULL, /* sub */
243 NULL, /* next */
244 0, /* static_pass_number */
245 TV_TREE_CCP, /* tv_id */
246 PROP_cfg | PROP_ssa, /* properties_required */
247 0, /* properties_provided */
248 0, /* properties_destroyed */
249 0, /* todo_flags_start */
250 TODO_dump_func | TODO_rename_vars
251 | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
252};
253
254
255/* Get the constant value associated with variable VAR. */
256
257static value *
258get_value (tree var)
259{
260 value *val;
261
262#if defined ENABLE_CHECKING
263 if (TREE_CODE (var) != SSA_NAME)
264 abort ();
265#endif
266
267 val = &value_vector[SSA_NAME_VERSION (var)];
268 if (val->lattice_val == UNINITIALIZED)
269 *val = get_default_value (var);
270
271 return val;
272}
273
274
275/* Simulate the execution of BLOCK. Evaluate the statement associated
276 with each variable reference inside the block. */
277
278static void
279simulate_block (basic_block block)
280{
281 tree phi;
282
283 /* There is nothing to do for the exit block. */
284 if (block == EXIT_BLOCK_PTR)
285 return;
286
287 if (dump_file && (dump_flags & TDF_DETAILS))
288 fprintf (dump_file, "\nSimulating block %d\n", block->index);
289
290 /* Always simulate PHI nodes, even if we have simulated this block
291 before. */
04f8eea3 292 for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
4ee9c684 293 visit_phi_node (phi);
294
295 /* If this is the first time we've simulated this block, then we
296 must simulate each of its statements. */
297 if (!TEST_BIT (executable_blocks, block->index))
298 {
299 block_stmt_iterator j;
300 unsigned int normal_edge_count;
301 edge e, normal_edge;
302
303 /* Note that we have simulated this block. */
304 SET_BIT (executable_blocks, block->index);
305
306 for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
307 visit_stmt (bsi_stmt (j));
308
309 /* We can not predict when abnormal edges will be executed, so
310 once a block is considered executable, we consider any
311 outgoing abnormal edges as executable.
312
313 At the same time, if this block has only one successor that is
314 reached by non-abnormal edges, then add that successor to the
315 worklist. */
316 normal_edge_count = 0;
317 normal_edge = NULL;
318 for (e = block->succ; e; e = e->succ_next)
319 {
320 if (e->flags & EDGE_ABNORMAL)
321 {
322 add_control_edge (e);
323 }
324 else
325 {
326 normal_edge_count++;
327 normal_edge = e;
328 }
329 }
330
331 if (normal_edge_count == 1)
332 add_control_edge (normal_edge);
333 }
334}
335
336
 337/* Simulate statement USE_STMT, which was reached by following a def-use
 338 (SSA) edge from a definition whose lattice value has changed. */
339
340static void
341simulate_stmt (tree use_stmt)
342{
343 basic_block use_bb = bb_for_stmt (use_stmt);
344
345 if (dump_file && (dump_flags & TDF_DETAILS))
346 {
347 fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
348 print_generic_stmt (dump_file, use_stmt, dump_flags);
349 }
350
351 if (TREE_CODE (use_stmt) == PHI_NODE)
352 {
353 /* PHI nodes are always visited, regardless of whether or not the
354 destination block is executable. */
355 visit_phi_node (use_stmt);
356 }
357 else if (TEST_BIT (executable_blocks, use_bb->index))
358 {
359 /* Otherwise, visit the statement containing the use reached by
360 DEF, only if the destination block is marked executable. */
361 visit_stmt (use_stmt);
362 }
363}
364
365
366/* Perform final substitution and folding. After this pass the program
367 should still be in SSA form. */
368
369static void
370substitute_and_fold (void)
371{
372 basic_block bb;
373
374 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file,
376 "\nSubstituing constants and folding statements\n\n");
377
378 /* Substitute constants in every statement of every basic block. */
379 FOR_EACH_BB (bb)
380 {
381 block_stmt_iterator i;
382 tree phi;
383
384 /* Propagate our known constants into PHI nodes. */
04f8eea3 385 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4ee9c684 386 {
387 int i;
388
389 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
390 {
391 value *new_val;
56004dc5 392 use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
393 tree orig = USE_FROM_PTR (orig_p);
4ee9c684 394
56004dc5 395 if (! SSA_VAR_P (orig))
4ee9c684 396 break;
397
56004dc5 398 new_val = get_value (orig);
4ee9c684 399 if (new_val->lattice_val == CONSTANT
56004dc5 400 && may_propagate_copy (orig, new_val->const_val))
401 SET_USE (orig_p, new_val->const_val);
4ee9c684 402 }
403 }
404
405 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
406 {
407 bool replaced_address;
408 tree stmt = bsi_stmt (i);
409
410 /* Skip statements that have been folded already. */
411 if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
412 continue;
413
414 /* Replace the statement with its folded version and mark it
415 folded. */
416 if (dump_file && (dump_flags & TDF_DETAILS))
417 {
418 fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
419 print_generic_stmt (dump_file, stmt, TDF_SLIM);
420 }
421
422 if (replace_uses_in (stmt, &replaced_address))
423 {
424 bool changed = fold_stmt (bsi_stmt_ptr (i));
425 stmt = bsi_stmt(i);
426 modify_stmt (stmt);
427 /* If we folded a builtin function, we'll likely
428 need to rename VDEFs. */
429 if (replaced_address || changed)
430 mark_new_vars_to_rename (stmt, vars_to_rename);
431 }
432
433 if (dump_file && (dump_flags & TDF_DETAILS))
434 {
435 fprintf (dump_file, " with ");
436 print_generic_stmt (dump_file, stmt, TDF_SLIM);
437 fprintf (dump_file, "\n");
438 }
439 }
440 }
441}
442
443
444/* Loop through the PHI_NODE's parameters for BLOCK and compare their
445 lattice values to determine PHI_NODE's lattice value. The value of a
 446 PHI node is determined by calling cp_lattice_meet() with all the arguments
447 of the PHI node that are incoming via executable edges. */
448
449static void
450visit_phi_node (tree phi)
451{
452 bool short_circuit = 0;
453 value phi_val, *curr_val;
454 int i;
455
456 /* If the PHI node has already been deemed to be VARYING, don't simulate
457 it again. */
458 if (DONT_SIMULATE_AGAIN (phi))
459 return;
460
461 if (dump_file && (dump_flags & TDF_DETAILS))
462 {
463 fprintf (dump_file, "\nVisiting PHI node: ");
464 print_generic_expr (dump_file, phi, dump_flags);
465 }
466
467 curr_val = get_value (PHI_RESULT (phi));
468 switch (curr_val->lattice_val)
469 {
470 case VARYING:
471 if (dump_file && (dump_flags & TDF_DETAILS))
472 fprintf (dump_file, "\n Shortcircuit. Default of VARYING.");
473 short_circuit = 1;
474 break;
475
476 case CONSTANT:
477 phi_val = *curr_val;
478 break;
479
480 case UNDEFINED:
481 case UNINITIALIZED:
482 phi_val.lattice_val = UNDEFINED;
483 phi_val.const_val = NULL_TREE;
484 break;
485
486 default:
487 abort ();
488 }
489
490 /* If the variable is volatile or the variable is never referenced in a
491 real operand, then consider the PHI node VARYING. */
492 if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
493 {
494 phi_val.lattice_val = VARYING;
495 phi_val.const_val = NULL;
496 }
497 else
498 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
499 {
0bed3869 500 /* Compute the meet operator over all the PHI arguments. */
4ee9c684 501 edge e = PHI_ARG_EDGE (phi, i);
502
503 if (dump_file && (dump_flags & TDF_DETAILS))
504 {
505 fprintf (dump_file,
506 "\n Argument #%d (%d -> %d %sexecutable)\n",
507 i, e->src->index, e->dest->index,
508 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
509 }
510
 511 /* If the incoming edge is executable, compute the meet operator for
512 the existing value of the PHI node and the current PHI argument. */
513 if (e->flags & EDGE_EXECUTABLE)
514 {
515 tree rdef = PHI_ARG_DEF (phi, i);
516 value *rdef_val, val;
517
518 if (is_gimple_min_invariant (rdef))
519 {
520 val.lattice_val = CONSTANT;
521 val.const_val = rdef;
522 rdef_val = &val;
523 }
524 else
525 rdef_val = get_value (rdef);
526
527 phi_val = cp_lattice_meet (phi_val, *rdef_val);
528
529 if (dump_file && (dump_flags & TDF_DETAILS))
530 {
531 fprintf (dump_file, "\t");
532 print_generic_expr (dump_file, rdef, dump_flags);
533 dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
534 fprintf (dump_file, "\n");
535 }
536
537 if (phi_val.lattice_val == VARYING)
538 break;
539 }
540 }
541
542 if (dump_file && (dump_flags & TDF_DETAILS))
543 {
544 dump_lattice_value (dump_file, "\n PHI node value: ", phi_val);
545 fprintf (dump_file, "\n\n");
546 }
547
548 set_lattice_value (PHI_RESULT (phi), phi_val);
549 if (phi_val.lattice_val == VARYING)
550 DONT_SIMULATE_AGAIN (phi) = 1;
551}
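
/* A worked example (annotation, not part of the original source): for

     x_3 = PHI <x_1 (bb 2), x_2 (bb 3)>

   where only the edge from bb 2 is executable and x_1 has lattice
   value CONSTANT 4, the meet is computed over the single executable
   argument and x_3 becomes CONSTANT 4. If the edge from bb 3 later
   becomes executable with x_2 at CONSTANT 7, revisiting the PHI node
   computes CONSTANT 4 M CONSTANT 7 and drives x_3 to VARYING. */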
552
553
554/* Compute the meet operator between VAL1 and VAL2:
555
556 any M UNDEFINED = any
557 any M VARYING = VARYING
558 Ci M Cj = Ci if (i == j)
559 Ci M Cj = VARYING if (i != j) */
560static value
561cp_lattice_meet (value val1, value val2)
562{
563 value result;
564
565 /* any M UNDEFINED = any. */
566 if (val1.lattice_val == UNDEFINED)
567 return val2;
568 else if (val2.lattice_val == UNDEFINED)
569 return val1;
570
571 /* any M VARYING = VARYING. */
572 if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
573 {
574 result.lattice_val = VARYING;
575 result.const_val = NULL_TREE;
576 return result;
577 }
578
579 /* Ci M Cj = Ci if (i == j)
580 Ci M Cj = VARYING if (i != j) */
581 if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
582 {
583 result.lattice_val = CONSTANT;
584 result.const_val = val1.const_val;
585 }
586 else
587 {
588 result.lattice_val = VARYING;
589 result.const_val = NULL_TREE;
590 }
591
592 return result;
593}
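
/* Worked instances of the table above (annotation, not part of the
   original source), writing C5 and C7 for CONSTANT values 5 and 7:

     UNDEFINED M C5 = C5
     C5 M C5 = C5
     C5 M C7 = VARYING
     C5 M VARYING = VARYING */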
594
595
596/* Evaluate statement STMT. If the statement produces an output value and
597 its evaluation changes the lattice value of its output, do the following:
598
599 - If the statement is an assignment, add all the SSA edges starting at
600 this definition.
601
602 - If the statement is a conditional branch:
603 . If the statement evaluates to non-constant, add all edges to
604 worklist.
605 . If the statement is constant, add the edge executed as the
606 result of the branch. */
607
608static void
609visit_stmt (tree stmt)
610{
611 size_t i;
612 stmt_ann_t ann;
613 def_optype defs;
2cf24776 614 v_may_def_optype v_may_defs;
615 v_must_def_optype v_must_defs;
4ee9c684 616
617 /* If the statement has already been deemed to be VARYING, don't simulate
618 it again. */
619 if (DONT_SIMULATE_AGAIN (stmt))
620 return;
621
622 if (dump_file && (dump_flags & TDF_DETAILS))
623 {
624 fprintf (dump_file, "\nVisiting statement: ");
625 print_generic_stmt (dump_file, stmt, TDF_SLIM);
626 fprintf (dump_file, "\n");
627 }
628
629 ann = stmt_ann (stmt);
630
631 /* If this statement is already in the worklist then "cancel" it. The
632 reevaluation implied by the worklist entry will produce the same
633 value we generate here and thus reevaluating it again from the
634 worklist is pointless. */
635 if (ann->in_ccp_worklist)
636 ann->in_ccp_worklist = 0;
637
638 /* Now examine the statement. If the statement is an assignment that
639 produces a single output value, evaluate its RHS to see if the lattice
640 value of its output has changed. */
641 if (TREE_CODE (stmt) == MODIFY_EXPR
642 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
643 visit_assignment (stmt);
644
645 /* Definitions made by statements other than assignments to SSA_NAMEs
646 represent unknown modifications to their outputs. Mark them VARYING. */
647 else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
648 {
649 DONT_SIMULATE_AGAIN (stmt) = 1;
650 for (i = 0; i < NUM_DEFS (defs); i++)
651 {
652 tree def = DEF_OP (defs, i);
653 def_to_varying (def);
654 }
655 }
656
657 /* If STMT is a conditional branch, see if we can determine which branch
658 will be taken. */
659 else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
660 visit_cond_stmt (stmt);
661
662 /* Any other kind of statement is not interesting for constant
663 propagation and, therefore, not worth simulating. */
664 else
665 {
666 DONT_SIMULATE_AGAIN (stmt) = 1;
667
668 /* If STMT is a computed goto, then mark all the output edges
669 executable. */
670 if (computed_goto_p (stmt))
671 add_outgoing_control_edges (bb_for_stmt (stmt));
672 }
673
2cf24776 674 /* Mark all V_MAY_DEF operands VARYING. */
675 v_may_defs = V_MAY_DEF_OPS (ann);
676 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
677 def_to_varying (V_MAY_DEF_RESULT (v_may_defs, i));
678
679 /* Mark all V_MUST_DEF operands VARYING. */
680 v_must_defs = V_MUST_DEF_OPS (ann);
681 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
682 def_to_varying (V_MUST_DEF_OP (v_must_defs, i));
4ee9c684 683}
684
685
686/* Visit the assignment statement STMT. Set the value of its LHS to the
687 value computed by the RHS. */
688
689static void
690visit_assignment (tree stmt)
691{
692 value val;
693 tree lhs, rhs;
694
695 lhs = TREE_OPERAND (stmt, 0);
696 rhs = TREE_OPERAND (stmt, 1);
697
698 if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
699 {
700 /* Volatile variables are always VARYING. */
701 val.lattice_val = VARYING;
702 val.const_val = NULL_TREE;
703 }
704 else if (TREE_CODE (rhs) == SSA_NAME)
705 {
706 /* For a simple copy operation, we copy the lattice values. */
707 value *nval = get_value (rhs);
708 val = *nval;
709 }
710 else
711 {
712 /* Evaluate the statement. */
713 val = evaluate_stmt (stmt);
714 }
715
716 /* FIXME: Hack. If this was a definition of a bitfield, we need to widen
717 the constant value into the type of the destination variable. This
718 should not be necessary if GCC represented bitfields properly. */
719 {
720 tree lhs = TREE_OPERAND (stmt, 0);
721 if (val.lattice_val == CONSTANT
722 && TREE_CODE (lhs) == COMPONENT_REF
723 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
724 {
725 tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);
726
727 if (w && is_gimple_min_invariant (w))
728 val.const_val = w;
729 else
730 {
731 val.lattice_val = VARYING;
732 val.const_val = NULL;
733 }
734 }
735 }
736
737 /* Set the lattice value of the statement's output. */
738 set_lattice_value (lhs, val);
739 if (val.lattice_val == VARYING)
740 DONT_SIMULATE_AGAIN (stmt) = 1;
741}
742
743
744/* Visit the conditional statement STMT. If it evaluates to a constant value,
745 mark outgoing edges appropriately. */
746
747static void
748visit_cond_stmt (tree stmt)
749{
750 edge e;
751 value val;
752 basic_block block;
753
754 block = bb_for_stmt (stmt);
755 val = evaluate_stmt (stmt);
756
757 /* Find which edge out of the conditional block will be taken and add it
758 to the worklist. If no single edge can be determined statically, add
759 all outgoing edges from BLOCK. */
760 e = find_taken_edge (block, val.const_val);
761 if (e)
762 add_control_edge (e);
763 else
764 {
765 DONT_SIMULATE_AGAIN (stmt) = 1;
766 add_outgoing_control_edges (block);
767 }
768}
769
770
771/* Add all the edges coming out of BB to the control flow worklist. */
772
773static void
774add_outgoing_control_edges (basic_block bb)
775{
776 edge e;
777
778 for (e = bb->succ; e; e = e->succ_next)
779 add_control_edge (e);
780}
781
782
783/* Add edge E to the control flow worklist. */
784
785static void
786add_control_edge (edge e)
787{
788 basic_block bb = e->dest;
789 if (bb == EXIT_BLOCK_PTR)
790 return;
791
792 /* If the edge had already been executed, skip it. */
793 if (e->flags & EDGE_EXECUTABLE)
794 return;
795
796 e->flags |= EDGE_EXECUTABLE;
797
798 /* If the block is already in the list, we're done. */
799 if (TEST_BIT (bb_in_list, bb->index))
800 return;
801
802 cfg_blocks_add (bb);
803
804 if (dump_file && (dump_flags & TDF_DETAILS))
805 fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
806 e->src->index, e->dest->index);
807}
808
809
810/* CCP specific front-end to the non-destructive constant folding routines.
811
812 Attempt to simplify the RHS of STMT knowing that one or more
813 operands are constants.
814
815 If simplification is possible, return the simplified RHS,
816 otherwise return the original RHS. */
817
818static tree
819ccp_fold (tree stmt)
820{
821 tree rhs = get_rhs (stmt);
822 enum tree_code code = TREE_CODE (rhs);
823 int kind = TREE_CODE_CLASS (code);
824 tree retval = NULL_TREE;
825
826 /* If the RHS is just a variable, then that variable must now have
827 a constant value that we can return directly. */
828 if (TREE_CODE (rhs) == SSA_NAME)
829 return get_value (rhs)->const_val;
830
831 /* Unary operators. Note that we know the single operand must
832 be a constant. So this should almost always return a
833 simplified RHS. */
834 if (kind == '1')
835 {
836 /* Handle unary operators which can appear in GIMPLE form. */
837 tree op0 = TREE_OPERAND (rhs, 0);
838
839 /* Simplify the operand down to a constant. */
840 if (TREE_CODE (op0) == SSA_NAME)
841 {
842 value *val = get_value (op0);
843 if (val->lattice_val == CONSTANT)
844 op0 = get_value (op0)->const_val;
845 }
846
847 retval = nondestructive_fold_unary_to_constant (code,
848 TREE_TYPE (rhs),
849 op0);
850
851 /* If we folded, but did not create an invariant, then we can not
852 use this expression. */
853 if (retval && ! is_gimple_min_invariant (retval))
854 return NULL;
855
856 /* If we could not fold the expression, but the arguments are all
857 constants and gimple values, then build and return the new
858 expression.
859
860 In some cases the new expression is still something we can
861 use as a replacement for an argument. This happens with
862 NOP conversions of types for example.
863
864 In other cases the new expression can not be used as a
865 replacement for an argument (as it would create non-gimple
866 code). But the new expression can still be used to derive
867 other constants. */
868 if (! retval && is_gimple_min_invariant (op0))
869 return build1 (code, TREE_TYPE (rhs), op0);
870 }
871
872 /* Binary and comparison operators. We know one or both of the
873 operands are constants. */
874 else if (kind == '2'
875 || kind == '<'
876 || code == TRUTH_AND_EXPR
877 || code == TRUTH_OR_EXPR
878 || code == TRUTH_XOR_EXPR)
879 {
880 /* Handle binary and comparison operators that can appear in
881 GIMPLE form. */
882 tree op0 = TREE_OPERAND (rhs, 0);
883 tree op1 = TREE_OPERAND (rhs, 1);
884
885 /* Simplify the operands down to constants when appropriate. */
886 if (TREE_CODE (op0) == SSA_NAME)
887 {
888 value *val = get_value (op0);
889 if (val->lattice_val == CONSTANT)
890 op0 = val->const_val;
891 }
892
893 if (TREE_CODE (op1) == SSA_NAME)
894 {
895 value *val = get_value (op1);
896 if (val->lattice_val == CONSTANT)
897 op1 = val->const_val;
898 }
899
900 retval = nondestructive_fold_binary_to_constant (code,
901 TREE_TYPE (rhs),
902 op0, op1);
903
904 /* If we folded, but did not create an invariant, then we can not
905 use this expression. */
906 if (retval && ! is_gimple_min_invariant (retval))
907 return NULL;
908
909 /* If we could not fold the expression, but the arguments are all
910 constants and gimple values, then build and return the new
911 expression.
912
913 In some cases the new expression is still something we can
914 use as a replacement for an argument. This happens with
915 NOP conversions of types for example.
916
917 In other cases the new expression can not be used as a
918 replacement for an argument (as it would create non-gimple
919 code). But the new expression can still be used to derive
920 other constants. */
921 if (! retval
922 && is_gimple_min_invariant (op0)
923 && is_gimple_min_invariant (op1))
924 return build (code, TREE_TYPE (rhs), op0, op1);
925 }
926
927 /* We may be able to fold away calls to builtin functions if their
0bed3869 928 arguments are constants. */
4ee9c684 929 else if (code == CALL_EXPR
930 && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
931 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
932 == FUNCTION_DECL)
933 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
934 {
935 use_optype uses = STMT_USE_OPS (stmt);
936 if (NUM_USES (uses) != 0)
937 {
938 tree *orig;
939 size_t i;
940
941 /* Preserve the original values of every operand. */
942 orig = xmalloc (sizeof (tree) * NUM_USES (uses));
943 for (i = 0; i < NUM_USES (uses); i++)
944 orig[i] = USE_OP (uses, i);
945
946 /* Substitute operands with their values and try to fold. */
947 replace_uses_in (stmt, NULL);
948 retval = fold_builtin (rhs);
949
950 /* Restore operands to their original form. */
951 for (i = 0; i < NUM_USES (uses); i++)
56004dc5 952 SET_USE_OP (uses, i, orig[i]);
4ee9c684 953 free (orig);
954 }
955 }
956 else
957 return rhs;
958
959 /* If we got a simplified form, see if we need to convert its type. */
960 if (retval)
961 {
962 if (TREE_TYPE (retval) != TREE_TYPE (rhs))
ccdd72c8 963 retval = fold_convert (TREE_TYPE (rhs), retval);
4ee9c684 964
965 if (TREE_TYPE (retval) == TREE_TYPE (rhs))
966 return retval;
967 }
968
969 /* No simplification was possible. */
970 return rhs;
971}
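
/* A worked example (annotation, not part of the original source): for
   an RHS of y_2 + 3 where y_2 has lattice value CONSTANT 4, the binary
   case substitutes 4 for y_2 and nondestructive_fold_binary_to_constant
   yields 7. If that folding fails but both operands are gimple
   invariants, the rebuilt expression is returned instead so that it
   can still be used to derive other constants. */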
972
973
974/* Evaluate statement STMT. */
975
976static value
977evaluate_stmt (tree stmt)
978{
979 value val;
980 tree simplified;
981 latticevalue likelyvalue = likely_value (stmt);
982
983 /* If the statement is likely to have a CONSTANT result, then try
984 to fold the statement to determine the constant value. */
985 if (likelyvalue == CONSTANT)
986 simplified = ccp_fold (stmt);
987 /* If the statement is likely to have a VARYING result, then do not
988 bother folding the statement. */
989 else if (likelyvalue == VARYING)
990 simplified = get_rhs (stmt);
991 /* Otherwise the statement is likely to have an UNDEFINED value and
992 there will be nothing to do. */
993 else
994 simplified = NULL_TREE;
995
996 if (simplified && is_gimple_min_invariant (simplified))
997 {
998 /* The statement produced a constant value. */
999 val.lattice_val = CONSTANT;
1000 val.const_val = simplified;
1001 }
1002 else
1003 {
1004 /* The statement produced a nonconstant value. If the statement
1005 had undefined operands, then the result of the statement should
1006 be undefined. Else the result of the statement is VARYING. */
1007 val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
1008 val.const_val = NULL_TREE;
1009 }
1010
1011 return val;
1012}
1013
1014
1015/* Debugging dumps. */
1016
1017static void
1018dump_lattice_value (FILE *outf, const char *prefix, value val)
1019{
1020 switch (val.lattice_val)
1021 {
1022 case UNDEFINED:
1023 fprintf (outf, "%sUNDEFINED", prefix);
1024 break;
1025 case VARYING:
1026 fprintf (outf, "%sVARYING", prefix);
1027 break;
1028 case CONSTANT:
1029 fprintf (outf, "%sCONSTANT ", prefix);
1030 print_generic_expr (outf, val.const_val, dump_flags);
1031 break;
1032 default:
1033 abort ();
1034 }
1035}
1036
1037/* Given a constant value VAL for bitfield FIELD, and a destination
1038 variable VAR, return VAL appropriately widened to fit into VAR. If
1039 FIELD is wider than HOST_WIDE_INT, NULL is returned. */
1040
1041tree
1042widen_bitfield (tree val, tree field, tree var)
1043{
1044 unsigned var_size, field_size;
1045 tree wide_val;
1046 unsigned HOST_WIDE_INT mask;
1047 unsigned i;
1048
1049 var_size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE ((var))));
1050 field_size = TREE_INT_CST_LOW (DECL_SIZE (field));
1051
1052 /* Give up if either the bitfield or the variable are too wide. */
1053 if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
1054 return NULL;
1055
1056#if defined ENABLE_CHECKING
1057 if (var_size < field_size)
1058 abort ();
1059#endif
1060
1061 /* If VAL is not an integer constant, then give up. */
1062 if (TREE_CODE (val) != INTEGER_CST)
1063 return NULL;
1064
1065 /* If the sign bit of the value is not set, or the field's type is
1066 unsigned, then just mask off the high order bits of the value. */
1067 if ((TREE_INT_CST_LOW (val) & (1 << (field_size - 1))) == 0
1068 || DECL_UNSIGNED (field))
1069 {
1070 /* Zero extension. Build a mask with the lower 'field_size' bits
1071 set and a BIT_AND_EXPR node to clear the high order bits of
1072 the value. */
1073 for (i = 0, mask = 0; i < field_size; i++)
1074 mask |= 1 << i;
1075
1076 wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
1077 build_int_2 (mask, 0));
1078 }
1079 else
1080 {
1081 /* Sign extension. Create a mask with the upper 'field_size'
1082 bits set and a BIT_IOR_EXPR to set the high order bits of the
1083 value. */
1084 for (i = 0, mask = 0; i < (var_size - field_size); i++)
1085 mask |= 1 << (var_size - i - 1);
1086
1087 wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
1088 build_int_2 (mask, 0));
1089 }
1090
1091 return fold (wide_val);
1092}
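
/* A worked example (annotation, not part of the original source): for
   a signed 4-bit field in a 32-bit variable with VAL = 0b1010 (-6),
   the sign bit is set, so MASK becomes 0xfffffff0 and the result is
   fold (VAL | MASK) = 0xfffffffa, i.e. -6 widened to 32 bits. For the
   same bits in an unsigned field the zero-extension branch instead
   returns VAL & 0xf. */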
1093
1094
1095/* Function indicating whether we ought to include information for 'var'
1096 when calculating immediate uses. */
1097
1098static bool
1099need_imm_uses_for (tree var)
1100{
1101 return get_value (var)->lattice_val != VARYING;
1102}
1103
1104
1105/* Initialize local data structures and worklists for CCP. */
1106
1107static void
1108initialize (void)
1109{
1110 edge e;
1111 basic_block bb;
1112 sbitmap virtual_var;
1113
01406fc0 1114 /* Worklists of SSA edges. */
4ee9c684 1115 VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
01406fc0 1116 VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");
4ee9c684 1117
1118 executable_blocks = sbitmap_alloc (last_basic_block);
1119 sbitmap_zero (executable_blocks);
1120
1121 bb_in_list = sbitmap_alloc (last_basic_block);
1122 sbitmap_zero (bb_in_list);
1123
c211d998 1124 value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
1125 memset (value_vector, 0, num_ssa_names * sizeof (value));
4ee9c684 1126
1127 /* 1 if ssa variable is used in a virtual variable context. */
c211d998 1128 virtual_var = sbitmap_alloc (num_ssa_names);
4ee9c684 1129 sbitmap_zero (virtual_var);
1130
1131 /* Initialize default values and simulation flags for PHI nodes, statements
1132 and edges. */
1133 FOR_EACH_BB (bb)
1134 {
1135 block_stmt_iterator i;
1136 tree stmt;
1137 stmt_ann_t ann;
1138 def_optype defs;
2cf24776 1139 v_may_def_optype v_may_defs;
1140 v_must_def_optype v_must_defs;
4ee9c684 1141 size_t x;
1142 int vary;
1143
1144 /* Get the default value for each definition. */
1145 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1146 {
1147 vary = 0;
1148 stmt = bsi_stmt (i);
1149 get_stmt_operands (stmt);
1150 ann = stmt_ann (stmt);
1151 defs = DEF_OPS (ann);
1152 for (x = 0; x < NUM_DEFS (defs); x++)
1153 {
1154 tree def = DEF_OP (defs, x);
1155 if (get_value (def)->lattice_val == VARYING)
1156 vary = 1;
1157 }
1158 DONT_SIMULATE_AGAIN (stmt) = vary;
1159
2cf24776 1160 /* Mark all V_MAY_DEF operands VARYING. */
1161 v_may_defs = V_MAY_DEF_OPS (ann);
1162 for (x = 0; x < NUM_V_MAY_DEFS (v_may_defs); x++)
4ee9c684 1163 {
2cf24776 1164 tree res = V_MAY_DEF_RESULT (v_may_defs, x);
4ee9c684 1165 get_value (res)->lattice_val = VARYING;
1166 SET_BIT (virtual_var, SSA_NAME_VERSION (res));
1167 }
2cf24776 1168
1169 /* Mark all V_MUST_DEF operands VARYING. */
1170 v_must_defs = V_MUST_DEF_OPS (ann);
1171 for (x = 0; x < NUM_V_MUST_DEFS (v_must_defs); x++)
1172 {
1173 tree v_must_def = V_MUST_DEF_OP (v_must_defs, x);
1174 get_value (v_must_def)->lattice_val = VARYING;
1175 SET_BIT (virtual_var, SSA_NAME_VERSION (v_must_def));
1176 }
4ee9c684 1177 }
1178
1179 for (e = bb->succ; e; e = e->succ_next)
1180 e->flags &= ~EDGE_EXECUTABLE;
1181 }
1182
1183 /* Now process PHI nodes. */
1184 FOR_EACH_BB (bb)
1185 {
1186 tree phi, var;
1187 int x;
04f8eea3 1188 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4ee9c684 1189 {
1190 value *val;
1191 val = get_value (PHI_RESULT (phi));
1192 if (val->lattice_val != VARYING)
1193 {
1194 for (x = 0; x < PHI_NUM_ARGS (phi); x++)
1195 {
1196 var = PHI_ARG_DEF (phi, x);
1197 /* If one argument is virtual, the result is virtual, and
1198 therefore varying. */
1199 if (TREE_CODE (var) == SSA_NAME)
1200 {
1201 if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
1202 {
1203 val->lattice_val = VARYING;
1204 SET_BIT (virtual_var,
1205 SSA_NAME_VERSION (PHI_RESULT (phi)));
1206 break;
1207 }
1208 }
1209 }
1210 }
1211 DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
1212 }
1213 }
1214
1215 sbitmap_free (virtual_var);
1216 /* Compute immediate uses for variables we care about. */
1217 compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);
1218
1219 if (dump_file && (dump_flags & TDF_DETAILS))
1220 dump_immediate_uses (dump_file);
1221
1222 VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");
1223
1224 /* Seed the algorithm by adding the successors of the entry block to the
1225 edge worklist. */
1226 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1227 {
1228 if (e->dest != EXIT_BLOCK_PTR)
1229 {
1230 e->flags |= EDGE_EXECUTABLE;
1231 cfg_blocks_add (e->dest);
1232 }
1233 }
1234}
1235
1236
1237/* Free allocated storage. */
1238
1239static void
1240finalize (void)
1241{
1242 ssa_edges = NULL;
01406fc0 1243 varying_ssa_edges = NULL;
4ee9c684 1244 cfg_blocks = NULL;
1245 free (value_vector);
1246 sbitmap_free (bb_in_list);
1247 sbitmap_free (executable_blocks);
1248 free_df ();
1249}
1250
 1251/* Return true if the block worklist is empty. */
1252
1253static inline bool
1254cfg_blocks_empty_p (void)
1255{
1256 return (cfg_blocks_num == 0);
1257}
1258
1259/* Add a basic block to the worklist. */
1260
1261static void
1262cfg_blocks_add (basic_block bb)
1263{
1264 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
1265 return;
1266
1267 if (TEST_BIT (bb_in_list, bb->index))
1268 return;
1269
1270 if (cfg_blocks_empty_p ())
1271 {
1272 cfg_blocks_tail = cfg_blocks_head = 0;
1273 cfg_blocks_num = 1;
1274 }
1275 else
1276 {
1277 cfg_blocks_num++;
1278 if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
1279 {
 1280 /* We have to grow the array now. Adjust the queue to occupy the
1281 full space of the original array. */
1282 cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
1283 cfg_blocks_head = 0;
1284 VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
1285 }
1286 else
1287 cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
1288 }
1289 VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
1290 SET_BIT (bb_in_list, bb->index);
1291}
1292
1293/* Remove a block from the worklist. */
1294
1295static basic_block
1296cfg_blocks_get (void)
1297{
1298 basic_block bb;
1299
1300 bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);
1301
1302#ifdef ENABLE_CHECKING
1303 if (cfg_blocks_empty_p () || !bb)
1304 abort ();
1305#endif
1306
1307 cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
1308 --cfg_blocks_num;
1309 RESET_BIT (bb_in_list, bb->index);
1310
1311 return bb;
1312}
1313
1314/* We have just defined a new value for VAR. Add all immediate uses
01406fc0 1315 of VAR to the ssa_edges or varying_ssa_edges worklist. */
4ee9c684 1316static void
01406fc0 1317add_var_to_ssa_edges_worklist (tree var, value val)
4ee9c684 1318{
1319 tree stmt = SSA_NAME_DEF_STMT (var);
1320 dataflow_t df = get_immediate_uses (stmt);
1321 int num_uses = num_immediate_uses (df);
1322 int i;
1323
1324 for (i = 0; i < num_uses; i++)
1325 {
1326 tree use = immediate_use (df, i);
1327
1328 if (!DONT_SIMULATE_AGAIN (use))
1329 {
1330 stmt_ann_t ann = stmt_ann (use);
1331 if (ann->in_ccp_worklist == 0)
1332 {
1333 ann->in_ccp_worklist = 1;
01406fc0 1334 if (val.lattice_val == VARYING)
1335 VARRAY_PUSH_TREE (varying_ssa_edges, use);
1336 else
1337 VARRAY_PUSH_TREE (ssa_edges, use);
4ee9c684 1338 }
1339 }
1340 }
1341}
1342
1343/* Set the lattice value for the variable VAR to VARYING. */
1344
1345static void
1346def_to_varying (tree var)
1347{
1348 value val;
1349 val.lattice_val = VARYING;
1350 val.const_val = NULL_TREE;
1351 set_lattice_value (var, val);
1352}
1353
1354/* Set the lattice value for variable VAR to VAL. */
1355
1356static void
1357set_lattice_value (tree var, value val)
1358{
1359 value *old = get_value (var);
1360
1361#ifdef ENABLE_CHECKING
1362 if (val.lattice_val == UNDEFINED)
1363 {
1364 /* CONSTANT->UNDEFINED is never a valid state transition. */
1365 if (old->lattice_val == CONSTANT)
1366 abort ();
1367
1368 /* VARYING->UNDEFINED is generally not a valid state transition,
1369 except for values which are initialized to VARYING. */
1370 if (old->lattice_val == VARYING
1371 && get_default_value (var).lattice_val != VARYING)
1372 abort ();
1373 }
1374 else if (val.lattice_val == CONSTANT)
1375 {
1376 /* VARYING -> CONSTANT is an invalid state transition, except
1377 for objects which start off in a VARYING state. */
1378 if (old->lattice_val == VARYING
1379 && get_default_value (var).lattice_val != VARYING)
1380 abort ();
1381 }
1382#endif
1383
1384 /* If the constant for VAR has changed, then this VAR is really varying. */
1385 if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT
1386 && !simple_cst_equal (old->const_val, val.const_val))
1387 {
1388 val.lattice_val = VARYING;
1389 val.const_val = NULL_TREE;
1390 }
1391
1392 if (old->lattice_val != val.lattice_val)
1393 {
1394 if (dump_file && (dump_flags & TDF_DETAILS))
1395 {
1396 dump_lattice_value (dump_file,
1397 "Lattice value changed to ", val);
1398 fprintf (dump_file, ". Adding definition to SSA edges.\n");
1399 }
1400
01406fc0 1401 add_var_to_ssa_edges_worklist (var, val);
4ee9c684 1402 *old = val;
1403 }
1404}
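
/* Example transitions (annotation, not part of the original source):
   UNDEFINED -> CONSTANT 5 stores the new value and queues the
   immediate uses of VAR on ssa_edges; a later attempt to set
   CONSTANT 7 disagrees with the stored constant, so the value is
   forced to VARYING and the uses are queued on varying_ssa_edges
   instead. */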
1405
1406/* Replace USE references in statement STMT with their immediate reaching
1407 definition. Return true if at least one reference was replaced. If
1408 REPLACED_ADDRESSES_P is given, it will be set to true if an address
1409 constant was replaced. */
1410
1411static bool
1412replace_uses_in (tree stmt, bool *replaced_addresses_p)
1413{
1414 bool replaced = false;
1415 use_optype uses;
1416 size_t i;
1417
1418 if (replaced_addresses_p)
1419 *replaced_addresses_p = false;
1420
1421 get_stmt_operands (stmt);
1422
1423 uses = STMT_USE_OPS (stmt);
1424 for (i = 0; i < NUM_USES (uses); i++)
1425 {
56004dc5 1426 use_operand_p use = USE_OP_PTR (uses, i);
1427 value *val = get_value (USE_FROM_PTR (use));
4ee9c684 1428
1429 if (val->lattice_val == CONSTANT)
1430 {
56004dc5 1431 SET_USE (use, val->const_val);
4ee9c684 1432 replaced = true;
56004dc5 1433 if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (use)))
1434 && replaced_addresses_p)
4ee9c684 1435 *replaced_addresses_p = true;
1436 }
1437 }
1438
1439 return replaced;
1440}
1441
1442/* Return the likely latticevalue for STMT.
1443
1444 If STMT has no operands, then return CONSTANT.
1445
1446 Else if any operands of STMT are undefined, then return UNDEFINED.
1447
1448 Else if any operands of STMT are constants, then return CONSTANT.
1449
1450 Else return VARYING. */
1451
1452static latticevalue
1453likely_value (tree stmt)
1454{
1455 use_optype uses;
1456 size_t i;
1457 int found_constant = 0;
1458 stmt_ann_t ann;
1459
1460 /* If the statement makes aliased loads or has volatile operands, it
1461 won't fold to a constant value. */
1462 ann = stmt_ann (stmt);
1463 if (ann->makes_aliased_loads || ann->has_volatile_ops)
1464 return VARYING;
1465
1466 /* A CALL_EXPR is assumed to be varying. This may be overly conservative,
1467 in the presence of const and pure calls. */
1468 if (get_call_expr_in (stmt) != NULL_TREE)
1469 return VARYING;
1470
1471 get_stmt_operands (stmt);
1472
1473 uses = USE_OPS (ann);
1474 for (i = 0; i < NUM_USES (uses); i++)
1475 {
1476 tree use = USE_OP (uses, i);
1477 value *val = get_value (use);
1478
1479 if (val->lattice_val == UNDEFINED)
1480 return UNDEFINED;
1481
1482 if (val->lattice_val == CONSTANT)
1483 found_constant = 1;
1484 }
1485
1486 return ((found_constant || !uses) ? CONSTANT : VARYING);
1487}
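
/* Examples (annotation, not part of the original source): for
   x_1 = y_2 + 1 this returns UNDEFINED if y_2 is UNDEFINED, CONSTANT
   if y_2 has a CONSTANT lattice value, and VARYING if y_2 is VARYING.
   A statement with no USE operands, such as x_1 = 5, returns
   CONSTANT. */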
1488
1489/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1490 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
0bed3869 1491 is the desired result type. */
4ee9c684 1492
1493static tree
1494maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
1495{
1496 unsigned HOST_WIDE_INT lquo, lrem;
1497 HOST_WIDE_INT hquo, hrem;
1498 tree elt_size, min_idx, idx;
1499 tree array_type, elt_type;
1500
1501 /* Ignore stupid user tricks of indexing non-array variables. */
1502 array_type = TREE_TYPE (base);
1503 if (TREE_CODE (array_type) != ARRAY_TYPE)
1504 return NULL_TREE;
1505 elt_type = TREE_TYPE (array_type);
1506 if (!lang_hooks.types_compatible_p (orig_type, elt_type))
1507 return NULL_TREE;
1508
1509 /* Whee. Ignore indexing of variable sized types. */
1510 elt_size = TYPE_SIZE_UNIT (elt_type);
1511 if (TREE_CODE (elt_size) != INTEGER_CST)
1512 return NULL_TREE;
1513
1514 /* If the division isn't exact, then don't do anything. Equally
1515 invalid as the above indexing of non-array variables. */
1516 if (div_and_round_double (TRUNC_DIV_EXPR, 1,
1517 TREE_INT_CST_LOW (offset),
1518 TREE_INT_CST_HIGH (offset),
1519 TREE_INT_CST_LOW (elt_size),
1520 TREE_INT_CST_HIGH (elt_size),
1521 &lquo, &hquo, &lrem, &hrem)
1522 || lrem || hrem)
1523 return NULL_TREE;
1524 idx = build_int_2_wide (lquo, hquo);
1525
1526 /* Re-bias the index by the min index of the array type. */
1527 min_idx = TYPE_DOMAIN (TREE_TYPE (base));
1528 if (min_idx)
1529 {
1530 min_idx = TYPE_MIN_VALUE (min_idx);
1531 if (min_idx)
1532 {
1533 idx = convert (TREE_TYPE (min_idx), idx);
1534 if (!integer_zerop (min_idx))
1535 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 1);
1536 }
1537 }
1538
1539 return build (ARRAY_REF, orig_type, base, idx);
1540}
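
/* A worked example (annotation, not part of the original source,
   assuming 4-byte ints): for BASE of type int[10] and OFFSET 8,
   ELT_SIZE is 4 and the division is exact with quotient 2, so the
   result is the ARRAY_REF base[2]. An OFFSET of 6 leaves a nonzero
   remainder and returns NULL_TREE. */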
1541
1542/* A subroutine of fold_stmt_r. Attempts to fold *(S+O) to S.X.
1543 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1544 is the desired result type. */
1545/* ??? This doesn't handle class inheritance. */
1546
1547static tree
1548maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1549 tree orig_type, bool base_is_ptr)
1550{
1551 tree f, t, field_type, tail_array_field;
1552
1553 if (TREE_CODE (record_type) != RECORD_TYPE
1554 && TREE_CODE (record_type) != UNION_TYPE
1555 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1556 return NULL_TREE;
1557
1558 /* Short-circuit silly cases. */
1559 if (lang_hooks.types_compatible_p (record_type, orig_type))
1560 return NULL_TREE;
1561
1562 tail_array_field = NULL_TREE;
1563 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1564 {
1565 int cmp;
1566
1567 if (TREE_CODE (f) != FIELD_DECL)
1568 continue;
1569 if (DECL_BIT_FIELD (f))
1570 continue;
1571 if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
1572 continue;
1573
1574 /* ??? Java creates "interesting" fields for representing base classes.
1575 They have no name, and have no context. With no context, we get into
1576 trouble with nonoverlapping_component_refs_p. Skip them. */
1577 if (!DECL_FIELD_CONTEXT (f))
1578 continue;
1579
1580 /* The previous array field isn't at the end. */
1581 tail_array_field = NULL_TREE;
1582
1583 /* Check to see if this offset overlaps with the field. */
1584 cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
1585 if (cmp > 0)
1586 continue;
1587
1588 field_type = TREE_TYPE (f);
1589 if (cmp < 0)
1590 {
1591 /* Don't care about offsets into the middle of scalars. */
1592 if (!AGGREGATE_TYPE_P (field_type))
1593 continue;
1594
1595 /* Check for array at the end of the struct. This is often
 1596 used for flexible array members. We should be able to
1597 turn this into an array access anyway. */
1598 if (TREE_CODE (field_type) == ARRAY_TYPE)
1599 tail_array_field = f;
1600
1601 /* Check the end of the field against the offset. */
1602 if (!DECL_SIZE_UNIT (f)
1603 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1604 continue;
1605 t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
1606 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1607 continue;
1608
1609 /* If we matched, then set offset to the displacement into
1610 this field. */
1611 offset = t;
1612 }
1613
1614 /* Here we exactly match the offset being checked. If the types match,
1615 then we can return that field. */
1616 else if (lang_hooks.types_compatible_p (orig_type, field_type))
1617 {
1618 if (base_is_ptr)
1619 base = build1 (INDIRECT_REF, record_type, base);
1620 t = build (COMPONENT_REF, field_type, base, f);
1621 return t;
1622 }
1623
1624 /* Don't care about type-punning of scalars. */
1625 else if (!AGGREGATE_TYPE_P (field_type))
1626 return NULL_TREE;
1627
1628 goto found;
1629 }
1630
1631 if (!tail_array_field)
1632 return NULL_TREE;
1633
1634 f = tail_array_field;
1635 field_type = TREE_TYPE (f);
1636
1637 found:
1638 /* If we get here, we've got an aggregate field, and a possibly
365db11e 1639 nonzero offset into them. Recurse and hope for a valid match. */
4ee9c684 1640 if (base_is_ptr)
1641 base = build1 (INDIRECT_REF, record_type, base);
1642 base = build (COMPONENT_REF, field_type, base, f);
1643
1644 t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
1645 if (t)
1646 return t;
1647 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1648 orig_type, false);
1649}
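
/* A worked example (annotation, not part of the original source,
   assuming 4-byte ints and no padding): for the record type
   struct { int a; int b; } and OFFSET 4, the field walk matches B
   exactly and the result is the COMPONENT_REF base.b. An OFFSET
   landing in the middle of a scalar field yields NULL_TREE. */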
1650
1651/* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1652 Return the simplified expression, or NULL if nothing could be done. */
1653
1654static tree
1655maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1656{
1657 tree t;
1658
1659 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1660 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1661 are sometimes added. */
1662 base = fold (base);
1663 STRIP_NOPS (base);
1664 TREE_OPERAND (expr, 0) = base;
1665
1666 /* One possibility is that the address reduces to a string constant. */
1667 t = fold_read_from_constant_string (expr);
1668 if (t)
1669 return t;
1670
1671 /* Add in any offset from a PLUS_EXPR. */
1672 if (TREE_CODE (base) == PLUS_EXPR)
1673 {
1674 tree offset2;
1675
1676 offset2 = TREE_OPERAND (base, 1);
1677 if (TREE_CODE (offset2) != INTEGER_CST)
1678 return NULL_TREE;
1679 base = TREE_OPERAND (base, 0);
1680
1681 offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
1682 }
1683
1684 if (TREE_CODE (base) == ADDR_EXPR)
1685 {
1686 /* Strip the ADDR_EXPR. */
1687 base = TREE_OPERAND (base, 0);
1688
1689 /* Try folding *(&B+O) to B[X]. */
1690 t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
1691 if (t)
1692 return t;
1693
1694 /* Try folding *(&B+O) to B.X. */
1695 t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
1696 TREE_TYPE (expr), false);
1697 if (t)
1698 return t;
1699
1700 /* Fold *&B to B. */
1701 if (integer_zerop (offset))
1702 return base;
1703 }
1704 else
1705 {
1706 /* We can get here for out-of-range string constant accesses,
1707 such as "_"[3]. Bail out of the entire substitution search
1708 and arrange for the entire statement to be replaced by a
 1709 call to __builtin_trap. In all likelihood this will all be
1710 constant-folded away, but in the meantime we can't leave with
1711 something that get_expr_operands can't understand. */
1712
1713 t = base;
1714 STRIP_NOPS (t);
1715 if (TREE_CODE (t) == ADDR_EXPR
1716 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
1717 {
1718 /* FIXME: Except that this causes problems elsewhere with dead
1719 code not being deleted, and we abort in the rtl expanders
1720 because we failed to remove some ssa_name. In the meantime,
1721 just return zero. */
1722 /* FIXME2: This condition should be signaled by
1723 fold_read_from_constant_string directly, rather than
1724 re-checking for it here. */
1725 return integer_zero_node;
1726 }
1727
1728 /* Try folding *(B+O) to B->X. Still an improvement. */
1729 if (POINTER_TYPE_P (TREE_TYPE (base)))
1730 {
1731 t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
1732 base, offset,
1733 TREE_TYPE (expr), true);
1734 if (t)
1735 return t;
1736 }
1737 }
1738
1739 /* Otherwise we had an offset that we could not simplify. */
1740 return NULL_TREE;
1741}
1742
1743/* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1744
1745 A quaint feature extant in our address arithmetic is that there
1746 can be hidden type changes here. The type of the result need
1747 not be the same as the type of the input pointer.
1748
1749 What we're after here is an expression of the form
1750 (T *)(&array + const)
1751 where the cast doesn't actually exist, but is implicit in the
1752 type of the PLUS_EXPR. We'd like to turn this into
1753 &array[x]
1754 which may be able to propagate further. */
1755
1756static tree
1757maybe_fold_stmt_addition (tree expr)
1758{
1759 tree op0 = TREE_OPERAND (expr, 0);
1760 tree op1 = TREE_OPERAND (expr, 1);
1761 tree ptr_type = TREE_TYPE (expr);
1762 tree ptd_type;
1763 tree t;
1764 bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
1765
1766 /* We're only interested in pointer arithmetic. */
1767 if (!POINTER_TYPE_P (ptr_type))
1768 return NULL_TREE;
1769 /* Canonicalize the integral operand to op1. */
1770 if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
1771 {
1772 if (subtract)
1773 return NULL_TREE;
1774 t = op0, op0 = op1, op1 = t;
1775 }
1776 /* It had better be a constant. */
1777 if (TREE_CODE (op1) != INTEGER_CST)
1778 return NULL_TREE;
1779 /* The first operand should be an ADDR_EXPR. */
1780 if (TREE_CODE (op0) != ADDR_EXPR)
1781 return NULL_TREE;
1782 op0 = TREE_OPERAND (op0, 0);
1783
1784 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1785 the offset into it. */
1786 while (TREE_CODE (op0) == ARRAY_REF)
1787 {
1788 tree array_obj = TREE_OPERAND (op0, 0);
1789 tree array_idx = TREE_OPERAND (op0, 1);
1790 tree elt_type = TREE_TYPE (op0);
1791 tree elt_size = TYPE_SIZE_UNIT (elt_type);
1792 tree min_idx;
1793
1794 if (TREE_CODE (array_idx) != INTEGER_CST)
1795 break;
1796 if (TREE_CODE (elt_size) != INTEGER_CST)
1797 break;
1798
1799 /* Un-bias the index by the min index of the array type. */
1800 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
1801 if (min_idx)
1802 {
1803 min_idx = TYPE_MIN_VALUE (min_idx);
1804 if (min_idx)
1805 {
1806 array_idx = convert (TREE_TYPE (min_idx), array_idx);
1807 if (!integer_zerop (min_idx))
1808 array_idx = int_const_binop (MINUS_EXPR, array_idx,
1809 min_idx, 0);
1810 }
1811 }
1812
1813 /* Convert the index to a byte offset. */
1814 array_idx = convert (sizetype, array_idx);
1815 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
1816
1817 /* Update the operands for the next round, or for folding. */
1818 /* If we're manipulating unsigned types, then folding into negative
1819 values can produce incorrect results. Particularly if the type
1820 is smaller than the width of the pointer. */
1821 if (subtract
1822 && TYPE_UNSIGNED (TREE_TYPE (op1))
1823 && tree_int_cst_lt (array_idx, op1))
1824 return NULL;
1825 op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
1826 array_idx, op1, 0);
1827 subtract = false;
1828 op0 = array_obj;
1829 }
1830
1831 /* If we weren't able to fold the subtraction into another array reference,
1832 canonicalize the integer for passing to the array and component ref
1833 simplification functions. */
1834 if (subtract)
1835 {
1836 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
1837 return NULL;
1838 op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
1839 /* ??? In theory fold should always produce another integer. */
1840 if (TREE_CODE (op1) != INTEGER_CST)
1841 return NULL;
1842 }
1843
1844 ptd_type = TREE_TYPE (ptr_type);
1845
1846 /* At which point we can try some of the same things as for indirects. */
1847 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
1848 if (!t)
1849 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
1850 ptd_type, false);
1851 if (t)
1852 t = build1 (ADDR_EXPR, ptr_type, t);
1853
1854 return t;
1855}
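
/* A worked example (annotation, not part of the original source,
   assuming 4-byte ints): for int a[10], the expression &a[1] + 4
   first expands the ARRAY_REF, converting index 1 into the byte
   offset 4 and accumulating the constant to 8, and then re-folds
   against the array type to produce &a[2]. */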
1856
1857/* Subroutine of fold_stmt called via walk_tree. We perform several
1858 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
1859
1860static tree
1861fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
1862{
1863 bool *changed_p = data;
1864 tree expr = *expr_p, t;
1865
1866 /* ??? It'd be nice if walk_tree had a pre-order option. */
1867 switch (TREE_CODE (expr))
1868 {
1869 case INDIRECT_REF:
1870 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1871 if (t)
1872 return t;
1873 *walk_subtrees = 0;
1874
1875 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
1876 integer_zero_node);
1877 break;
1878
1879 /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
1880 We'd only want to bother decomposing an existing ARRAY_REF if
1881 the base array is found to have another offset contained within.
1882 Otherwise we'd be wasting time. */
1883
1884 case ADDR_EXPR:
1885 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1886 if (t)
1887 return t;
1888 *walk_subtrees = 0;
1889
1890 /* Set TREE_INVARIANT properly so that the value is properly
1891 considered constant, and so gets propagated as expected. */
1892 if (*changed_p)
1893 recompute_tree_invarant_for_addr_expr (expr);
1894 return NULL_TREE;
1895
1896 case PLUS_EXPR:
1897 case MINUS_EXPR:
1898 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1899 if (t)
1900 return t;
1901 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
1902 if (t)
1903 return t;
1904 *walk_subtrees = 0;
1905
1906 t = maybe_fold_stmt_addition (expr);
1907 break;
1908
1909 case COMPONENT_REF:
1910 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1911 if (t)
1912 return t;
1913 *walk_subtrees = 0;
1914
      /* Make sure the FIELD_DECL is actually a field in the type on
	 the lhs.  With IMA (intermodule analysis) it is possible that
	 the field came from another, equivalent type at this point; the
	 equivalence itself has already been checked in that case.
	 Match on type plus offset, to allow for unnamed fields.  We
	 won't necessarily get the corresponding field for unions; this
	 is believed to be harmless.  */
1922
1923 if ((current_file_decl && TREE_CHAIN (current_file_decl))
1924 && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1)) !=
1925 TREE_TYPE (TREE_OPERAND (expr, 0))))
1926 {
1927 tree f;
1928 tree orig_field = TREE_OPERAND (expr, 1);
1929 tree orig_type = TREE_TYPE (orig_field);
1930 for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1931 f; f = TREE_CHAIN (f))
1932 {
1933 if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type)
1934 && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f),
1935 DECL_FIELD_BIT_OFFSET (orig_field))
1936 == 0
1937 && tree_int_cst_compare (DECL_FIELD_OFFSET (f),
1938 DECL_FIELD_OFFSET (orig_field))
1939 == 0)
1940 {
1941 TREE_OPERAND (expr, 1) = f;
1942 break;
1943 }
1944 }
	  /* Falling out of the loop without finding a replacement
	     field is an error; it will be detected in tree-sra.  */
4ee9c684 1946 }
1947 break;
1948
1949 default:
1950 return NULL_TREE;
1951 }
1952
1953 if (t)
1954 {
1955 *expr_p = t;
1956 *changed_p = true;
1957 }
1958
1959 return NULL_TREE;
1960}
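
/* For instance (illustrative only): once CCP has substituted "p_1 = &x"
   into a dereference, the INDIRECT_REF case above lets
   maybe_fold_stmt_indirect rewrite "*p_1" as plain "x", and the
   PLUS_EXPR/MINUS_EXPR case refolds constant pointer arithmetic via
   maybe_fold_stmt_addition.  */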
1961
/* Fold the statement pointed to by STMT_P.  In some cases, this function
   may replace the whole statement with a new one.  Returns true iff
   folding makes any changes.  */
1965
1966bool
1967fold_stmt (tree *stmt_p)
1968{
1969 tree rhs, result, stmt;
1970 bool changed = false;
1971
1972 stmt = *stmt_p;
1973
1974 /* If we replaced constants and the statement makes pointer dereferences,
1975 then we may need to fold instances of *&VAR into VAR, etc. */
1976 if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
1977 {
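      /* A non-NULL return from the walk is taken to mean that folding
	 proved the statement invokes undefined behavior; replace it
	 with a trap (this reading is inferred from the use of
	 BUILT_IN_TRAP here, not stated elsewhere in this file).  */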
1978 *stmt_p
1979 = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
1980 NULL);
1981 return true;
1982 }
1983
1984 rhs = get_rhs (stmt);
1985 if (!rhs)
1986 return changed;
1987 result = NULL_TREE;
1988
1989 /* Check for builtins that CCP can handle using information not
1990 available in the generic fold routines. */
1991 if (TREE_CODE (rhs) == CALL_EXPR)
1992 {
1993 tree callee = get_callee_fndecl (rhs);
1994 if (callee && DECL_BUILT_IN (callee))
1995 result = ccp_fold_builtin (stmt, rhs);
1996 }
1997
1998 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
1999 if (result == NULL_TREE)
2000 result = fold (rhs);
2001
  /* Strip away useless type conversions: both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
2005 STRIP_MAIN_TYPE_NOPS (result);
2006 STRIP_USELESS_TYPE_CONVERSION (result);
2007
2008 if (result != rhs)
2009 {
2010 changed = true;
2011 set_rhs (stmt_p, result);
2012 }
2013
2014 return changed;
2015}
2016
2017/* Get the main expression from statement STMT. */
2018
2019static tree
2020get_rhs (tree stmt)
2021{
2022 enum tree_code code = TREE_CODE (stmt);
2023
2024 if (code == MODIFY_EXPR)
2025 return TREE_OPERAND (stmt, 1);
  else if (code == COND_EXPR)
2027 return COND_EXPR_COND (stmt);
2028 else if (code == SWITCH_EXPR)
2029 return SWITCH_COND (stmt);
2030 else if (code == RETURN_EXPR)
2031 {
2032 if (!TREE_OPERAND (stmt, 0))
2033 return NULL_TREE;
2034 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
2035 return TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2036 else
2037 return TREE_OPERAND (stmt, 0);
2038 }
2039 else if (code == GOTO_EXPR)
2040 return GOTO_DESTINATION (stmt);
2041 else if (code == LABEL_EXPR)
2042 return LABEL_EXPR_LABEL (stmt);
2043 else
2044 return stmt;
2045}
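
/* Illustrative examples: for "a_1 = b_2 + 1" get_rhs returns "b_2 + 1";
   for "if (a_1 > 0)" it returns the condition "a_1 > 0"; and for
   "return a_1 = b_2" it returns "b_2".  */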
2046
2047
2048/* Set the main expression of *STMT_P to EXPR. */
2049
2050static void
2051set_rhs (tree *stmt_p, tree expr)
2052{
2053 tree stmt = *stmt_p;
2054 enum tree_code code = TREE_CODE (stmt);
2055
2056 if (code == MODIFY_EXPR)
2057 TREE_OPERAND (stmt, 1) = expr;
2058 else if (code == COND_EXPR)
2059 COND_EXPR_COND (stmt) = expr;
2060 else if (code == SWITCH_EXPR)
2061 SWITCH_COND (stmt) = expr;
2062 else if (code == RETURN_EXPR)
2063 {
2064 if (TREE_OPERAND (stmt, 0)
2065 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
2066 TREE_OPERAND (TREE_OPERAND (stmt, 0), 1) = expr;
2067 else
2068 TREE_OPERAND (stmt, 0) = expr;
2069 }
2070 else if (code == GOTO_EXPR)
2071 GOTO_DESTINATION (stmt) = expr;
2072 else if (code == LABEL_EXPR)
2073 LABEL_EXPR_LABEL (stmt) = expr;
2074 else
2075 {
2076 /* Replace the whole statement with EXPR. If EXPR has no side
2077 effects, then replace *STMT_P with an empty statement. */
2078 stmt_ann_t ann = stmt_ann (stmt);
2079 *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
8a12616d 2080 (*stmt_p)->common.ann = (tree_ann_t) ann;
4ee9c684 2081
2082 if (TREE_SIDE_EFFECTS (expr))
2083 {
2084 def_optype defs;
2cf24776 2085 v_may_def_optype v_may_defs;
2086 v_must_def_optype v_must_defs;
4ee9c684 2087 size_t i;
2088
	  /* Fix all the SSA_NAMEs defined by the old statement to
	     point to its replacement.  */
2091 defs = DEF_OPS (ann);
2092 for (i = 0; i < NUM_DEFS (defs); i++)
2093 {
2094 tree var = DEF_OP (defs, i);
2095 if (TREE_CODE (var) == SSA_NAME)
2096 SSA_NAME_DEF_STMT (var) = *stmt_p;
2097 }
2098
2cf24776 2099 v_may_defs = V_MAY_DEF_OPS (ann);
2100 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
2101 {
2102 tree var = V_MAY_DEF_RESULT (v_may_defs, i);
2103 if (TREE_CODE (var) == SSA_NAME)
2104 SSA_NAME_DEF_STMT (var) = *stmt_p;
2105 }
2106
2107 v_must_defs = V_MUST_DEF_OPS (ann);
2108 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
4ee9c684 2109 {
2cf24776 2110 tree var = V_MUST_DEF_OP (v_must_defs, i);
4ee9c684 2111 if (TREE_CODE (var) == SSA_NAME)
2112 SSA_NAME_DEF_STMT (var) = *stmt_p;
2113 }
2114 }
2115 }
2116}
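
/* For example (illustrative): when a bare call statement such as
   "foo ()" is itself the expression being replaced, the final branch
   above substitutes the new expression for the whole statement, moves
   the statement annotation over, and retargets SSA_NAME_DEF_STMT for
   every real and virtual definition of the old statement.  */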
2117
2118
2119/* Return a default value for variable VAR using the following rules:
2120
2121 1- Global and static variables are considered VARYING, unless they are
2122 declared const.
2123
   2- Function arguments and volatile variables are considered VARYING.
2125
2126 3- Any other value is considered UNDEFINED. This is useful when
2127 considering PHI nodes. PHI arguments that are undefined do not
2128 change the constant value of the PHI node, which allows for more
2129 constants to be propagated. */
2130
2131static value
2132get_default_value (tree var)
2133{
2134 value val;
2135 tree sym;
2136
2137 if (TREE_CODE (var) == SSA_NAME)
2138 sym = SSA_NAME_VAR (var);
2139 else
2140 {
2141#ifdef ENABLE_CHECKING
2142 if (!DECL_P (var))
2143 abort ();
2144#endif
2145 sym = var;
2146 }
2147
2148 val.lattice_val = UNDEFINED;
2149 val.const_val = NULL_TREE;
2150
2151 if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
2152 {
2153 /* Function arguments and volatile variables are considered VARYING. */
2154 val.lattice_val = VARYING;
2155 }
2156 else if (decl_function_context (sym) != current_function_decl
2157 || TREE_STATIC (sym))
2158 {
2159 /* Globals and static variables are considered VARYING, unless they
2160 are declared 'const'. */
2161 val.lattice_val = VARYING;
2162
2163 if (TREE_READONLY (sym)
2164 && DECL_INITIAL (sym)
2165 && is_gimple_min_invariant (DECL_INITIAL (sym)))
2166 {
2167 val.lattice_val = CONSTANT;
2168 val.const_val = DECL_INITIAL (sym);
2169 }
2170 }
2171 else
2172 {
2173 enum tree_code code;
2174 tree stmt = SSA_NAME_DEF_STMT (var);
2175
2176 if (!IS_EMPTY_STMT (stmt))
2177 {
2178 code = TREE_CODE (stmt);
2179 if (code != MODIFY_EXPR && code != PHI_NODE)
2180 val.lattice_val = VARYING;
2181 }
2182 }
2183
2184 return val;
2185}
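
/* Illustrative cases: an SSA name for "static const int ten = 10;"
   starts out CONSTANT with value 10; a PARM_DECL or volatile variable
   starts out VARYING; a local whose defining statement has not been
   visited yet starts out UNDEFINED.  */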
2186
2187
/* Fold builtin call FN in statement STMT.  If the call cannot be
   simplified, return NULL_TREE.  Otherwise, return the simplified
   (not necessarily constant) replacement expression.  */
2190
2191static tree
2192ccp_fold_builtin (tree stmt, tree fn)
2193{
2194 tree result, strlen_val[2];
2195 tree arglist = TREE_OPERAND (fn, 1), a;
2196 tree callee = get_callee_fndecl (fn);
2197 bitmap visited;
2198 int strlen_arg, i;
2199
  /* Ignore machine-dependent (MD) builtins.  */
2201 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2202 return NULL_TREE;
2203
2204 /* First try the generic builtin folder. If that succeeds, return the
2205 result directly. */
2206 result = fold_builtin (fn);
2207 if (result)
2208 return result;
2209
2210 /* If the builtin could not be folded, and it has no argument list,
2211 we're done. */
2212 if (!arglist)
2213 return NULL_TREE;
2214
2215 /* Limit the work only for builtins we know how to simplify. */
2216 switch (DECL_FUNCTION_CODE (callee))
2217 {
2218 case BUILT_IN_STRLEN:
2219 case BUILT_IN_FPUTS:
2220 case BUILT_IN_FPUTS_UNLOCKED:
2221 strlen_arg = 1;
2222 break;
2223 case BUILT_IN_STRCPY:
2224 case BUILT_IN_STRNCPY:
2225 strlen_arg = 2;
2226 break;
2227 default:
2228 return NULL_TREE;
2229 }
2230
2231 /* Try to use the dataflow information gathered by the CCP process. */
2232 visited = BITMAP_XMALLOC ();
2233
2234 memset (strlen_val, 0, sizeof (strlen_val));
2235 for (i = 0, a = arglist;
2236 strlen_arg;
2237 i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
2238 if (strlen_arg & 1)
2239 {
2240 bitmap_clear (visited);
2241 if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
2242 strlen_val[i] = NULL_TREE;
2243 }
2244
2245 BITMAP_XFREE (visited);
2246
2247 /* FIXME. All this code looks dangerous in the sense that it might
2248 create non-gimple expressions. */
2249 switch (DECL_FUNCTION_CODE (callee))
2250 {
2251 case BUILT_IN_STRLEN:
2252 /* Convert from the internal "sizetype" type to "size_t". */
2253 if (strlen_val[0]
2254 && size_type_node)
2255 {
2256 tree new = convert (size_type_node, strlen_val[0]);
2257
	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
2260 if (is_gimple_val (new)
2261 || (is_gimple_cast (new)
2262 && is_gimple_val (TREE_OPERAND (new, 0))))
2263 return new;
2264 else
2265 return NULL_TREE;
2266 }
2267 return strlen_val[0];
    case BUILT_IN_STRCPY:
      if (strlen_val[1]
	  && is_gimple_val (strlen_val[1]))
	return simplify_builtin_strcpy (arglist, strlen_val[1]);
      /* Do not fall through: a strcpy whose source length is unknown
	 must not be handed to the fputs simplifier below.  */
      return NULL_TREE;
    case BUILT_IN_STRNCPY:
      if (strlen_val[1]
	  && is_gimple_val (strlen_val[1]))
	return simplify_builtin_strncpy (arglist, strlen_val[1]);
      return NULL_TREE;
2276 case BUILT_IN_FPUTS:
2277 return simplify_builtin_fputs (arglist,
2278 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2279 strlen_val[0]);
2280 case BUILT_IN_FPUTS_UNLOCKED:
2281 return simplify_builtin_fputs (arglist,
2282 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2283 strlen_val[0]);
2284
2285 default:
2286 abort ();
2287 }
2288
2289 return NULL_TREE;
2290}
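
/* For instance (illustrative): "n_1 = strlen (p_2)" where CCP has
   proved that p_2 points to "abc" folds to "n_1 = 3", and a strcpy
   whose source has known length may be simplified by
   simplify_builtin_strcpy into the equivalent memcpy.  */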
2291
2292
/* Return the string length of ARG in *LENGTH.  If ARG is an SSA name
   variable, follow its use-def chains.  If *LENGTH is non-null and not
   equal to the length we determine, or if we are unable to determine the
   length, return false.  VISITED is a bitmap of visited SSA names.  */
2297
2298static bool
2299get_strlen (tree arg, tree *length, bitmap visited)
2300{
2301 tree var, def_stmt, val;
2302
2303 if (TREE_CODE (arg) != SSA_NAME)
2304 {
2305 val = c_strlen (arg, 1);
2306 if (!val)
2307 return false;
2308
2309 if (*length && simple_cst_equal (val, *length) != 1)
2310 return false;
2311
2312 *length = val;
2313 return true;
2314 }
2315
2316 /* If we were already here, break the infinite cycle. */
2317 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2318 return true;
2319 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2320
2321 var = arg;
2322 def_stmt = SSA_NAME_DEF_STMT (var);
2323
2324 switch (TREE_CODE (def_stmt))
2325 {
2326 case MODIFY_EXPR:
2327 {
2328 tree len, rhs;
2329
2330 /* The RHS of the statement defining VAR must either have a
2331 constant length or come from another SSA_NAME with a constant
2332 length. */
2333 rhs = TREE_OPERAND (def_stmt, 1);
2334 STRIP_NOPS (rhs);
2335 if (TREE_CODE (rhs) == SSA_NAME)
2336 return get_strlen (rhs, length, visited);
2337
2338 /* See if the RHS is a constant length. */
2339 len = c_strlen (rhs, 1);
2340 if (len)
2341 {
2342 if (*length && simple_cst_equal (len, *length) != 1)
2343 return false;
2344
2345 *length = len;
2346 return true;
2347 }
2348
2349 break;
2350 }
2351
2352 case PHI_NODE:
2353 {
2354 /* All the arguments of the PHI node must have the same constant
2355 length. */
2356 int i;
2357
2358 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2359 {
2360 tree arg = PHI_ARG_DEF (def_stmt, i);
2361
2362 /* If this PHI has itself as an argument, we cannot
2363 determine the string length of this argument. However,
2364 if we can find a constant string length for the other
2365 PHI args then we can still be sure that this is a
2366 constant string length. So be optimistic and just
2367 continue with the next argument. */
2368 if (arg == PHI_RESULT (def_stmt))
2369 continue;
2370
2371 if (!get_strlen (arg, length, visited))
2372 return false;
2373 }
2374
2375 return true;
2376 }
2377
2378 default:
2379 break;
2380 }
2381
2383 return false;
2384}
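
/* Example (illustrative): with

     if (cond) p_1 = "foo"; else p_2 = "bar";
     # p_3 = PHI <p_1, p_2>;

   both PHI arguments have string length 3, so get_strlen succeeds with
   *LENGTH == 3; had the two lengths differed, it would return false.  */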
2385
2386\f
2387/* A simple pass that attempts to fold all builtin functions. This pass
2388 is run after we've propagated as many constants as we can. */
2389
2390static void
2391execute_fold_all_builtins (void)
2392{
2393 basic_block bb;
2394 FOR_EACH_BB (bb)
2395 {
2396 block_stmt_iterator i;
2397 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
2398 {
2399 tree *stmtp = bsi_stmt_ptr (i);
2400 tree call = get_rhs (*stmtp);
2401 tree callee, result;
2402
2403 if (!call || TREE_CODE (call) != CALL_EXPR)
2404 continue;
2405 callee = get_callee_fndecl (call);
2406 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2407 continue;
2408
2409 result = ccp_fold_builtin (*stmtp, call);
2410 if (!result)
2411 switch (DECL_FUNCTION_CODE (callee))
2412 {
2413 case BUILT_IN_CONSTANT_P:
2414 /* Resolve __builtin_constant_p. If it hasn't been
2415 folded to integer_one_node by now, it's fairly
2416 certain that the value simply isn't constant. */
2417 result = integer_zero_node;
2418 break;
2419
2420 default:
2421 continue;
2422 }
2423
2424 if (dump_file && (dump_flags & TDF_DETAILS))
2425 {
2426 fprintf (dump_file, "Simplified\n ");
2427 print_generic_stmt (dump_file, *stmtp, dump_flags);
2428 }
2429
2430 set_rhs (stmtp, result);
2431 modify_stmt (*stmtp);
2432
2433 if (dump_file && (dump_flags & TDF_DETAILS))
2434 {
2435 fprintf (dump_file, "to\n ");
2436 print_generic_stmt (dump_file, *stmtp, dump_flags);
2437 fprintf (dump_file, "\n");
2438 }
2439 }
2440 }
2441}
2442
2443struct tree_opt_pass pass_fold_builtins =
2444{
2445 "fab", /* name */
2446 NULL, /* gate */
2447 execute_fold_all_builtins, /* execute */
2448 NULL, /* sub */
2449 NULL, /* next */
2450 0, /* static_pass_number */
2451 0, /* tv_id */
2452 PROP_cfg | PROP_ssa, /* properties_required */
2453 0, /* properties_provided */
2454 0, /* properties_destroyed */
2455 0, /* todo_flags_start */
2456 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
2457};
2458
2459
2460#include "gt-tree-ssa-ccp.h"