9e9e6e3e 1/* SCC value numbering for trees
fbd26352 2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
9e9e6e3e 3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
9e9e6e3e 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
9e9e6e3e 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
95579ce1 24#include "splay-tree.h"
9ef16211 25#include "backend.h"
7c29e30e 26#include "rtl.h"
9e9e6e3e 27#include "tree.h"
9ef16211 28#include "gimple.h"
9ef16211 29#include "ssa.h"
7c29e30e 30#include "expmed.h"
31#include "insn-config.h"
ad7b10a2 32#include "memmodel.h"
7c29e30e 33#include "emit-rtl.h"
34#include "cgraph.h"
35#include "gimple-pretty-print.h"
9ef16211 36#include "alias.h"
b20a8bb4 37#include "fold-const.h"
9ed99284 38#include "stor-layout.h"
94ea8568 39#include "cfganal.h"
9e9e6e3e 40#include "tree-inline.h"
bc61cadb 41#include "internal-fn.h"
42#include "gimple-fold.h"
43#include "tree-eh.h"
a8783bee 44#include "gimplify.h"
d53441c8 45#include "flags.h"
d53441c8 46#include "dojump.h"
47#include "explow.h"
48#include "calls.h"
d53441c8 49#include "varasm.h"
50#include "stmt.h"
9ed99284 51#include "expr.h"
073c1fd5 52#include "tree-dfa.h"
53#include "tree-ssa.h"
b9ed1410 54#include "dumpfile.h"
9e9e6e3e 55#include "cfgloop.h"
a9b2282e 56#include "params.h"
1c6d350b 57#include "tree-ssa-propagate.h"
85e9a542 58#include "tree-cfg.h"
59#include "domwalk.h"
db981500 60#include "gimple-iterator.h"
eb074ef3 61#include "gimple-match.h"
30a86690 62#include "stringpool.h"
63#include "attribs.h"
2201c330 64#include "tree-pass.h"
65#include "statistics.h"
66#include "langhooks.h"
67#include "ipa-utils.h"
68#include "dbgcnt.h"
69#include "tree-cfgcleanup.h"
70#include "tree-ssa-loop.h"
71#include "tree-scalar-evolution.h"
b51523c4 72#include "tree-ssa-loop-niter.h"
886e6c18 73#include "builtins.h"
2201c330 74#include "tree-ssa-sccvn.h"
9e9e6e3e 75
76/* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight line code, it is equivalent to a regular hash based value
80 numbering that is performed in reverse postorder.
81
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
85
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
90
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
95
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
99
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
104
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
108
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
111
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
117
118 In order to value number memory, we assign value numbers to vuses.
 119 This enables us to note that, for example, stores of the same
 120 value to the same address from the same starting memory state are
99698cf3 121 equivalent.
9e9e6e3e 122 TODO:
123
 124 1. We can iterate only the changing portions of the SCCs, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies.
131*/
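/* Illustrative sketch (editorial example, not taken from the paper above):
   for a simple loop-carried cycle such as

     i_1 = PHI <0(preheader), i_3(latch)>
     i_3 = i_1 + 1

   the names i_1 and i_3 form one SCC in the SSA graph.  Alternative 2
   iterates just this cycle with the optimistic table until the value
   numbers stop changing, instead of re-walking the whole function as
   alternative 1 would.  */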
132
51e85e64 133/* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134#define BB_EXECUTABLE BB_VISITED
3e871d4d 135
5c08a518 136static vn_lookup_kind default_vn_walk_kind;
137
3e871d4d 138/* vn_nary_op hashtable helpers. */
139
770ff93b 140struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
3e871d4d 141{
9969c043 142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
3e871d4d 145};
146
147/* Return the computed hashcode for nary operation P1. */
148
149inline hashval_t
9969c043 150vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
3e871d4d 151{
152 return vno1->hashcode;
153}
154
155/* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
157
158inline bool
9969c043 159vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
3e871d4d 160{
c42ece58 161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
3e871d4d 162}
163
c1f445d2 164typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
3e871d4d 165typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166
167
168/* vn_phi hashtable helpers. */
169
170static int
171vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172
ca5aa39a 173struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
3e871d4d 174{
9969c043 175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
3e871d4d 177};
178
179/* Return the computed hashcode for phi operation P1. */
180
181inline hashval_t
9969c043 182vn_phi_hasher::hash (const vn_phi_s *vp1)
3e871d4d 183{
184 return vp1->hashcode;
185}
186
187/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189inline bool
9969c043 190vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
3e871d4d 191{
c42ece58 192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
3e871d4d 193}
194
c1f445d2 195typedef hash_table<vn_phi_hasher> vn_phi_table_type;
3e871d4d 196typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197
198
199/* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
201
202static int
203vn_reference_op_eq (const void *p1, const void *p2)
204{
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
217}
218
219/* Free a reference operation structure VP. */
220
221static inline void
222free_reference (vn_reference_s *vr)
223{
224 vr->operands.release ();
225}
226
227
228/* vn_reference hashtable helpers. */
229
ca5aa39a 230struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
3e871d4d 231{
9969c043 232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
3e871d4d 234};
235
236/* Return the hashcode for a given reference operation P1. */
237
238inline hashval_t
9969c043 239vn_reference_hasher::hash (const vn_reference_s *vr1)
3e871d4d 240{
241 return vr1->hashcode;
242}
243
244inline bool
9969c043 245vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
3e871d4d 246{
c42ece58 247 return v == c || vn_reference_eq (v, c);
3e871d4d 248}
249
c1f445d2 250typedef hash_table<vn_reference_hasher> vn_reference_table_type;
3e871d4d 251typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252
253
ca5aa39a 254/* The set of VN hashtables. */
9e9e6e3e 255
256typedef struct vn_tables_s
257{
c1f445d2 258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
9e9e6e3e 261} *vn_tables_t;
262
3e871d4d 263
264/* vn_constant hashtable helpers. */
265
298e7f9a 266struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
3e871d4d 267{
9969c043 268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
3e871d4d 270};
271
272/* Hash table hash function for vn_constant_t. */
273
274inline hashval_t
9969c043 275vn_constant_hasher::hash (const vn_constant_s *vc1)
3e871d4d 276{
277 return vc1->hashcode;
278}
279
280/* Hash table equality function for vn_constant_t. */
281
282inline bool
9969c043 283vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
3e871d4d 284{
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
287
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289}
290
c1f445d2 291static hash_table<vn_constant_hasher> *constant_to_value_id;
f6c33c78 292static bitmap constant_value_ids;
9e9e6e3e 293
9e9e6e3e 294
ca5aa39a 295/* Obstack we allocate the vn-tables elements from. */
296static obstack vn_tables_obstack;
297/* Special obstack we never unwind. */
298static obstack vn_tables_insert_obstack;
299
c42ece58 300static vn_reference_t last_inserted_ref;
301static vn_phi_t last_inserted_phi;
302static vn_nary_op_t last_inserted_nary;
303
9e9e6e3e 304/* Valid hashtables storing information we have proven to be
305 correct. */
9e9e6e3e 306static vn_tables_t valid_info;
307
9e9e6e3e 308
51e85e64 309/* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311tree (*vn_valueize) (tree);
9e9e6e3e 312
b8a2283e 313
9e9e6e3e 314/* This represents the top of the VN lattice, which is the universal
315 value. */
316
317tree VN_TOP;
318
f6c33c78 319/* Unique counter for our value ids. */
320
321static unsigned int next_value_id;
322
9e9e6e3e 323
b9584939 324/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
f1f41a6c 326 without looping over the vec. */
9e9e6e3e 327
51e85e64 328struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
329{
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
338};
339
340hashval_t
341vn_ssa_aux_hasher::hash (const value_type &entry)
342{
343 return SSA_NAME_VERSION (entry->name);
344}
345
346bool
347vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
348{
349 return name == entry->name;
350}
351
352static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
b9584939 354static struct obstack vn_ssa_aux_obstack;
9e9e6e3e 355
51e85e64 356static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357static unsigned int vn_nary_length_from_stmt (gimple *);
358static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364static tree vn_lookup_simplify_result (gimple_match_op *);
95579ce1 365static vn_reference_t vn_reference_lookup_or_insert_for_pieces
366 (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
51e85e64 367
afb92221 368/* Return whether there is value numbering information for a given SSA name. */
369
370bool
371has_VN_INFO (tree name)
372{
51e85e64 373 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
afb92221 374}
375
9e9e6e3e 376vn_ssa_aux_t
377VN_INFO (tree name)
378{
51e85e64 379 vn_ssa_aux_t *res
380 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
381 INSERT);
382 if (*res != NULL)
383 return *res;
384
385 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
386 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
387 newinfo->name = name;
388 newinfo->valnum = VN_TOP;
389 /* We are using the visited flag to handle uses with defs not within the
390 region being value-numbered. */
391 newinfo->visited = false;
392
393 /* Given we create the VN_INFOs on-demand now we have to do initialization
394 different than VN_TOP here. */
395 if (SSA_NAME_IS_DEFAULT_DEF (name))
396 switch (TREE_CODE (SSA_NAME_VAR (name)))
397 {
398 case VAR_DECL:
399 /* All undefined vars are VARYING. */
400 newinfo->valnum = name;
401 newinfo->visited = true;
402 break;
403
404 case PARM_DECL:
405 /* Parameters are VARYING but we can record a condition
406 if we know it is a non-NULL pointer. */
407 newinfo->visited = true;
408 newinfo->valnum = name;
409 if (POINTER_TYPE_P (TREE_TYPE (name))
410 && nonnull_arg_p (SSA_NAME_VAR (name)))
411 {
412 tree ops[2];
413 ops[0] = name;
414 ops[1] = build_int_cst (TREE_TYPE (name), 0);
415 vn_nary_op_t nary;
416 /* Allocate from non-unwinding stack. */
417 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
418 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
419 boolean_type_node, ops);
420 nary->predicated_values = 0;
421 nary->u.result = boolean_true_node;
422 vn_nary_op_insert_into (nary, valid_info->nary, true);
423 gcc_assert (nary->unwind_to == NULL);
424 /* Also do not link it into the undo chain. */
425 last_inserted_nary = nary->next;
426 nary->next = (vn_nary_op_t)(void *)-1;
427 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
428 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
429 boolean_type_node, ops);
430 nary->predicated_values = 0;
431 nary->u.result = boolean_false_node;
432 vn_nary_op_insert_into (nary, valid_info->nary, true);
433 gcc_assert (nary->unwind_to == NULL);
434 last_inserted_nary = nary->next;
435 nary->next = (vn_nary_op_t)(void *)-1;
436 if (dump_file && (dump_flags & TDF_DETAILS))
437 {
438 fprintf (dump_file, "Recording ");
439 print_generic_expr (dump_file, name, TDF_SLIM);
440 fprintf (dump_file, " != 0\n");
441 }
442 }
443 break;
444
445 case RESULT_DECL:
446 /* If the result is passed by invisible reference the default
447 def is initialized, otherwise it's uninitialized. Still
448 undefined is varying. */
449 newinfo->visited = true;
450 newinfo->valnum = name;
451 break;
452
453 default:
454 gcc_unreachable ();
455 }
456 return newinfo;
9e9e6e3e 457}
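/* Illustrative sketch (editorial example, assumed declaration): for a
   parameter declared like

     void f (int *p) __attribute__((nonnull));

   the PARM_DECL case above records the predicates p_1(D) != 0 == true
   and p_1(D) == 0 == false in the nary table, so later tests of the
   parameter against NULL can value-number to constants.  */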
458
51e85e64 459/* Return the SSA value of X. */
9e9e6e3e 460
51e85e64 461inline tree
2a06e47d 462SSA_VAL (tree x, bool *visited = NULL)
9e9e6e3e 463{
51e85e64 464 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
2a06e47d 465 if (visited)
466 *visited = tem && tem->visited;
51e85e64 467 return tem && tem->visited ? tem->valnum : x;
9e9e6e3e 468}
469
51e85e64 470/* Return the SSA value of the VUSE x, supporting released VDEFs
471 during elimination which will value-number the VDEF to the
472 associated VUSE (but not substitute in the whole lattice). */
9e9e6e3e 473
51e85e64 474static inline tree
475vuse_ssa_val (tree x)
9e9e6e3e 476{
51e85e64 477 if (!x)
478 return NULL_TREE;
b9584939 479
51e85e64 480 do
481 {
bbc91516 482 x = SSA_VAL (x);
483 gcc_assert (x != VN_TOP);
51e85e64 484 }
485 while (SSA_NAME_IN_FREE_LIST (x));
486
487 return x;
9e9e6e3e 488}
489
68697710 490/* Similar to the above but used as callback for walk_non_aliases_vuses
491 and thus should stop at unvisited VUSE to not walk across region
492 boundaries. */
493
494static tree
495vuse_valueize (tree vuse)
496{
497 do
498 {
499 bool visited;
500 vuse = SSA_VAL (vuse, &visited);
501 if (!visited)
502 return NULL_TREE;
503 gcc_assert (vuse != VN_TOP);
504 }
505 while (SSA_NAME_IN_FREE_LIST (vuse));
506 return vuse;
507}
508
9e9e6e3e 509
024fee2c 510/* Return the vn_kind the expression computed by the stmt should be
511 associated with. */
512
513enum vn_kind
42acab1c 514vn_get_stmt_kind (gimple *stmt)
024fee2c 515{
516 switch (gimple_code (stmt))
517 {
518 case GIMPLE_CALL:
519 return VN_REFERENCE;
520 case GIMPLE_PHI:
521 return VN_PHI;
522 case GIMPLE_ASSIGN:
523 {
524 enum tree_code code = gimple_assign_rhs_code (stmt);
525 tree rhs1 = gimple_assign_rhs1 (stmt);
526 switch (get_gimple_rhs_class (code))
527 {
528 case GIMPLE_UNARY_RHS:
529 case GIMPLE_BINARY_RHS:
530 case GIMPLE_TERNARY_RHS:
531 return VN_NARY;
532 case GIMPLE_SINGLE_RHS:
533 switch (TREE_CODE_CLASS (code))
534 {
535 case tcc_reference:
536 /* VOP-less references can go through unary case. */
537 if ((code == REALPART_EXPR
538 || code == IMAGPART_EXPR
539 || code == VIEW_CONVERT_EXPR
540 || code == BIT_FIELD_REF)
541 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
542 return VN_NARY;
543
544 /* Fallthrough. */
545 case tcc_declaration:
546 return VN_REFERENCE;
547
548 case tcc_constant:
549 return VN_CONSTANT;
550
551 default:
552 if (code == ADDR_EXPR)
553 return (is_gimple_min_invariant (rhs1)
554 ? VN_CONSTANT : VN_REFERENCE);
555 else if (code == CONSTRUCTOR)
556 return VN_NARY;
557 return VN_NONE;
558 }
559 default:
560 return VN_NONE;
561 }
562 }
563 default:
564 return VN_NONE;
565 }
566}
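/* Illustrative sketch (editorial examples): under the classification
   above, x_1 = y_2 + 1 is VN_NARY, a load x_1 = a.b and any call are
   VN_REFERENCE, x_1 = PHI <...> is VN_PHI, and an assignment from an
   INTEGER_CST such as x_1 = 42 is VN_CONSTANT.  */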
75a70cf9 567
8c8a7011 568/* Lookup a value id for CONSTANT and return it. If it does not
569 exist returns 0. */
570
571unsigned int
572get_constant_value_id (tree constant)
573{
3e871d4d 574 vn_constant_s **slot;
8c8a7011 575 struct vn_constant_s vc;
75a70cf9 576
577 vc.hashcode = vn_hash_constant_with_type (constant);
8c8a7011 578 vc.constant = constant;
c1f445d2 579 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
8c8a7011 580 if (slot)
3e871d4d 581 return (*slot)->value_id;
8c8a7011 582 return 0;
583}
584
f6c33c78 585/* Lookup a value id for CONSTANT, and if it does not exist, create a
586 new one and return it. If it does exist, return it. */
587
588unsigned int
589get_or_alloc_constant_value_id (tree constant)
590{
3e871d4d 591 vn_constant_s **slot;
88006128 592 struct vn_constant_s vc;
593 vn_constant_t vcp;
48e1416a 594
51e85e64 595 /* If the hashtable isn't initialized we're not running from PRE and thus
596 do not need value-ids. */
597 if (!constant_to_value_id)
598 return 0;
599
88006128 600 vc.hashcode = vn_hash_constant_with_type (constant);
601 vc.constant = constant;
c1f445d2 602 slot = constant_to_value_id->find_slot (&vc, INSERT);
f6c33c78 603 if (*slot)
3e871d4d 604 return (*slot)->value_id;
88006128 605
606 vcp = XNEW (struct vn_constant_s);
607 vcp->hashcode = vc.hashcode;
608 vcp->constant = constant;
609 vcp->value_id = get_next_value_id ();
3e871d4d 610 *slot = vcp;
88006128 611 bitmap_set_bit (constant_value_ids, vcp->value_id);
612 return vcp->value_id;
f6c33c78 613}
614
615/* Return true if V is a value id for a constant. */
616
617bool
618value_id_constant_p (unsigned int v)
619{
48e1416a 620 return bitmap_bit_p (constant_value_ids, v);
f6c33c78 621}
622
8f4173dc 623/* Compute the hash for a reference operand VRO1. */
9e9e6e3e 624
f32e91d5 625static void
626vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
9e9e6e3e 627{
f32e91d5 628 hstate.add_int (vro1->opcode);
3d2d7de7 629 if (vro1->op0)
f32e91d5 630 inchash::add_expr (vro1->op0, hstate);
3d2d7de7 631 if (vro1->op1)
f32e91d5 632 inchash::add_expr (vro1->op1, hstate);
3d2d7de7 633 if (vro1->op2)
f32e91d5 634 inchash::add_expr (vro1->op2, hstate);
9e9e6e3e 635}
636
9e9e6e3e 637/* Compute a hash for the reference operation VR1 and return it. */
638
2fd3ecff 639static hashval_t
9e9e6e3e 640vn_reference_compute_hash (const vn_reference_t vr1)
641{
f32e91d5 642 inchash::hash hstate;
643 hashval_t result;
9e9e6e3e 644 int i;
645 vn_reference_op_t vro;
fe60c82c 646 poly_int64 off = -1;
182cf5a9 647 bool deref = false;
9e9e6e3e 648
f1f41a6c 649 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
182cf5a9 650 {
651 if (vro->opcode == MEM_REF)
652 deref = true;
653 else if (vro->opcode != ADDR_EXPR)
654 deref = false;
fe60c82c 655 if (maybe_ne (vro->off, -1))
182cf5a9 656 {
fe60c82c 657 if (known_eq (off, -1))
182cf5a9 658 off = 0;
659 off += vro->off;
660 }
661 else
662 {
fe60c82c 663 if (maybe_ne (off, -1)
664 && maybe_ne (off, 0))
665 hstate.add_poly_int (off);
182cf5a9 666 off = -1;
667 if (deref
668 && vro->opcode == ADDR_EXPR)
669 {
670 if (vro->op0)
671 {
672 tree op = TREE_OPERAND (vro->op0, 0);
f32e91d5 673 hstate.add_int (TREE_CODE (op));
674 inchash::add_expr (op, hstate);
182cf5a9 675 }
676 }
677 else
f32e91d5 678 vn_reference_op_compute_hash (vro, hstate);
182cf5a9 679 }
680 }
f32e91d5 681 result = hstate.end ();
682 /* ??? We would ICE later if we hash instead of adding that in. */
84cd88b5 683 if (vr1->vuse)
684 result += SSA_NAME_VERSION (vr1->vuse);
9e9e6e3e 685
686 return result;
687}
688
3e871d4d 689/* Return true if reference operations VR1 and VR2 are equivalent. This
9e9e6e3e 690 means they have the same set of operands and vuses. */
691
3e871d4d 692bool
693vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
9e9e6e3e 694{
182cf5a9 695 unsigned i, j;
9e9e6e3e 696
dd277d48 697 /* Early out if this is not a hash collision. */
698 if (vr1->hashcode != vr2->hashcode)
699 return false;
9e9e6e3e 700
dd277d48 701 /* The VOP needs to be the same. */
702 if (vr1->vuse != vr2->vuse)
9e9e6e3e 703 return false;
704
dd277d48 705 /* If the operands are the same we are done. */
706 if (vr1->operands == vr2->operands)
707 return true;
708
182cf5a9 709 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
9e9e6e3e 710 return false;
711
87d822bb 712 if (INTEGRAL_TYPE_P (vr1->type)
713 && INTEGRAL_TYPE_P (vr2->type))
714 {
715 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
716 return false;
717 }
718 else if (INTEGRAL_TYPE_P (vr1->type)
719 && (TYPE_PRECISION (vr1->type)
f9ae6f95 720 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
87d822bb 721 return false;
722 else if (INTEGRAL_TYPE_P (vr2->type)
723 && (TYPE_PRECISION (vr2->type)
f9ae6f95 724 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
87d822bb 725 return false;
726
182cf5a9 727 i = 0;
728 j = 0;
729 do
730 {
fe60c82c 731 poly_int64 off1 = 0, off2 = 0;
182cf5a9 732 vn_reference_op_t vro1, vro2;
733 vn_reference_op_s tem1, tem2;
734 bool deref1 = false, deref2 = false;
f1f41a6c 735 for (; vr1->operands.iterate (i, &vro1); i++)
182cf5a9 736 {
737 if (vro1->opcode == MEM_REF)
738 deref1 = true;
292237f3 739 /* Do not look through a storage order barrier. */
740 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
741 return false;
fe60c82c 742 if (known_eq (vro1->off, -1))
182cf5a9 743 break;
744 off1 += vro1->off;
745 }
f1f41a6c 746 for (; vr2->operands.iterate (j, &vro2); j++)
182cf5a9 747 {
748 if (vro2->opcode == MEM_REF)
749 deref2 = true;
292237f3 750 /* Do not look through a storage order barrier. */
751 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
752 return false;
fe60c82c 753 if (known_eq (vro2->off, -1))
182cf5a9 754 break;
755 off2 += vro2->off;
756 }
fe60c82c 757 if (maybe_ne (off1, off2))
182cf5a9 758 return false;
759 if (deref1 && vro1->opcode == ADDR_EXPR)
760 {
761 memset (&tem1, 0, sizeof (tem1));
762 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
763 tem1.type = TREE_TYPE (tem1.op0);
764 tem1.opcode = TREE_CODE (tem1.op0);
765 vro1 = &tem1;
f9f051a3 766 deref1 = false;
182cf5a9 767 }
768 if (deref2 && vro2->opcode == ADDR_EXPR)
769 {
770 memset (&tem2, 0, sizeof (tem2));
771 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
772 tem2.type = TREE_TYPE (tem2.op0);
773 tem2.opcode = TREE_CODE (tem2.op0);
774 vro2 = &tem2;
f9f051a3 775 deref2 = false;
182cf5a9 776 }
f9f051a3 777 if (deref1 != deref2)
778 return false;
182cf5a9 779 if (!vn_reference_op_eq (vro1, vro2))
780 return false;
781 ++j;
782 ++i;
783 }
f1f41a6c 784 while (vr1->operands.length () != i
785 || vr2->operands.length () != j);
9e9e6e3e 786
dd277d48 787 return true;
9e9e6e3e 788}
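/* Illustrative sketch (editorial example, field offset assumed): both
   vn_reference_compute_hash and vn_reference_eq above collapse runs of
   constant offsets, so with a field f at byte offset 4 an access s.f and
   an access MEM[&s + 4] of the same type produce equivalent operand
   chains and thus, given the same VUSE, the same value number.  */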
789
75a70cf9 790/* Copy the operations present in load/store REF into RESULT, a vector of
9e9e6e3e 791 vn_reference_op_s's. */
792
2fd3ecff 793static void
f1f41a6c 794copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
9e9e6e3e 795{
9e9e6e3e 796 /* For non-calls, store the information that makes up the address. */
6a00bf6b 797 tree orig = ref;
9e9e6e3e 798 while (ref)
799 {
800 vn_reference_op_s temp;
801
802 memset (&temp, 0, sizeof (temp));
2be90eed 803 temp.type = TREE_TYPE (ref);
9e9e6e3e 804 temp.opcode = TREE_CODE (ref);
182cf5a9 805 temp.off = -1;
9e9e6e3e 806
807 switch (temp.opcode)
808 {
39215e09 809 case MODIFY_EXPR:
810 temp.op0 = TREE_OPERAND (ref, 1);
811 break;
8a19bda6 812 case WITH_SIZE_EXPR:
813 temp.op0 = TREE_OPERAND (ref, 1);
814 temp.off = 0;
815 break;
182cf5a9 816 case MEM_REF:
817 /* The base address gets its own vn_reference_op_s structure. */
818 temp.op0 = TREE_OPERAND (ref, 1);
90ca1268 819 if (!mem_ref_offset (ref).to_shwi (&temp.off))
820 temp.off = -1;
842c7753 821 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
822 temp.base = MR_DEPENDENCE_BASE (ref);
292237f3 823 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
182cf5a9 824 break;
87ee9f7a 825 case TARGET_MEM_REF:
826 /* The base address gets its own vn_reference_op_s structure. */
827 temp.op0 = TMR_INDEX (ref);
828 temp.op1 = TMR_STEP (ref);
829 temp.op2 = TMR_OFFSET (ref);
830 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
831 temp.base = MR_DEPENDENCE_BASE (ref);
832 result->safe_push (temp);
833 memset (&temp, 0, sizeof (temp));
834 temp.type = NULL_TREE;
835 temp.opcode = ERROR_MARK;
836 temp.op0 = TMR_INDEX2 (ref);
837 temp.off = -1;
838 break;
9e9e6e3e 839 case BIT_FIELD_REF:
292237f3 840 /* Record bits, position and storage order. */
9e9e6e3e 841 temp.op0 = TREE_OPERAND (ref, 1);
842 temp.op1 = TREE_OPERAND (ref, 2);
e580c75c 843 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
844 temp.off = -1;
292237f3 845 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
9e9e6e3e 846 break;
847 case COMPONENT_REF:
659ce413 848 /* The field decl is enough to unambiguously specify the field,
849 a matching type is not necessary and a mismatching type
850 is always a spurious difference. */
851 temp.type = NULL_TREE;
3918bd18 852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
182cf5a9 854 {
855 tree this_offset = component_ref_field_offset (ref);
856 if (this_offset
fe60c82c 857 && poly_int_tree_p (this_offset))
182cf5a9 858 {
859 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
f9ae6f95 860 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
182cf5a9 861 {
fe60c82c 862 poly_offset_int off
863 = (wi::to_poly_offset (this_offset)
9fdc1ed4 864 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
fe60c82c 865 /* Prohibit value-numbering zero offset components
866 of addresses the same before the pass folding
867 __builtin_object_size had a chance to run
868 (checking cfun->after_inlining does the
869 trick here). */
870 if (TREE_CODE (orig) != ADDR_EXPR
871 || maybe_ne (off, 0)
872 || cfun->after_inlining)
873 off.to_shwi (&temp.off);
182cf5a9 874 }
875 }
876 }
9e9e6e3e 877 break;
878 case ARRAY_RANGE_REF:
879 case ARRAY_REF:
8894566e 880 {
881 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
882 /* Record index as operand. */
883 temp.op0 = TREE_OPERAND (ref, 1);
884 /* Always record lower bounds and element size. */
885 temp.op1 = array_ref_low_bound (ref);
886 /* But record element size in units of the type alignment. */
887 temp.op2 = TREE_OPERAND (ref, 3);
888 temp.align = eltype->type_common.align;
889 if (! temp.op2)
890 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
891 size_int (TYPE_ALIGN_UNIT (eltype)));
fe60c82c 892 if (poly_int_tree_p (temp.op0)
893 && poly_int_tree_p (temp.op1)
8894566e 894 && TREE_CODE (temp.op2) == INTEGER_CST)
895 {
fe60c82c 896 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
897 - wi::to_poly_offset (temp.op1))
898 * wi::to_offset (temp.op2)
899 * vn_ref_op_align_unit (&temp));
900 off.to_shwi (&temp.off);
8894566e 901 }
902 }
9e9e6e3e 903 break;
2be90eed 904 case VAR_DECL:
905 if (DECL_HARD_REGISTER (ref))
906 {
907 temp.op0 = ref;
908 break;
909 }
910 /* Fallthru. */
911 case PARM_DECL:
912 case CONST_DECL:
913 case RESULT_DECL:
914 /* Canonicalize decls to MEM[&decl] which is what we end up with
915 when valueizing MEM[ptr] with ptr = &decl. */
916 temp.opcode = MEM_REF;
917 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
918 temp.off = 0;
f1f41a6c 919 result->safe_push (temp);
2be90eed 920 temp.opcode = ADDR_EXPR;
75aefb7b 921 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
2be90eed 922 temp.type = TREE_TYPE (temp.op0);
923 temp.off = -1;
924 break;
a0e3bc3a 925 case STRING_CST:
926 case INTEGER_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
7342d4d1 929 case REAL_CST:
7f7ae544 930 case FIXED_CST:
2a2aef73 931 case CONSTRUCTOR:
9e9e6e3e 932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
4be5a86a 935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
937 {
938 temp.op0 = ref;
939 break;
940 }
a5650c86 941 break;
a0e3bc3a 942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
 944 ref in the chain of references (i.e. they require an
945 operand), so we don't have to put anything
 946 for op* as it will be handled by the iteration. */
a0e3bc3a 947 case REALPART_EXPR:
292237f3 948 temp.off = 0;
949 break;
a0e3bc3a 950 case VIEW_CONVERT_EXPR:
182cf5a9 951 temp.off = 0;
292237f3 952 temp.reverse = storage_order_barrier_p (ref);
182cf5a9 953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
f9ae6f95 956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
9e9e6e3e 957 break;
a0e3bc3a 958 default:
959 gcc_unreachable ();
9e9e6e3e 960 }
f1f41a6c 961 result->safe_push (temp);
9e9e6e3e 962
4be5a86a 963 if (REFERENCE_CLASS_P (ref)
39215e09 964 || TREE_CODE (ref) == MODIFY_EXPR
8a19bda6 965 || TREE_CODE (ref) == WITH_SIZE_EXPR
4be5a86a 966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
9e9e6e3e 968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
971 }
972}
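/* Illustrative sketch (editorial example, offsets assumed): for a load
   from s.a[i_1] the walk above pushes roughly

     { ARRAY_REF, op0 = i_1, op1 = low bound, op2 = element size in
       alignment units }
     { COMPONENT_REF, op0 = FIELD_DECL a, off = byte offset of a }
     { MEM_REF, op0 = 0, off = 0 }
     { ADDR_EXPR, op0 = &s }

   i.e. the outermost reference first, with the plain decl s canonicalized
   to MEM[&s] as described in the *_DECL cases.  */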
973
3918bd18 974/* Build an alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET and the reference type TYPE.
976 Return true if something useful was produced. */
02067dc5 977
3918bd18 978bool
979ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, tree type,
db133a52 981 vec<vn_reference_op_s> ops)
02067dc5 982{
983 vn_reference_op_t op;
984 unsigned i;
3918bd18 985 tree base = NULL_TREE;
986 tree *op0_p = &base;
fe60c82c 987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
3918bd18 990 tree size_tree = NULL_TREE;
182cf5a9 991 alias_set_type base_alias_set = -1;
3918bd18 992
993 /* First get the final access size from just the outermost expression. */
f1f41a6c 994 op = &ops[0];
3918bd18 995 if (op->opcode == COMPONENT_REF)
182cf5a9 996 size_tree = DECL_SIZE (op->op0);
3918bd18 997 else if (op->opcode == BIT_FIELD_REF)
998 size_tree = op->op0;
999 else
1000 {
3754d046 1001 machine_mode mode = TYPE_MODE (type);
3918bd18 1002 if (mode == BLKmode)
1003 size_tree = TYPE_SIZE (type);
1004 else
fe60c82c 1005 size = GET_MODE_BITSIZE (mode);
3918bd18 1006 }
925e34bb 1007 if (size_tree != NULL_TREE
fe60c82c 1008 && poly_int_tree_p (size_tree))
1009 size = wi::to_poly_offset (size_tree);
3918bd18 1010
1011 /* Initially, maxsize is the same as the accessed element size.
1012 In the following it will only grow (or become -1). */
1013 max_size = size;
02067dc5 1014
3918bd18 1015 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1016 and find the ultimate containing object. */
f1f41a6c 1017 FOR_EACH_VEC_ELT (ops, i, op)
02067dc5 1018 {
1019 switch (op->opcode)
1020 {
3918bd18 1021 /* These may be in the reference ops, but we cannot do anything
1022 sensible with them here. */
3918bd18 1023 case ADDR_EXPR:
182cf5a9 1024 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1025 if (base != NULL_TREE
1026 && TREE_CODE (base) == MEM_REF
1027 && op->op0
1028 && DECL_P (TREE_OPERAND (op->op0, 0)))
1029 {
f1f41a6c 1030 vn_reference_op_t pop = &ops[i-1];
182cf5a9 1031 base = TREE_OPERAND (op->op0, 0);
fe60c82c 1032 if (known_eq (pop->off, -1))
182cf5a9 1033 {
1034 max_size = -1;
1035 offset = 0;
1036 }
1037 else
1038 offset += pop->off * BITS_PER_UNIT;
1039 op0_p = NULL;
1040 break;
1041 }
1042 /* Fallthru. */
1043 case CALL_EXPR:
3918bd18 1044 return false;
02067dc5 1045
3918bd18 1046 /* Record the base objects. */
182cf5a9 1047 case MEM_REF:
1048 base_alias_set = get_deref_alias_set (op->op0);
1049 *op0_p = build2 (MEM_REF, op->type,
1050 NULL_TREE, op->op0);
842c7753 1051 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1052 MR_DEPENDENCE_BASE (*op0_p) = op->base;
182cf5a9 1053 op0_p = &TREE_OPERAND (*op0_p, 0);
1054 break;
1055
3918bd18 1056 case VAR_DECL:
1057 case PARM_DECL:
1058 case RESULT_DECL:
1059 case SSA_NAME:
3918bd18 1060 *op0_p = op->op0;
182cf5a9 1061 op0_p = NULL;
3918bd18 1062 break;
1063
1064 /* And now the usual component-reference style ops. */
02067dc5 1065 case BIT_FIELD_REF:
a5b13ada 1066 offset += wi::to_poly_offset (op->op1);
02067dc5 1067 break;
1068
1069 case COMPONENT_REF:
3918bd18 1070 {
1071 tree field = op->op0;
1072 /* We do not have a complete COMPONENT_REF tree here so we
1073 cannot use component_ref_field_offset. Do the interesting
1074 parts manually. */
925e34bb 1075 tree this_offset = DECL_FIELD_OFFSET (field);
3918bd18 1076
fe60c82c 1077 if (op->op1 || !poly_int_tree_p (this_offset))
3918bd18 1078 max_size = -1;
1079 else
1080 {
fe60c82c 1081 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1082 << LOG2_BITS_PER_UNIT);
925e34bb 1083 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1084 offset += woffset;
3918bd18 1085 }
1086 break;
1087 }
02067dc5 1088
1089 case ARRAY_RANGE_REF:
1090 case ARRAY_REF:
9fa67218 1091 /* We recorded the lower bound and the element size. */
fe60c82c 1092 if (!poly_int_tree_p (op->op0)
1093 || !poly_int_tree_p (op->op1)
925e34bb 1094 || TREE_CODE (op->op2) != INTEGER_CST)
3918bd18 1095 max_size = -1;
1096 else
1097 {
fe60c82c 1098 poly_offset_int woffset
1099 = wi::sext (wi::to_poly_offset (op->op0)
1100 - wi::to_poly_offset (op->op1),
925e34bb 1101 TYPE_PRECISION (TREE_TYPE (op->op0)));
8894566e 1102 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
9fdc1ed4 1103 woffset <<= LOG2_BITS_PER_UNIT;
925e34bb 1104 offset += woffset;
3918bd18 1105 }
1106 break;
1107
1108 case REALPART_EXPR:
1109 break;
1110
1111 case IMAGPART_EXPR:
1112 offset += size;
1113 break;
1114
1115 case VIEW_CONVERT_EXPR:
02067dc5 1116 break;
1117
1118 case STRING_CST:
1119 case INTEGER_CST:
1120 case COMPLEX_CST:
1121 case VECTOR_CST:
1122 case REAL_CST:
1123 case CONSTRUCTOR:
02067dc5 1124 case CONST_DECL:
3918bd18 1125 return false;
02067dc5 1126
1127 default:
3918bd18 1128 return false;
02067dc5 1129 }
1130 }
1131
3918bd18 1132 if (base == NULL_TREE)
1133 return false;
1134
db133a52 1135 ref->ref = NULL_TREE;
3918bd18 1136 ref->base = base;
3918bd18 1137 ref->ref_alias_set = set;
182cf5a9 1138 if (base_alias_set != -1)
1139 ref->base_alias_set = base_alias_set;
1140 else
1141 ref->base_alias_set = get_alias_set (base);
3787db52 1142 /* We discount volatiles from value-numbering elsewhere. */
1143 ref->volatile_p = false;
3918bd18 1144
fe60c82c 1145 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
925e34bb 1146 {
1147 ref->offset = 0;
1148 ref->size = -1;
1149 ref->max_size = -1;
1150 return true;
1151 }
1152
fe60c82c 1153 if (!offset.to_shwi (&ref->offset))
925e34bb 1154 {
1155 ref->offset = 0;
1156 ref->max_size = -1;
1157 return true;
1158 }
1159
fe60c82c 1160 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
925e34bb 1161 ref->max_size = -1;
925e34bb 1162
3918bd18 1163 return true;
02067dc5 1164}
1165
75a70cf9 1166/* Copy the operations present in load/store/call REF into RESULT, a vector of
1167 vn_reference_op_s's. */
1168
2fd3ecff 1169static void
1a91d914 1170copy_reference_ops_from_call (gcall *call,
f1f41a6c 1171 vec<vn_reference_op_s> *result)
75a70cf9 1172{
1173 vn_reference_op_s temp;
75a70cf9 1174 unsigned i;
7ec657ff 1175 tree lhs = gimple_call_lhs (call);
27b0e9e4 1176 int lr;
7ec657ff 1177
1178 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1179 different. By adding the lhs here in the vector, we ensure that the
1180 hashcode is different, guaranteeing a different value number. */
1181 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1182 {
1183 memset (&temp, 0, sizeof (temp));
1184 temp.opcode = MODIFY_EXPR;
1185 temp.type = TREE_TYPE (lhs);
1186 temp.op0 = lhs;
1187 temp.off = -1;
f1f41a6c 1188 result->safe_push (temp);
7ec657ff 1189 }
75a70cf9 1190
27b0e9e4 1191 /* Copy the type, opcode, function, static chain and EH region, if any. */
75a70cf9 1192 memset (&temp, 0, sizeof (temp));
2b99a5c3 1193 temp.type = gimple_call_fntype (call);
75a70cf9 1194 temp.opcode = CALL_EXPR;
4be5a86a 1195 temp.op0 = gimple_call_fn (call);
0e3bb11d 1196 temp.op1 = gimple_call_chain (call);
aac19106 1197 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
27b0e9e4 1198 temp.op2 = size_int (lr);
182cf5a9 1199 temp.off = -1;
f1f41a6c 1200 result->safe_push (temp);
75a70cf9 1201
4be5a86a 1202 /* Copy the call arguments. As they can be references as well,
1203 just chain them together. */
75a70cf9 1204 for (i = 0; i < gimple_call_num_args (call); ++i)
1205 {
1206 tree callarg = gimple_call_arg (call, i);
4be5a86a 1207 copy_reference_ops_from_ref (callarg, result);
75a70cf9 1208 }
75a70cf9 1209}
1210
d12dee9c 1211/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
11beb29c 1213static bool
f1f41a6c 1214vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
d12dee9c 1215 unsigned int *i_p)
9e9e6e3e 1216{
d12dee9c 1217 unsigned int i = *i_p;
f1f41a6c 1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
182cf5a9 1220 tree addr_base;
773078cb 1221 poly_int64 addr_offset = 0;
182cf5a9 1222
1223 /* The only thing we have to do is from &OBJ.foo.bar add the offset
9d75589a 1224 from .foo.bar to the preceding MEM_REF offset and replace the
182cf5a9 1225 address with &OBJ. */
1226 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1227 &addr_offset);
1228 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
d68e9408 1229 if (addr_base != TREE_OPERAND (op->op0, 0))
182cf5a9 1230 {
773078cb 1231 poly_offset_int off
1232 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1233 SIGNED)
1234 + addr_offset);
e913b5cd 1235 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
182cf5a9 1236 op->op0 = build_fold_addr_expr (addr_base);
e913b5cd 1237 if (tree_fits_shwi_p (mem_op->op0))
1238 mem_op->off = tree_to_shwi (mem_op->op0);
182cf5a9 1239 else
1240 mem_op->off = -1;
11beb29c 1241 return true;
d12dee9c 1242 }
11beb29c 1243 return false;
d12dee9c 1244}
9e9e6e3e 1245
37b80bde 1246/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1247 *I_P to point to the last element of the replacement. */
11beb29c 1248static bool
f1f41a6c 1249vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
37b80bde 1250 unsigned int *i_p)
1251{
9c4ce317 1252 bool changed = false;
1253 vn_reference_op_t op;
37b80bde 1254
9c4ce317 1255 do
37b80bde 1256 {
9c4ce317 1257 unsigned int i = *i_p;
1258 op = &(*ops)[i];
1259 vn_reference_op_t mem_op = &(*ops)[i - 1];
1260 gimple *def_stmt;
1261 enum tree_code code;
1262 poly_offset_int off;
1263
1264 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1265 if (!is_gimple_assign (def_stmt))
1266 return changed;
1267
1268 code = gimple_assign_rhs_code (def_stmt);
1269 if (code != ADDR_EXPR
1270 && code != POINTER_PLUS_EXPR)
1271 return changed;
1272
1273 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1274
1275 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1276 from .foo.bar to the preceding MEM_REF offset and replace the
1277 address with &OBJ. */
1278 if (code == ADDR_EXPR)
5c08a518 1279 {
9c4ce317 1280 tree addr, addr_base;
1281 poly_int64 addr_offset;
1282
1283 addr = gimple_assign_rhs1 (def_stmt);
1284 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1285 &addr_offset);
1286 /* If that didn't work because the address isn't invariant propagate
1287 the reference tree from the address operation in case the current
1288 dereference isn't offsetted. */
1289 if (!addr_base
1290 && *i_p == ops->length () - 1
1291 && known_eq (off, 0)
1292 /* This makes us disable this transform for PRE where the
 1293 reference ops might also be used for code insertion which
1294 is invalid. */
1295 && default_vn_walk_kind == VN_WALKREWRITE)
894463cf 1296 {
9c4ce317 1297 auto_vec<vn_reference_op_s, 32> tem;
1298 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1299 /* Make sure to preserve TBAA info. The only objects not
1300 wrapped in MEM_REFs that can have their address taken are
1301 STRING_CSTs. */
1302 if (tem.length () >= 2
1303 && tem[tem.length () - 2].opcode == MEM_REF)
1304 {
1305 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1306 new_mem_op->op0
1307 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1308 wi::to_poly_wide (new_mem_op->op0));
1309 }
1310 else
1311 gcc_assert (tem.last ().opcode == STRING_CST);
1312 ops->pop ();
1313 ops->pop ();
1314 ops->safe_splice (tem);
1315 --*i_p;
1316 return true;
894463cf 1317 }
9c4ce317 1318 if (!addr_base
1319 || TREE_CODE (addr_base) != MEM_REF
1320 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1321 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1322 0))))
1323 return changed;
1324
1325 off += addr_offset;
1326 off += mem_ref_offset (addr_base);
1327 op->op0 = TREE_OPERAND (addr_base, 0);
1328 }
1329 else
1330 {
1331 tree ptr, ptroff;
1332 ptr = gimple_assign_rhs1 (def_stmt);
1333 ptroff = gimple_assign_rhs2 (def_stmt);
1334 if (TREE_CODE (ptr) != SSA_NAME
1335 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1336 /* Make sure to not endlessly recurse.
1337 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1338 happen when we value-number a PHI to its backedge value. */
1339 || SSA_VAL (ptr) == op->op0
1340 || !poly_int_tree_p (ptroff))
1341 return changed;
1342
1343 off += wi::to_poly_offset (ptroff);
1344 op->op0 = ptr;
5c08a518 1345 }
37b80bde 1346
9c4ce317 1347 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1348 if (tree_fits_shwi_p (mem_op->op0))
1349 mem_op->off = tree_to_shwi (mem_op->op0);
1350 else
1351 mem_op->off = -1;
1352 /* ??? Can end up with endless recursion here!?
1353 gcc.c-torture/execute/strcmp-1.c */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 op->op0 = SSA_VAL (op->op0);
1356 if (TREE_CODE (op->op0) != SSA_NAME)
1357 op->opcode = TREE_CODE (op->op0);
1358
1359 changed = true;
37b80bde 1360 }
9c4ce317 1361 /* Tail-recurse. */
1362 while (TREE_CODE (op->op0) == SSA_NAME);
37b80bde 1363
9c4ce317 1364 /* Fold a remaining *&. */
1365 if (TREE_CODE (op->op0) == ADDR_EXPR)
37b80bde 1366 vn_reference_fold_indirect (ops, i_p);
9c4ce317 1367
1368 return changed;
37b80bde 1369}
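/* Illustrative sketch (editorial example): for

     ptr_2 = ptr_1 + 8;   <-- POINTER_PLUS_EXPR, byte offset 8
     ... = MEM[ptr_2];

   the walk above absorbs the constant 8 into the MEM_REF offset and
   replaces the address operand with ptr_1, then keeps following ptr_1's
   definition in the same way.  */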
1370
c26ce8a9 1371/* Optimize the reference REF to a constant if possible or return
1372 NULL_TREE if not. */
1373
1374tree
1375fully_constant_vn_reference_p (vn_reference_t ref)
1376{
f1f41a6c 1377 vec<vn_reference_op_s> operands = ref->operands;
c26ce8a9 1378 vn_reference_op_t op;
1379
1380 /* Try to simplify the translated expression if it is
1381 a call to a builtin function with at most two arguments. */
f1f41a6c 1382 op = &operands[0];
c26ce8a9 1383 if (op->opcode == CALL_EXPR
1384 && TREE_CODE (op->op0) == ADDR_EXPR
1385 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
a0e9bfbb 1386 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
f1f41a6c 1387 && operands.length () >= 2
1388 && operands.length () <= 3)
c26ce8a9 1389 {
1390 vn_reference_op_t arg0, arg1 = NULL;
1391 bool anyconst = false;
f1f41a6c 1392 arg0 = &operands[1];
1393 if (operands.length () > 2)
1394 arg1 = &operands[2];
c26ce8a9 1395 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1396 || (arg0->opcode == ADDR_EXPR
1397 && is_gimple_min_invariant (arg0->op0)))
1398 anyconst = true;
1399 if (arg1
1400 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1401 || (arg1->opcode == ADDR_EXPR
1402 && is_gimple_min_invariant (arg1->op0))))
1403 anyconst = true;
1404 if (anyconst)
1405 {
1406 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1407 arg1 ? 2 : 1,
1408 arg0->op0,
1409 arg1 ? arg1->op0 : NULL);
1410 if (folded
1411 && TREE_CODE (folded) == NOP_EXPR)
1412 folded = TREE_OPERAND (folded, 0);
1413 if (folded
1414 && is_gimple_min_invariant (folded))
1415 return folded;
1416 }
1417 }
1418
a5650c86 1419 /* Simplify reads from constants or constant initializers. */
1420 else if (BITS_PER_UNIT == 8
7366a0f0 1421 && COMPLETE_TYPE_P (ref->type)
7d368d79 1422 && is_gimple_reg_type (ref->type))
c26ce8a9 1423 {
fe60c82c 1424 poly_int64 off = 0;
9f9cf897 1425 HOST_WIDE_INT size;
1426 if (INTEGRAL_TYPE_P (ref->type))
1427 size = TYPE_PRECISION (ref->type);
7d368d79 1428 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
9f9cf897 1429 size = tree_to_shwi (TYPE_SIZE (ref->type));
7d368d79 1430 else
1431 return NULL_TREE;
a5650c86 1432 if (size % BITS_PER_UNIT != 0
1433 || size > MAX_BITSIZE_MODE_ANY_MODE)
1434 return NULL_TREE;
1435 size /= BITS_PER_UNIT;
1436 unsigned i;
1437 for (i = 0; i < operands.length (); ++i)
1438 {
2be13cd5 1439 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1440 {
1441 ++i;
1442 break;
1443 }
fe60c82c 1444 if (known_eq (operands[i].off, -1))
a5650c86 1445 return NULL_TREE;
1446 off += operands[i].off;
1447 if (operands[i].opcode == MEM_REF)
1448 {
1449 ++i;
1450 break;
1451 }
1452 }
1453 vn_reference_op_t base = &operands[--i];
1454 tree ctor = error_mark_node;
1455 tree decl = NULL_TREE;
1456 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1457 ctor = base->op0;
1458 else if (base->opcode == MEM_REF
1459 && base[1].opcode == ADDR_EXPR
1460 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
8d803464 1461 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1462 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
a5650c86 1463 {
1464 decl = TREE_OPERAND (base[1].op0, 0);
8d803464 1465 if (TREE_CODE (decl) == STRING_CST)
1466 ctor = decl;
1467 else
1468 ctor = ctor_for_folding (decl);
a5650c86 1469 }
1470 if (ctor == NULL_TREE)
1471 return build_zero_cst (ref->type);
1472 else if (ctor != error_mark_node)
1473 {
fe60c82c 1474 HOST_WIDE_INT const_off;
a5650c86 1475 if (decl)
1476 {
1477 tree res = fold_ctor_reference (ref->type, ctor,
1478 off * BITS_PER_UNIT,
1479 size * BITS_PER_UNIT, decl);
1480 if (res)
1481 {
1482 STRIP_USELESS_TYPE_CONVERSION (res);
1483 if (is_gimple_min_invariant (res))
1484 return res;
1485 }
1486 }
fe60c82c 1487 else if (off.is_constant (&const_off))
a5650c86 1488 {
1489 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
fe60c82c 1490 int len = native_encode_expr (ctor, buf, size, const_off);
522ea93c 1491 if (len > 0)
1492 return native_interpret_expr (ref->type, buf, len);
a5650c86 1493 }
1494 }
c26ce8a9 1495 }
1496
1497 return NULL_TREE;
1498}
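/* Illustrative sketch (editorial example, assumed initializer): for

     static const int tab[4] = { 1, 2, 3, 5 };

   a lookup of tab[2] takes the constant-initializer path above; the
   accumulated byte offset selects the third element from the decl's
   ctor and the reference folds to the constant 3.  */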
1499
292237f3 1500/* Return true if OPS contain a storage order barrier. */
1501
1502static bool
1503contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1504{
1505 vn_reference_op_t op;
1506 unsigned i;
1507
1508 FOR_EACH_VEC_ELT (ops, i, op)
1509 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1510 return true;
1511
1512 return false;
1513}
1514
9e9e6e3e 1515/* Transform any SSA_NAME's in a vector of vn_reference_op_s
1516 structures into their value numbers. This is done in-place, and
882f8b55 1517 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1518 whether any operands were valueized. */
9e9e6e3e 1519
f1f41a6c 1520static vec<vn_reference_op_s>
51e85e64 1521valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1522 bool with_avail = false)
9e9e6e3e 1523{
1524 vn_reference_op_t vro;
d12dee9c 1525 unsigned int i;
9e9e6e3e 1526
882f8b55 1527 *valueized_anything = false;
1528
f1f41a6c 1529 FOR_EACH_VEC_ELT (orig, i, vro)
9e9e6e3e 1530 {
1531 if (vro->opcode == SSA_NAME
1532 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
f6c33c78 1533 {
51e85e64 1534 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
882f8b55 1535 if (tem != vro->op0)
1536 {
1537 *valueized_anything = true;
1538 vro->op0 = tem;
1539 }
f6c33c78 1540 /* If it transforms from an SSA_NAME to a constant, update
1541 the opcode. */
1542 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1543 vro->opcode = TREE_CODE (vro->op0);
1544 }
d12dee9c 1545 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
882f8b55 1546 {
51e85e64 1547 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
882f8b55 1548 if (tem != vro->op1)
1549 {
1550 *valueized_anything = true;
1551 vro->op1 = tem;
1552 }
1553 }
d12dee9c 1554 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
882f8b55 1555 {
51e85e64 1556 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
882f8b55 1557 if (tem != vro->op2)
1558 {
1559 *valueized_anything = true;
1560 vro->op2 = tem;
1561 }
1562 }
182cf5a9 1563 /* If it transforms from an SSA_NAME to an address, fold with
1564 a preceding indirect reference. */
1565 if (i > 0
1566 && vro->op0
1567 && TREE_CODE (vro->op0) == ADDR_EXPR
f1f41a6c 1568 && orig[i - 1].opcode == MEM_REF)
11beb29c 1569 {
1570 if (vn_reference_fold_indirect (&orig, &i))
1571 *valueized_anything = true;
1572 }
37b80bde 1573 else if (i > 0
1574 && vro->opcode == SSA_NAME
f1f41a6c 1575 && orig[i - 1].opcode == MEM_REF)
11beb29c 1576 {
1577 if (vn_reference_maybe_forwprop_address (&orig, &i))
1578 *valueized_anything = true;
1579 }
182cf5a9 1580 /* If it transforms a non-constant ARRAY_REF into a constant
1581 one, adjust the constant offset. */
1582 else if (vro->opcode == ARRAY_REF
fe60c82c 1583 && known_eq (vro->off, -1)
1584 && poly_int_tree_p (vro->op0)
1585 && poly_int_tree_p (vro->op1)
182cf5a9 1586 && TREE_CODE (vro->op2) == INTEGER_CST)
1587 {
fe60c82c 1588 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1589 - wi::to_poly_offset (vro->op1))
1590 * wi::to_offset (vro->op2)
1591 * vn_ref_op_align_unit (vro));
1592 off.to_shwi (&vro->off);
182cf5a9 1593 }
9e9e6e3e 1594 }
1595
1596 return orig;
1597}
1598
f1f41a6c 1599static vec<vn_reference_op_s>
1600valueize_refs (vec<vn_reference_op_s> orig)
882f8b55 1601{
1602 bool tem;
1603 return valueize_refs_1 (orig, &tem);
1604}
1605
f1f41a6c 1606static vec<vn_reference_op_s> shared_lookup_references;
d12dee9c 1607
1608/* Create a vector of vn_reference_op_s structures from REF, a
1609 REFERENCE_CLASS_P tree. The vector is shared among all callers of
882f8b55 1610 this function. *VALUEIZED_ANYTHING will specify whether any
1611 operands were valueized. */
d12dee9c 1612
f1f41a6c 1613static vec<vn_reference_op_s>
882f8b55 1614valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
d12dee9c 1615{
1616 if (!ref)
1e094109 1617 return vNULL;
f1f41a6c 1618 shared_lookup_references.truncate (0);
d12dee9c 1619 copy_reference_ops_from_ref (ref, &shared_lookup_references);
882f8b55 1620 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1621 valueized_anything);
d12dee9c 1622 return shared_lookup_references;
1623}
1624
1625/* Create a vector of vn_reference_op_s structures from CALL, a
1626 call statement. The vector is shared among all callers of
1627 this function. */
1628
f1f41a6c 1629static vec<vn_reference_op_s>
1a91d914 1630valueize_shared_reference_ops_from_call (gcall *call)
d12dee9c 1631{
1632 if (!call)
1e094109 1633 return vNULL;
f1f41a6c 1634 shared_lookup_references.truncate (0);
d12dee9c 1635 copy_reference_ops_from_call (call, &shared_lookup_references);
1636 shared_lookup_references = valueize_refs (shared_lookup_references);
1637 return shared_lookup_references;
1638}
1639
404d6be4 1640/* Lookup an SCCVN reference operation VR in the current hash table.
1641 Returns the resulting value number if it exists in the hash table,
f6c33c78 1642 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1643 vn_reference_t stored in the hashtable if something is found. */
404d6be4 1644
1645static tree
f6c33c78 1646vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
404d6be4 1647{
3e871d4d 1648 vn_reference_s **slot;
404d6be4 1649 hashval_t hash;
1650
1651 hash = vr->hashcode;
c42ece58 1652 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
404d6be4 1653 if (slot)
f6c33c78 1654 {
1655 if (vnresult)
1656 *vnresult = (vn_reference_t)*slot;
1657 return ((vn_reference_t)*slot)->result;
1658 }
48e1416a 1659
404d6be4 1660 return NULL_TREE;
1661}
1662
95579ce1 1663
1664/* Partial definition tracking support. */
1665
1666struct pd_range
1667{
1668 HOST_WIDE_INT offset;
1669 HOST_WIDE_INT size;
1670};
1671
1672struct pd_data
1673{
1674 tree rhs;
1675 HOST_WIDE_INT offset;
1676 HOST_WIDE_INT size;
1677};
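/* Illustrative sketch (editorial example): if an 8-byte read is reached
   by two adjacent 4-byte stores covering bytes [0, 4) and [4, 8) of the
   read, each store is recorded as a pd_data entry; once the recorded
   ranges cover the whole read, the encoded bytes of the individual
   right-hand sides are combined into a single value (see
   vn_walk_cb_data::push_partial_def below).  */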
1678
1679/* Context for alias walking. */
1680
f52fbd56 1681struct vn_walk_cb_data
1682{
f359a95b 1683 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
95579ce1 1684 vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
f359a95b 1685 : vr (vr_), last_vuse_ptr (last_vuse_ptr_),
1686 vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_), known_ranges (NULL)
1687 {
1688 ao_ref_init (&orig_ref, orig_ref_);
1689 }
95579ce1 1690 ~vn_walk_cb_data ();
1691 void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
7dde7294 1692
f52fbd56 1693 vn_reference_t vr;
f359a95b 1694 ao_ref orig_ref;
f52fbd56 1695 tree *last_vuse_ptr;
1696 vn_lookup_kind vn_walk_kind;
7dde7294 1697 bool tbaa_p;
95579ce1 1698
1699 /* The VDEFs of partial defs we come along. */
1700 auto_vec<pd_data, 2> partial_defs;
1701 /* The first defs range to avoid splay tree setup in most cases. */
1702 pd_range first_range;
1703 tree first_vuse;
1704 splay_tree known_ranges;
1705 obstack ranges_obstack;
f52fbd56 1706};
1707
95579ce1 1708vn_walk_cb_data::~vn_walk_cb_data ()
1709{
1710 if (known_ranges)
1711 {
1712 splay_tree_delete (known_ranges);
1713 obstack_free (&ranges_obstack, NULL);
1714 }
1715}
1716
1717/* pd_range splay-tree helpers. */
1718
1719static int
1720pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1721{
1722 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1723 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1724 if (offset1 < offset2)
1725 return -1;
1726 else if (offset1 > offset2)
1727 return 1;
1728 return 0;
1729}
1730
1731static void *
1732pd_tree_alloc (int size, void *data_)
1733{
1734 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1735 return obstack_alloc (&data->ranges_obstack, size);
1736}
1737
1738static void
1739pd_tree_dealloc (void *, void *)
1740{
1741}
1742
1743/* Push PD to the vector of partial definitions returning a
1744 value when we are ready to combine things with VUSE and MAXSIZEI,
 1745   NULL when we want to continue looking for partial defs, or -1
1746 on failure. */
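/* For example, for an 8-byte access a first partial def covering
   bytes [0, 4) is recorded in first_range; a later def covering
   bytes [2, 8) is merged with it through the splay tree, and the
   combined range [0, 8) then covers the whole access, at which point
   all recorded defs are natively encoded and combined. Defs are
   applied in reverse order of discovery, so the definition closest
   to the use prevails on overlapping bytes. */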
1747
1748void *
1749vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
1750 HOST_WIDE_INT maxsizei)
1751{
1752 if (partial_defs.is_empty ())
1753 {
1754 partial_defs.safe_push (pd);
1755 first_range.offset = pd.offset;
1756 first_range.size = pd.size;
1757 first_vuse = vuse;
1758 last_vuse_ptr = NULL;
6f7501ec 1759 /* Continue looking for partial defs. */
1760 return NULL;
1761 }
1762
1763 if (!known_ranges)
1764 {
1765 /* ??? Optimize the case where the 2nd partial def completes things. */
1766 gcc_obstack_init (&ranges_obstack);
1767 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1768 pd_tree_alloc,
1769 pd_tree_dealloc, this);
1770 splay_tree_insert (known_ranges,
1771 (splay_tree_key)&first_range.offset,
1772 (splay_tree_value)&first_range);
1773 }
1774
1775 pd_range newr = { pd.offset, pd.size };
1776 splay_tree_node n;
1777 pd_range *r;
1778 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1779 HOST_WIDE_INT loffset = newr.offset + 1;
1780 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1781 && ((r = (pd_range *)n->value), true)
1782 && ranges_known_overlap_p (r->offset, r->size + 1,
1783 newr.offset, newr.size))
1784 {
1785 /* Ignore partial defs already covered. */
1786 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1787 return NULL;
1788 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
95579ce1 1789 }
1790 else
1791 {
6f7501ec 1792 /* newr.offset wasn't covered yet, insert the range. */
1793 r = XOBNEW (&ranges_obstack, pd_range);
1794 *r = newr;
1795 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1796 (splay_tree_value)r);
1797 }
1798 /* Merge r which now contains newr and is a member of the splay tree with
1799 adjacent overlapping ranges. */
1800 pd_range *rafter;
1801 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1802 && ((rafter = (pd_range *)n->value), true)
1803 && ranges_known_overlap_p (r->offset, r->size + 1,
1804 rafter->offset, rafter->size))
1805 {
1806 r->size = MAX (r->offset + r->size,
1807 rafter->offset + rafter->size) - r->offset;
1808 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1809 }
1810 partial_defs.safe_push (pd);
1811
 1812   /* Now we have merged newr into the range tree.  When we have covered
 1813      the whole access [0, maxsizei / BITS_PER_UNIT) the tree will contain
 1814      exactly one node with the desired properties and it will be 'r'.  */
1815 if (!known_subrange_p (0, maxsizei / BITS_PER_UNIT, r->offset, r->size))
1816 /* Continue looking for partial defs. */
1817 return NULL;
1818
1819 /* Now simply native encode all partial defs in reverse order. */
1820 unsigned ndefs = partial_defs.length ();
1821 /* We support up to 512-bit values (for V8DFmode). */
1822 unsigned char buffer[64];
1823 int len;
1824
1825 while (!partial_defs.is_empty ())
1826 {
1827 pd_data pd = partial_defs.pop ();
1828 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1829 /* Empty CONSTRUCTOR. */
1830 memset (buffer + MAX (0, pd.offset),
9807f911 1831 0, MIN ((HOST_WIDE_INT)sizeof (buffer) - MAX (0, pd.offset),
2e5a3564 1832 pd.size + MIN (0, pd.offset)));
6f7501ec 1833 else
95579ce1 1834 {
6f7501ec 1835 unsigned pad = 0;
1836 if (BYTES_BIG_ENDIAN
1837 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (pd.rhs))))
95579ce1 1838 {
6f7501ec 1839 /* On big-endian the padding is at the 'front' so just skip
1840 the initial bytes. */
1841 fixed_size_mode mode
1842 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (pd.rhs)));
1843 pad = GET_MODE_SIZE (mode) - pd.size;
95579ce1 1844 }
6f7501ec 1845 len = native_encode_expr (pd.rhs, buffer + MAX (0, pd.offset),
9807f911 1846 sizeof (buffer) - MAX (0, pd.offset),
6f7501ec 1847 MAX (0, -pd.offset) + pad);
1848 if (len <= 0 || len < (pd.size - MAX (0, -pd.offset)))
95579ce1 1849 {
6f7501ec 1850 if (dump_file && (dump_flags & TDF_DETAILS))
1851 fprintf (dump_file, "Failed to encode %u "
1852 "partial definitions\n", ndefs);
1853 return (void *)-1;
95579ce1 1854 }
6f7501ec 1855 }
1856 }
95579ce1 1857
6f7501ec 1858 tree type = vr->type;
1859 /* Make sure to interpret in a type that has a range covering the whole
1860 access size. */
1861 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
1862 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
1863 tree val = native_interpret_expr (type, buffer, maxsizei / BITS_PER_UNIT);
 1864   /* If we chop off bits because the type's precision doesn't match the memory
1865 access size this is ok when optimizing reads but not when called from
1866 the DSE code during elimination. */
1867 if (val && type != vr->type)
1868 {
1869 if (! int_fits_type_p (val, vr->type))
1870 val = NULL_TREE;
1871 else
1872 val = fold_convert (vr->type, val);
1873 }
95579ce1 1874
6f7501ec 1875 if (val)
1876 {
1877 if (dump_file && (dump_flags & TDF_DETAILS))
1878 fprintf (dump_file,
1879 "Successfully combined %u partial definitions\n", ndefs);
1880 return vn_reference_lookup_or_insert_for_pieces
1881 (first_vuse, vr->set, vr->type, vr->operands, val);
1882 }
1883 else
1884 {
1885 if (dump_file && (dump_flags & TDF_DETAILS))
1886 fprintf (dump_file,
1887 "Failed to interpret %u encoded partial definitions\n", ndefs);
1888 return (void *)-1;
95579ce1 1889 }
95579ce1 1890}
1891
dd277d48 1892/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t in
 1893   DATA_ with the current VUSE and performs the expression lookup.  */
1894
1895static void *
f52fbd56 1896vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
dd277d48 1897{
f52fbd56 1898 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1899 vn_reference_t vr = data->vr;
3e871d4d 1900 vn_reference_s **slot;
dd277d48 1901 hashval_t hash;
1902
95579ce1 1903 /* If we have partial definitions recorded we have to go through
1904 vn_reference_lookup_3. */
1905 if (!data->partial_defs.is_empty ())
1906 return NULL;
1907
f52fbd56 1908 if (data->last_vuse_ptr)
1909 *data->last_vuse_ptr = vuse;
4a83fadb 1910
dd277d48 1911 /* Fixup vuse and hash. */
84cd88b5 1912 if (vr->vuse)
1913 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
b8a2283e 1914 vr->vuse = vuse_ssa_val (vuse);
84cd88b5 1915 if (vr->vuse)
1916 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
dd277d48 1917
1918 hash = vr->hashcode;
c42ece58 1919 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
dd277d48 1920 if (slot)
1921 return *slot;
48e1416a 1922
dd277d48 1923 return NULL;
1924}
f6c33c78 1925
01fd46e3 1926/* Lookup an existing or insert a new vn_reference entry into the
 1927   value table for the VUSE, SET, TYPE, OPERANDS reference that
a4f94d42 1928   has the value VALUE, which is either a constant or an SSA name.  */
01fd46e3 1929
1930static vn_reference_t
a4f94d42 1931vn_reference_lookup_or_insert_for_pieces (tree vuse,
1932 alias_set_type set,
1933 tree type,
f1f41a6c 1934 vec<vn_reference_op_s,
1935 va_heap> operands,
a4f94d42 1936 tree value)
01fd46e3 1937{
9251bb6f 1938 vn_reference_s vr1;
01fd46e3 1939 vn_reference_t result;
a4f94d42 1940 unsigned value_id;
1cf4fc02 1941 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
01fd46e3 1942 vr1.operands = operands;
1943 vr1.type = type;
1944 vr1.set = set;
1945 vr1.hashcode = vn_reference_compute_hash (&vr1);
1946 if (vn_reference_lookup_1 (&vr1, &result))
1947 return result;
a4f94d42 1948 if (TREE_CODE (value) == SSA_NAME)
1949 value_id = VN_INFO (value)->value_id;
1950 else
1951 value_id = get_or_alloc_constant_value_id (value);
01fd46e3 1952 return vn_reference_insert_pieces (vuse, set, type,
f1f41a6c 1953 operands.copy (), value, value_id);
01fd46e3 1954}
1955
76825907 1956/* Return a value-number for the operation *RES_OP, either by looking up an existing
97f2a90b 1957 value-number for the simplified result or by inserting the operation if
1958 INSERT is true. */
76825907 1959
1960static tree
49446baa 1961vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
76825907 1962{
1963 tree result = NULL_TREE;
1964 /* We will be creating a value number for
9bcd42cc 1965       the operation in *RES_OP.
76825907 1966 So first simplify and lookup this expression to see if it
1967 is already available. */
25650bfa 1968 /* For simplification valueize. */
1969 unsigned i;
1970 for (i = 0; i < res_op->num_ops; ++i)
1971 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
1972 {
1973 tree tem = vn_valueize (res_op->ops[i]);
1974 if (!tem)
1975 break;
1976 res_op->ops[i] = tem;
1977 }
1978 /* If valueization of an operand fails (it is not available), skip
1979 simplification. */
76825907 1980 bool res = false;
25650bfa 1981 if (i == res_op->num_ops)
76825907 1982 {
25650bfa 1983 mprts_hook = vn_lookup_simplify_result;
1984 res = res_op->resimplify (NULL, vn_valueize);
1985 mprts_hook = NULL;
76825907 1986 }
76825907 1987 gimple *new_stmt = NULL;
1988 if (res
49446baa 1989 && gimple_simplified_result_is_gimple_val (res_op))
ab40e20b 1990 {
1991 /* The expression is already available. */
1992 result = res_op->ops[0];
1993 /* Valueize it, simplification returns sth in AVAIL only. */
1994 if (TREE_CODE (result) == SSA_NAME)
1995 result = SSA_VAL (result);
1996 }
76825907 1997 else
1998 {
49446baa 1999 tree val = vn_lookup_simplify_result (res_op);
97f2a90b 2000 if (!val && insert)
76825907 2001 {
2002 gimple_seq stmts = NULL;
49446baa 2003 result = maybe_push_res_to_seq (res_op, &stmts);
76825907 2004 if (result)
2005 {
2006 gcc_assert (gimple_seq_singleton_p (stmts));
2007 new_stmt = gimple_seq_first_stmt (stmts);
2008 }
2009 }
2010 else
2011 /* The expression is already available. */
2012 result = val;
2013 }
2014 if (new_stmt)
2015 {
2016 /* The expression is not yet available, value-number lhs to
2017 the new SSA_NAME we created. */
2018 /* Initialize value-number information properly. */
6f5bdb34 2019 vn_ssa_aux_t result_info = VN_INFO (result);
2020 result_info->valnum = result;
2021 result_info->value_id = get_next_value_id ();
2022 result_info->visited = 1;
76825907 2023 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2024 new_stmt);
6f5bdb34 2025 result_info->needs_insertion = true;
9bcd42cc 2026 /* ??? PRE phi-translation inserts NARYs without corresponding
2027 SSA name result. Re-use those but set their result according
2028 to the stmt we just built. */
2029 vn_nary_op_t nary = NULL;
2030 vn_nary_op_lookup_stmt (new_stmt, &nary);
2031 if (nary)
2032 {
51e85e64 2033 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2034 nary->u.result = gimple_assign_lhs (new_stmt);
9bcd42cc 2035 }
76825907 2036 /* As all "inserted" statements are singleton SCCs, insert
2037 to the valid table. This is strictly needed to
2038 avoid re-generating new value SSA_NAMEs for the same
2039 expression during SCC iteration over and over (the
2040 optimistic table gets cleared after each iteration).
2041 We do not need to insert into the optimistic table, as
2042 lookups there will fall back to the valid table. */
c42ece58 2043 else
76825907 2044 {
ca5aa39a 2045 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2046 vn_nary_op_t vno1
2047 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
6f5bdb34 2048 vno1->value_id = result_info->value_id;
ca5aa39a 2049 vno1->length = length;
51e85e64 2050 vno1->predicated_values = 0;
2051 vno1->u.result = result;
ca5aa39a 2052 init_vn_nary_op_from_stmt (vno1, new_stmt);
2053 vn_nary_op_insert_into (vno1, valid_info->nary, true);
c42ece58 2054 /* Also do not link it into the undo chain. */
2055 last_inserted_nary = vno1->next;
2056 vno1->next = (vn_nary_op_t)(void *)-1;
76825907 2057 }
76825907 2058 if (dump_file && (dump_flags & TDF_DETAILS))
2059 {
2060 fprintf (dump_file, "Inserting name ");
1ffa4346 2061 print_generic_expr (dump_file, result);
76825907 2062 fprintf (dump_file, " for expression ");
2063 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2064 fprintf (dump_file, "\n");
2065 }
2066 }
2067 return result;
2068}
2069
97f2a90b 2070/* Return a value-number for the operation *RES_OP, either by looking up an existing
2071 value-number for the simplified result or by inserting the operation. */
2072
2073static tree
49446baa 2074vn_nary_build_or_lookup (gimple_match_op *res_op)
97f2a90b 2075{
49446baa 2076 return vn_nary_build_or_lookup_1 (res_op, true);
97f2a90b 2077}
2078
 2079/* Try to simplify the n-ary operation NARY and return
2080 its value if present. */
2081
2082tree
2083vn_nary_simplify (vn_nary_op_t nary)
2084{
49446baa 2085 if (nary->length > gimple_match_op::MAX_NUM_OPS)
97f2a90b 2086 return NULL_TREE;
d8483dd1 2087 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2088 nary->type, nary->length);
49446baa 2089 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2090 return vn_nary_build_or_lookup_1 (&op, false);
97f2a90b 2091}
2092
6b8ca7f3 2093/* Elimination engine. */
2094
2095class eliminate_dom_walker : public dom_walker
2096{
2097public:
2098 eliminate_dom_walker (cdi_direction, bitmap);
2099 ~eliminate_dom_walker ();
2100
2101 virtual edge before_dom_children (basic_block);
2102 virtual void after_dom_children (basic_block);
2103
2104 virtual tree eliminate_avail (basic_block, tree op);
2105 virtual void eliminate_push_avail (basic_block, tree op);
2106 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2107
2108 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2109
2110 unsigned eliminate_cleanup (bool region_p = false);
2111
2112 bool do_pre;
2113 unsigned int el_todo;
2114 unsigned int eliminations;
2115 unsigned int insertions;
2116
2117 /* SSA names that had their defs inserted by PRE if do_pre. */
2118 bitmap inserted_exprs;
2119
2120 /* Blocks with statements that have had their EH properties changed. */
2121 bitmap need_eh_cleanup;
2122
2123 /* Blocks with statements that have had their AB properties changed. */
2124 bitmap need_ab_cleanup;
2125
2126 /* Local state for the eliminate domwalk. */
2127 auto_vec<gimple *> to_remove;
2128 auto_vec<gimple *> to_fixup;
2129 auto_vec<tree> avail;
2130 auto_vec<tree> avail_stack;
2131};
2132
2133/* Adaptor to the elimination engine using RPO availability. */
2134
2135class rpo_elim : public eliminate_dom_walker
2136{
2137public:
2138 rpo_elim(basic_block entry_)
21ffc389 2139 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2140 m_avail_freelist (NULL) {}
6b8ca7f3 2141
2142 virtual tree eliminate_avail (basic_block, tree op);
2143
2144 virtual void eliminate_push_avail (basic_block, tree);
2145
2146 basic_block entry;
21ffc389 2147 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2148 obstack. */
2149 vn_avail *m_avail_freelist;
6b8ca7f3 2150};
2151
2152/* Global RPO state for access from hooks. */
2153static rpo_elim *rpo_avail;
51e85e64 2154basic_block vn_context_bb;
97f2a90b 2155
7dde7294 2156/* Return true if BASE1 and BASE2 can be adjusted so they have the
2157 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2158 Otherwise return false. */
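/* For example the MEM_REF bases MEM[p + 4] and MEM[p + 8] are both
   adjusted to the common base 'p' by adding the constant offsets,
   converted to bits, to *OFFSET1 and *OFFSET2. */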
2159
2160static bool
2161adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2162 tree base2, poly_int64 *offset2)
2163{
2164 poly_int64 soff;
2165 if (TREE_CODE (base1) == MEM_REF
2166 && TREE_CODE (base2) == MEM_REF)
2167 {
2168 if (mem_ref_offset (base1).to_shwi (&soff))
2169 {
2170 base1 = TREE_OPERAND (base1, 0);
2171 *offset1 += soff * BITS_PER_UNIT;
2172 }
2173 if (mem_ref_offset (base2).to_shwi (&soff))
2174 {
2175 base2 = TREE_OPERAND (base2, 0);
2176 *offset2 += soff * BITS_PER_UNIT;
2177 }
2178 return operand_equal_p (base1, base2, 0);
2179 }
2180 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2181}
2182
d8021dea 2183/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2184 from the statement defining VUSE and if not successful tries to
9d75589a 2185   translate *REF and the reference in DATA_ through an aggregate copy at the
dddafd79 2186   definition of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform the
 2187   translation.  If only disambiguation was performed then
2188 *DISAMBIGUATE_ONLY is set to true. */
d8021dea 2189
2190static void *
f52fbd56 2191vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
dddafd79 2192 bool *disambiguate_only)
d8021dea 2193{
f52fbd56 2194 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2195 vn_reference_t vr = data->vr;
42acab1c 2196 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
dddafd79 2197 tree base = ao_ref_base (ref);
fe60c82c 2198 HOST_WIDE_INT offseti, maxsizei;
ce7bcd95 2199 static vec<vn_reference_op_s> lhs_ops;
66b86a74 2200 ao_ref lhs_ref;
2201 bool lhs_ref_ok = false;
fe60c82c 2202 poly_int64 copy_size;
d8021dea 2203
180572f4 2204 /* First try to disambiguate after value-replacing in the definitions LHS. */
2205 if (is_gimple_assign (def_stmt))
2206 {
2207 tree lhs = gimple_assign_lhs (def_stmt);
b11771e1 2208 bool valueized_anything = false;
66b86a74 2209 /* Avoid re-allocation overhead. */
f1f41a6c 2210 lhs_ops.truncate (0);
51e85e64 2211 basic_block saved_rpo_bb = vn_context_bb;
2212 vn_context_bb = gimple_bb (def_stmt);
66b86a74 2213 copy_reference_ops_from_ref (lhs, &lhs_ops);
51e85e64 2214 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2215 vn_context_bb = saved_rpo_bb;
b11771e1 2216 if (valueized_anything)
2217 {
2218 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
2219 get_alias_set (lhs),
db133a52 2220 TREE_TYPE (lhs), lhs_ops);
b11771e1 2221 if (lhs_ref_ok
7dde7294 2222 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
dddafd79 2223 {
2224 *disambiguate_only = true;
2225 return NULL;
2226 }
b11771e1 2227 }
2228 else
2229 {
2230 ao_ref_init (&lhs_ref, lhs);
2231 lhs_ref_ok = true;
2232 }
3ab76bd4 2233
f359a95b 2234 /* Besides valueizing the LHS we can also use access-path based
2235 disambiguation on the original non-valueized ref. */
2236 if (!ref->ref
2237 && lhs_ref_ok
2238 && data->orig_ref.ref)
2239 {
2240 /* We want to use the non-valueized LHS for this, but avoid redundant
2241 work. */
2242 ao_ref *lref = &lhs_ref;
2243 ao_ref lref_alt;
2244 if (valueized_anything)
2245 {
2246 ao_ref_init (&lref_alt, lhs);
2247 lref = &lref_alt;
2248 }
2249 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2250 {
2251 *disambiguate_only = true;
2252 return NULL;
2253 }
2254 }
2255
3ab76bd4 2256 /* If we reach a clobbering statement try to skip it and see if
2257 we find a VN result with exactly the same value as the
2258 possible clobber. In this case we can ignore the clobber
886e6c18 2259 and return the found value. */
2260 if (is_gimple_reg_type (TREE_TYPE (lhs))
b0bef750 2261 && types_compatible_p (TREE_TYPE (lhs), vr->type)
886e6c18 2262 && ref->ref)
3ab76bd4 2263 {
f52fbd56 2264 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
3ab76bd4 2265 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
f52fbd56 2266 data->last_vuse_ptr = NULL;
3ab76bd4 2267 tree saved_vuse = vr->vuse;
2268 hashval_t saved_hashcode = vr->hashcode;
f52fbd56 2269 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
3ab76bd4 2270 /* Need to restore vr->vuse and vr->hashcode. */
2271 vr->vuse = saved_vuse;
2272 vr->hashcode = saved_hashcode;
f52fbd56 2273 data->last_vuse_ptr = saved_last_vuse_ptr;
3ab76bd4 2274 if (res && res != (void *)-1)
2275 {
2276 vn_reference_t vnresult = (vn_reference_t) res;
03bcc626 2277 tree rhs = gimple_assign_rhs1 (def_stmt);
2278 if (TREE_CODE (rhs) == SSA_NAME)
2279 rhs = SSA_VAL (rhs);
3ab76bd4 2280 if (vnresult->result
886e6c18 2281 && operand_equal_p (vnresult->result, rhs, 0)
2282 /* We have to honor our promise about union type punning
2283 and also support arbitrary overlaps with
2284 -fno-strict-aliasing. So simply resort to alignment to
2285 rule out overlaps. Do this check last because it is
2286 quite expensive compared to the hash-lookup above. */
2287 && multiple_p (get_object_alignment (ref->ref), ref->size)
2288 && multiple_p (get_object_alignment (lhs), ref->size))
3ab76bd4 2289 return res;
2290 }
2291 }
180572f4 2292 }
38168b16 2293 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2294 && gimple_call_num_args (def_stmt) <= 4)
2295 {
 2296      /* For builtin calls valueize their arguments and call the
2297 alias oracle again. Valueization may improve points-to
2298 info of pointers and constify size and position arguments.
2299 Originally this was motivated by PR61034 which has
2300 conditional calls to free falsely clobbering ref because
2301 of imprecise points-to info of the argument. */
2302 tree oldargs[4];
2eb57bc3 2303 bool valueized_anything = false;
38168b16 2304 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2305 {
2306 oldargs[i] = gimple_call_arg (def_stmt, i);
1253e89f 2307 tree val = vn_valueize (oldargs[i]);
2308 if (val != oldargs[i])
38168b16 2309 {
1253e89f 2310 gimple_call_set_arg (def_stmt, i, val);
38168b16 2311 valueized_anything = true;
2312 }
2313 }
2314 if (valueized_anything)
2315 {
1a91d914 2316 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2317 ref);
38168b16 2318 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2319 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2320 if (!res)
dddafd79 2321 {
2322 *disambiguate_only = true;
2323 return NULL;
2324 }
38168b16 2325 }
2326 }
2327
7dde7294 2328 /* If we are looking for redundant stores do not create new hashtable
2329 entries from aliasing defs with made up alias-sets. */
2330 if (*disambiguate_only || !data->tbaa_p)
38168b16 2331 return (void *)-1;
180572f4 2332
d8021dea 2333 /* If we cannot constrain the size of the reference we cannot
2334 test if anything kills it. */
fe60c82c 2335 if (!ref->max_size_known_p ())
d8021dea 2336 return (void *)-1;
2337
fe60c82c 2338 poly_int64 offset = ref->offset;
2339 poly_int64 maxsize = ref->max_size;
2340
3c25489e 2341 /* We can't deduce anything useful from clobbers. */
2342 if (gimple_clobber_p (def_stmt))
2343 return (void *)-1;
2344
d8021dea 2345 /* def_stmt may-defs *ref. See if we can derive a value for *ref
3c25489e 2346 from that definition.
d8021dea 2347 1) Memset. */
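   /* For example a load of an int that is fully covered by a preceding
      memset (p, 0, n) can be value-numbered to zero; when such a memset
      clears only part of the load, the cleared bytes are recorded as a
      partial definition instead. */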
3918bd18 2348 if (is_gimple_reg_type (vr->type)
77c7051b 2349 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
5bb50c85 2350 && (integer_zerop (gimple_call_arg (def_stmt, 1))
461814f5 2351 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2352 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
f927bad4 2353 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
f927bad4 2354 && offset.is_constant (&offseti)
2355 && offseti % BITS_PER_UNIT == 0))
fe60c82c 2356 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
5bb50c85 2357 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2358 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
d8021dea 2359 {
d8021dea 2360 tree base2;
f3c2a387 2361 poly_int64 offset2, size2, maxsize2;
292237f3 2362 bool reverse;
5bb50c85 2363 tree ref2 = gimple_call_arg (def_stmt, 0);
2364 if (TREE_CODE (ref2) == SSA_NAME)
2365 {
2366 ref2 = SSA_VAL (ref2);
2367 if (TREE_CODE (ref2) == SSA_NAME
2368 && (TREE_CODE (base) != MEM_REF
2369 || TREE_OPERAND (base, 0) != ref2))
2370 {
2371 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2372 if (gimple_assign_single_p (def_stmt)
2373 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2374 ref2 = gimple_assign_rhs1 (def_stmt);
2375 }
2376 }
2377 if (TREE_CODE (ref2) == ADDR_EXPR)
2378 {
2379 ref2 = TREE_OPERAND (ref2, 0);
2380 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2381 &reverse);
2382 if (!known_size_p (maxsize2)
13dd5f09 2383 || !known_eq (maxsize2, size2)
5bb50c85 2384 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2385 return (void *)-1;
2386 }
2387 else if (TREE_CODE (ref2) == SSA_NAME)
2388 {
2389 poly_int64 soff;
2390 if (TREE_CODE (base) != MEM_REF
2391 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2392 return (void *)-1;
2393 offset += soff;
2394 offset2 = 0;
2395 if (TREE_OPERAND (base, 0) != ref2)
2396 {
2397 gimple *def = SSA_NAME_DEF_STMT (ref2);
2398 if (is_gimple_assign (def)
2399 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2400 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2401 && poly_int_tree_p (gimple_assign_rhs2 (def))
2402 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2403 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2404 {
2405 ref2 = gimple_assign_rhs1 (def);
2406 if (TREE_CODE (ref2) == SSA_NAME)
2407 ref2 = SSA_VAL (ref2);
2408 }
2409 else
2410 return (void *)-1;
2411 }
2412 }
2413 else
2414 return (void *)-1;
fe60c82c 2415 tree len = gimple_call_arg (def_stmt, 2);
95579ce1 2416 HOST_WIDE_INT leni, offset2i, offseti;
2417 if (data->partial_defs.is_empty ()
2418 && known_subrange_p (offset, maxsize, offset2,
2419 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
3918bd18 2420 {
5bb50c85 2421 tree val;
2422 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2423 val = build_zero_cst (vr->type);
461814f5 2424 else if (INTEGRAL_TYPE_P (vr->type)
2425 && known_eq (ref->size, 8))
f927bad4 2426 {
d8483dd1 2427 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2428 vr->type, gimple_call_arg (def_stmt, 1));
49446baa 2429 val = vn_nary_build_or_lookup (&res_op);
f927bad4 2430 if (!val
2431 || (TREE_CODE (val) == SSA_NAME
2432 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2433 return (void *)-1;
2434 }
461814f5 2435 else
2436 {
2437 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2438 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2439 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2440 len);
2441 val = native_interpret_expr (vr->type, buf, len);
2442 if (!val)
2443 return (void *)-1;
2444 }
a4f94d42 2445 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 2446 (vuse, vr->set, vr->type, vr->operands, val);
3918bd18 2447 }
95579ce1 2448 /* For now handle clearing memory with partial defs. */
e479e8cd 2449 else if (known_eq (ref->size, maxsize)
2450 && integer_zerop (gimple_call_arg (def_stmt, 1))
95579ce1 2451 && tree_to_poly_int64 (len).is_constant (&leni)
2452 && offset.is_constant (&offseti)
2453 && offset2.is_constant (&offset2i)
2454 && maxsize.is_constant (&maxsizei))
2455 {
2456 pd_data pd;
2457 pd.rhs = build_constructor (NULL_TREE, NULL);
bd27922c 2458 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
95579ce1 2459 pd.size = leni;
2460 return data->push_partial_def (pd, vuse, maxsizei);
2461 }
d8021dea 2462 }
2463
2464 /* 2) Assignment from an empty CONSTRUCTOR. */
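   /* For example after 'a = {};' a load from a part of 'a' can be
      value-numbered to zero, with partial coverage again going through
      push_partial_def. */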
3918bd18 2465 else if (is_gimple_reg_type (vr->type)
d8021dea 2466 && gimple_assign_single_p (def_stmt)
2467 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2468 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2469 {
c1d5aaed 2470 tree lhs = gimple_assign_lhs (def_stmt);
d8021dea 2471 tree base2;
f3c2a387 2472 poly_int64 offset2, size2, maxsize2;
95579ce1 2473 HOST_WIDE_INT offset2i, size2i;
292237f3 2474 bool reverse;
c1d5aaed 2475 if (lhs_ref_ok)
2476 {
2477 base2 = ao_ref_base (&lhs_ref);
2478 offset2 = lhs_ref.offset;
2479 size2 = lhs_ref.size;
2480 maxsize2 = lhs_ref.max_size;
2481 reverse = reverse_storage_order_for_component_p (lhs);
2482 }
2483 else
2484 base2 = get_ref_base_and_extent (lhs,
2485 &offset2, &size2, &maxsize2, &reverse);
fe60c82c 2486 if (known_size_p (maxsize2)
e9690ec3 2487 && known_eq (maxsize2, size2)
7dde7294 2488 && adjust_offsets_for_equal_base_address (base, &offset,
95579ce1 2489 base2, &offset2))
3918bd18 2490 {
95579ce1 2491 if (data->partial_defs.is_empty ()
2492 && known_subrange_p (offset, maxsize, offset2, size2))
2493 {
2494 tree val = build_zero_cst (vr->type);
2495 return vn_reference_lookup_or_insert_for_pieces
2496 (vuse, vr->set, vr->type, vr->operands, val);
2497 }
e479e8cd 2498 else if (known_eq (ref->size, maxsize)
2499 && maxsize.is_constant (&maxsizei)
95579ce1 2500 && maxsizei % BITS_PER_UNIT == 0
2501 && offset.is_constant (&offseti)
2502 && offseti % BITS_PER_UNIT == 0
2503 && offset2.is_constant (&offset2i)
2504 && offset2i % BITS_PER_UNIT == 0
2505 && size2.is_constant (&size2i)
2506 && size2i % BITS_PER_UNIT == 0)
2507 {
2508 pd_data pd;
2509 pd.rhs = gimple_assign_rhs1 (def_stmt);
2510 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2511 pd.size = size2i / BITS_PER_UNIT;
2512 return data->push_partial_def (pd, vuse, maxsizei);
2513 }
3918bd18 2514 }
d8021dea 2515 }
2516
87b53397 2517  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
2518 routines to extract the assigned bits. */
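   /* For example after '*(int *)p = 0x12345678;' a load of a single
      char from p is computed by natively encoding the constant into a
      byte buffer and interpreting the byte at the proper offset in the
      type of the load. */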
fe60c82c 2519 else if (known_eq (ref->size, maxsize)
87b53397 2520 && is_gimple_reg_type (vr->type)
292237f3 2521 && !contains_storage_order_barrier_p (vr->operands)
87b53397 2522 && gimple_assign_single_p (def_stmt)
76825907 2523 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
fe60c82c 2524 /* native_encode and native_decode operate on arrays of bytes
2525 and so fundamentally need a compile-time size and offset. */
2526 && maxsize.is_constant (&maxsizei)
2527 && maxsizei % BITS_PER_UNIT == 0
2528 && offset.is_constant (&offseti)
2529 && offseti % BITS_PER_UNIT == 0
76825907 2530 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2531 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2532 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
87b53397 2533 {
c1d5aaed 2534 tree lhs = gimple_assign_lhs (def_stmt);
87b53397 2535 tree base2;
7dde7294 2536 poly_int64 offset2, size2, maxsize2;
95579ce1 2537 HOST_WIDE_INT offset2i, size2i;
292237f3 2538 bool reverse;
c1d5aaed 2539 if (lhs_ref_ok)
2540 {
2541 base2 = ao_ref_base (&lhs_ref);
2542 offset2 = lhs_ref.offset;
2543 size2 = lhs_ref.size;
2544 maxsize2 = lhs_ref.max_size;
2545 reverse = reverse_storage_order_for_component_p (lhs);
2546 }
2547 else
2548 base2 = get_ref_base_and_extent (lhs,
2549 &offset2, &size2, &maxsize2, &reverse);
f3c2a387 2550 if (base2
2551 && !reverse
7dde7294 2552 && known_eq (maxsize2, size2)
2553 && multiple_p (size2, BITS_PER_UNIT)
2554 && multiple_p (offset2, BITS_PER_UNIT)
2555 && adjust_offsets_for_equal_base_address (base, &offset,
2556 base2, &offset2)
2557 && offset.is_constant (&offseti)
2558 && offset2.is_constant (&offset2i)
95579ce1 2559 && size2.is_constant (&size2i))
87b53397 2560 {
95579ce1 2561 if (data->partial_defs.is_empty ()
2562 && known_subrange_p (offseti, maxsizei, offset2, size2))
87b53397 2563 {
95579ce1 2564 /* We support up to 512-bit values (for V8DFmode). */
2565 unsigned char buffer[64];
2566 int len;
2567
2568 tree rhs = gimple_assign_rhs1 (def_stmt);
2569 if (TREE_CODE (rhs) == SSA_NAME)
2570 rhs = SSA_VAL (rhs);
32506afe 2571 unsigned pad = 0;
2572 if (BYTES_BIG_ENDIAN
2573 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2574 {
2575 /* On big-endian the padding is at the 'front' so
2576 just skip the initial bytes. */
2577 fixed_size_mode mode
2578 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2579 pad = GET_MODE_SIZE (mode) - size2i / BITS_PER_UNIT;
2580 }
95579ce1 2581 len = native_encode_expr (rhs,
2582 buffer, sizeof (buffer),
32506afe 2583 ((offseti - offset2i) / BITS_PER_UNIT
2584 + pad));
95579ce1 2585 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3739ac00 2586 {
95579ce1 2587 tree type = vr->type;
2588 /* Make sure to interpret in a type that has a range
2589 covering the whole access size. */
2590 if (INTEGRAL_TYPE_P (vr->type)
2591 && maxsizei != TYPE_PRECISION (vr->type))
2592 type = build_nonstandard_integer_type (maxsizei,
2593 TYPE_UNSIGNED (type));
2594 tree val = native_interpret_expr (type, buffer,
2595 maxsizei / BITS_PER_UNIT);
 2596          /* If we chop off bits because the type's precision doesn't
2597 match the memory access size this is ok when optimizing
2598 reads but not when called from the DSE code during
2599 elimination. */
2600 if (val
2601 && type != vr->type)
2602 {
2603 if (! int_fits_type_p (val, vr->type))
2604 val = NULL_TREE;
2605 else
2606 val = fold_convert (vr->type, val);
2607 }
2608
2609 if (val)
2610 return vn_reference_lookup_or_insert_for_pieces
2611 (vuse, vr->set, vr->type, vr->operands, val);
3739ac00 2612 }
95579ce1 2613 }
2614 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
2615 {
2616 pd_data pd;
2617 tree rhs = gimple_assign_rhs1 (def_stmt);
2618 if (TREE_CODE (rhs) == SSA_NAME)
2619 rhs = SSA_VAL (rhs);
2620 pd.rhs = rhs;
2621 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2622 pd.size = size2i / BITS_PER_UNIT;
2623 return data->push_partial_def (pd, vuse, maxsizei);
87b53397 2624 }
2625 }
2626 }
2627
a3bb56f0 2628  /* 4) Assignment from an SSA name whose definition we may be able
2629 to access pieces from. */
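   /* For example when a register-typed SSA name x_1 is stored to
      memory, a smaller load from that memory can be expressed as
      BIT_FIELD_REF <x_1, size, position> and value-numbered through
      vn_nary_build_or_lookup. */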
fe60c82c 2630 else if (known_eq (ref->size, maxsize)
a3bb56f0 2631 && is_gimple_reg_type (vr->type)
292237f3 2632 && !contains_storage_order_barrier_p (vr->operands)
a3bb56f0 2633 && gimple_assign_single_p (def_stmt)
95579ce1 2634 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2635 /* A subset of partial defs from non-constants can be handled
2636 by for example inserting a CONSTRUCTOR, a COMPLEX_EXPR or
2637 even a (series of) BIT_INSERT_EXPR hoping for simplifications
2638 downstream, not so much for actually doing the insertion. */
2639 && data->partial_defs.is_empty ())
a3bb56f0 2640 {
c1d5aaed 2641 tree lhs = gimple_assign_lhs (def_stmt);
76825907 2642 tree base2;
f3c2a387 2643 poly_int64 offset2, size2, maxsize2;
76825907 2644 bool reverse;
c1d5aaed 2645 if (lhs_ref_ok)
2646 {
2647 base2 = ao_ref_base (&lhs_ref);
2648 offset2 = lhs_ref.offset;
2649 size2 = lhs_ref.size;
2650 maxsize2 = lhs_ref.max_size;
2651 reverse = reverse_storage_order_for_component_p (lhs);
2652 }
2653 else
2654 base2 = get_ref_base_and_extent (lhs,
2655 &offset2, &size2, &maxsize2, &reverse);
c768ada5 2656 tree def_rhs = gimple_assign_rhs1 (def_stmt);
76825907 2657 if (!reverse
f3c2a387 2658 && known_size_p (maxsize2)
2659 && known_eq (maxsize2, size2)
7dde7294 2660 && adjust_offsets_for_equal_base_address (base, &offset,
2661 base2, &offset2)
fe60c82c 2662 && known_subrange_p (offset, maxsize, offset2, size2)
76825907 2663 /* ??? We can't handle bitfield precision extracts without
2664 either using an alternate type for the BIT_FIELD_REF and
2665 then doing a conversion or possibly adjusting the offset
d0abd9e0 2666 according to endianness. */
76825907 2667 && (! INTEGRAL_TYPE_P (vr->type)
fe60c82c 2668 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
c768ada5 2669 && multiple_p (ref->size, BITS_PER_UNIT)
2670 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2671 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
a3bb56f0 2672 {
d8483dd1 2673 gimple_match_op op (gimple_match_cond::UNCOND,
2674 BIT_FIELD_REF, vr->type,
c768ada5 2675 vn_valueize (def_rhs),
49446baa 2676 bitsize_int (ref->size),
2677 bitsize_int (offset - offset2));
2678 tree val = vn_nary_build_or_lookup (&op);
e2392db5 2679 if (val
2680 && (TREE_CODE (val) != SSA_NAME
2681 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
a3bb56f0 2682 {
76825907 2683 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2684 (vuse, vr->set, vr->type, vr->operands, val);
2685 return res;
a3bb56f0 2686 }
2687 }
2688 }
2689
2690 /* 5) For aggregate copies translate the reference through them if
d8021dea 2691 the copy kills ref. */
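   /* For example when looking up a.x across 'a = b;' the reference is
      rewritten in terms of 'b' and the VUSE walk continues, now looking
      for an earlier definition of b.x. */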
f52fbd56 2692 else if (data->vn_walk_kind == VN_WALKREWRITE
8ecc6b38 2693 && gimple_assign_single_p (def_stmt)
d8021dea 2694 && (DECL_P (gimple_assign_rhs1 (def_stmt))
182cf5a9 2695 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
ea95153b 2696 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
d8021dea 2697 {
2698 tree base2;
292237f3 2699 int i, j, k;
c2078b80 2700 auto_vec<vn_reference_op_s> rhs;
d8021dea 2701 vn_reference_op_t vro;
3918bd18 2702 ao_ref r;
d8021dea 2703
66b86a74 2704 if (!lhs_ref_ok)
2705 return (void *)-1;
2706
d8021dea 2707 /* See if the assignment kills REF. */
66b86a74 2708 base2 = ao_ref_base (&lhs_ref);
fe60c82c 2709 if (!lhs_ref.max_size_known_p ()
e5d08bfd 2710 || (base != base2
2711 && (TREE_CODE (base) != MEM_REF
2712 || TREE_CODE (base2) != MEM_REF
2713 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2714 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2715 TREE_OPERAND (base2, 1))))
11beb29c 2716 || !stmt_kills_ref_p (def_stmt, ref))
d8021dea 2717 return (void *)-1;
2718
66b86a74 2719 /* Find the common base of ref and the lhs. lhs_ops already
2720 contains valueized operands for the lhs. */
f1f41a6c 2721 i = vr->operands.length () - 1;
2722 j = lhs_ops.length () - 1;
0d5b37dd 2723 while (j >= 0 && i >= 0
f1f41a6c 2724 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
d8021dea 2725 {
2726 i--;
2727 j--;
2728 }
0d5b37dd 2729
b11771e1 2730 /* ??? The innermost op should always be a MEM_REF and we already
2731 checked that the assignment to the lhs kills vr. Thus for
2732 aggregate copies using char[] types the vn_reference_op_eq
2733 may fail when comparing types for compatibility. But we really
2734 don't care here - further lookups with the rewritten operands
2735 will simply fail if we messed up types too badly. */
fe60c82c 2736 poly_int64 extra_off = 0;
78e606ea 2737 if (j == 0 && i >= 0
f1f41a6c 2738 && lhs_ops[0].opcode == MEM_REF
fe60c82c 2739 && maybe_ne (lhs_ops[0].off, -1))
a5650c86 2740 {
fe60c82c 2741 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
a5650c86 2742 i--, j--;
2743 else if (vr->operands[i].opcode == MEM_REF
fe60c82c 2744 && maybe_ne (vr->operands[i].off, -1))
a5650c86 2745 {
2746 extra_off = vr->operands[i].off - lhs_ops[0].off;
2747 i--, j--;
2748 }
2749 }
b11771e1 2750
d8021dea 2751 /* i now points to the first additional op.
2752 ??? LHS may not be completely contained in VR, one or more
2753 VIEW_CONVERT_EXPRs could be in its way. We could at least
2754 try handling outermost VIEW_CONVERT_EXPRs. */
2755 if (j != -1)
2756 return (void *)-1;
d8021dea 2757
292237f3 2758 /* Punt if the additional ops contain a storage order barrier. */
2759 for (k = i; k >= 0; k--)
2760 {
2761 vro = &vr->operands[k];
2762 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2763 return (void *)-1;
2764 }
2765
d8021dea 2766 /* Now re-write REF to be based on the rhs of the assignment. */
2767 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
a5650c86 2768
2769 /* Apply an extra offset to the inner MEM_REF of the RHS. */
fe60c82c 2770 if (maybe_ne (extra_off, 0))
a5650c86 2771 {
dcb9ba66 2772 if (rhs.length () < 2)
a5650c86 2773 return (void *)-1;
dcb9ba66 2774 int ix = rhs.length () - 2;
2775 if (rhs[ix].opcode != MEM_REF
2776 || known_eq (rhs[ix].off, -1))
2777 return (void *)-1;
2778 rhs[ix].off += extra_off;
2779 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2780 build_int_cst (TREE_TYPE (rhs[ix].op0),
2781 extra_off));
a5650c86 2782 }
2783
d8021dea 2784 /* We need to pre-pend vr->operands[0..i] to rhs. */
2fd3ecff 2785 vec<vn_reference_op_s> old = vr->operands;
f1f41a6c 2786 if (i + 1 + rhs.length () > vr->operands.length ())
b82cf82e 2787 vr->operands.safe_grow (i + 1 + rhs.length ());
d8021dea 2788 else
f1f41a6c 2789 vr->operands.truncate (i + 1 + rhs.length ());
2790 FOR_EACH_VEC_ELT (rhs, j, vro)
2791 vr->operands[i + 1 + j] = *vro;
01fd46e3 2792 vr->operands = valueize_refs (vr->operands);
2fd3ecff 2793 if (old == shared_lookup_references)
2794 shared_lookup_references = vr->operands;
d8021dea 2795 vr->hashcode = vn_reference_compute_hash (vr);
77c7051b 2796
a5650c86 2797 /* Try folding the new reference to a constant. */
2798 tree val = fully_constant_vn_reference_p (vr);
2799 if (val)
ea95153b 2800 {
2801 if (data->partial_defs.is_empty ())
2802 return vn_reference_lookup_or_insert_for_pieces
2803 (vuse, vr->set, vr->type, vr->operands, val);
2804 /* This is the only interesting case for partial-def handling
2805 coming from targets that like to gimplify init-ctors as
2806 aggregate copies from constant data like aarch64 for
2807 PR83518. */
2808 if (maxsize.is_constant (&maxsizei)
2809 && known_eq (ref->size, maxsize))
2810 {
2811 pd_data pd;
2812 pd.rhs = val;
2813 pd.offset = 0;
2814 pd.size = maxsizei / BITS_PER_UNIT;
2815 return data->push_partial_def (pd, vuse, maxsizei);
2816 }
2817 }
2818
 2819      /* Continuing with partial defs isn't easily possible here; we
 2820         would have to find a full def from further lookups.  Probably
2821 not worth the special-casing everywhere. */
2822 if (!data->partial_defs.is_empty ())
2823 return (void *)-1;
a5650c86 2824
77c7051b 2825 /* Adjust *ref from the new operands. */
2826 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2827 return (void *)-1;
2828 /* This can happen with bitfields. */
fe60c82c 2829 if (maybe_ne (ref->size, r.size))
77c7051b 2830 return (void *)-1;
2831 *ref = r;
2832
2833 /* Do not update last seen VUSE after translating. */
f52fbd56 2834 data->last_vuse_ptr = NULL;
f359a95b 2835 /* Invalidate the original access path since it now contains
2836 the wrong base. */
2837 data->orig_ref.ref = NULL_TREE;
77c7051b 2838
2839 /* Keep looking for the adjusted *REF / VR pair. */
2840 return NULL;
2841 }
2842
a3bb56f0 2843 /* 6) For memcpy copies translate the reference through them if
77c7051b 2844 the copy kills ref. */
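   /* Likewise a load from *p after 'memcpy (p, q, n)' that is fully
      contained in the copied region is rewritten as a MEM_REF based on
      'q' and the VUSE walk continues from there. */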
f52fbd56 2845 else if (data->vn_walk_kind == VN_WALKREWRITE
77c7051b 2846 && is_gimple_reg_type (vr->type)
2847 /* ??? Handle BCOPY as well. */
2848 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2849 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2850 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2851 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2852 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2853 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2854 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
95579ce1 2855 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
2856 /* Handling this is more complicated, give up for now. */
2857 && data->partial_defs.is_empty ())
77c7051b 2858 {
2859 tree lhs, rhs;
2860 ao_ref r;
fe60c82c 2861 poly_int64 rhs_offset, lhs_offset;
77c7051b 2862 vn_reference_op_s op;
fe60c82c 2863 poly_uint64 mem_offset;
2864 poly_int64 at, byte_maxsize;
77c7051b 2865
77c7051b 2866 /* Only handle non-variable, addressable refs. */
fe60c82c 2867 if (maybe_ne (ref->size, maxsize)
2868 || !multiple_p (offset, BITS_PER_UNIT, &at)
2869 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
77c7051b 2870 return (void *)-1;
2871
2872 /* Extract a pointer base and an offset for the destination. */
2873 lhs = gimple_call_arg (def_stmt, 0);
2874 lhs_offset = 0;
2875 if (TREE_CODE (lhs) == SSA_NAME)
6325605f 2876 {
51e85e64 2877 lhs = vn_valueize (lhs);
6325605f 2878 if (TREE_CODE (lhs) == SSA_NAME)
2879 {
42acab1c 2880 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
6325605f 2881 if (gimple_assign_single_p (def_stmt)
2882 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2883 lhs = gimple_assign_rhs1 (def_stmt);
2884 }
2885 }
77c7051b 2886 if (TREE_CODE (lhs) == ADDR_EXPR)
2887 {
2888 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
773078cb 2889 &lhs_offset);
77c7051b 2890 if (!tem)
2891 return (void *)-1;
2892 if (TREE_CODE (tem) == MEM_REF
fe60c82c 2893 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
77c7051b 2894 {
2895 lhs = TREE_OPERAND (tem, 0);
6325605f 2896 if (TREE_CODE (lhs) == SSA_NAME)
51e85e64 2897 lhs = vn_valueize (lhs);
fe60c82c 2898 lhs_offset += mem_offset;
77c7051b 2899 }
2900 else if (DECL_P (tem))
2901 lhs = build_fold_addr_expr (tem);
2902 else
2903 return (void *)-1;
2904 }
2905 if (TREE_CODE (lhs) != SSA_NAME
2906 && TREE_CODE (lhs) != ADDR_EXPR)
2907 return (void *)-1;
2908
2909 /* Extract a pointer base and an offset for the source. */
2910 rhs = gimple_call_arg (def_stmt, 1);
2911 rhs_offset = 0;
2912 if (TREE_CODE (rhs) == SSA_NAME)
51e85e64 2913 rhs = vn_valueize (rhs);
77c7051b 2914 if (TREE_CODE (rhs) == ADDR_EXPR)
2915 {
2916 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
773078cb 2917 &rhs_offset);
77c7051b 2918 if (!tem)
2919 return (void *)-1;
2920 if (TREE_CODE (tem) == MEM_REF
fe60c82c 2921 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
77c7051b 2922 {
2923 rhs = TREE_OPERAND (tem, 0);
fe60c82c 2924 rhs_offset += mem_offset;
77c7051b 2925 }
8d803464 2926 else if (DECL_P (tem)
2927 || TREE_CODE (tem) == STRING_CST)
77c7051b 2928 rhs = build_fold_addr_expr (tem);
2929 else
2930 return (void *)-1;
2931 }
2932 if (TREE_CODE (rhs) != SSA_NAME
2933 && TREE_CODE (rhs) != ADDR_EXPR)
2934 return (void *)-1;
2935
77c7051b 2936      /* The bases of the destination and the reference have to agree.  */
77c7051b 2937 if (TREE_CODE (base) == MEM_REF)
19c4d015 2938 {
2939 if (TREE_OPERAND (base, 0) != lhs
fe60c82c 2940 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
19c4d015 2941 return (void *) -1;
fe60c82c 2942 at += mem_offset;
19c4d015 2943 }
2944 else if (!DECL_P (base)
2945 || TREE_CODE (lhs) != ADDR_EXPR
2946 || TREE_OPERAND (lhs, 0) != base)
2947 return (void *)-1;
2948
6325605f 2949 /* If the access is completely outside of the memcpy destination
2950 area there is no aliasing. */
fe60c82c 2951 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
6325605f 2952 return NULL;
2953 /* And the access has to be contained within the memcpy destination. */
fe60c82c 2954 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
77c7051b 2955 return (void *)-1;
2956
2957 /* Make room for 2 operands in the new reference. */
f1f41a6c 2958 if (vr->operands.length () < 2)
77c7051b 2959 {
f1f41a6c 2960 vec<vn_reference_op_s> old = vr->operands;
2961 vr->operands.safe_grow_cleared (2);
b82cf82e 2962 if (old == shared_lookup_references)
2fd3ecff 2963 shared_lookup_references = vr->operands;
77c7051b 2964 }
2965 else
f1f41a6c 2966 vr->operands.truncate (2);
77c7051b 2967
2968 /* The looked-through reference is a simple MEM_REF. */
2969 memset (&op, 0, sizeof (op));
2970 op.type = vr->type;
2971 op.opcode = MEM_REF;
8aaef9d6 2972 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
77c7051b 2973 op.off = at - lhs_offset + rhs_offset;
f1f41a6c 2974 vr->operands[0] = op;
2be90eed 2975 op.type = TREE_TYPE (rhs);
77c7051b 2976 op.opcode = TREE_CODE (rhs);
2977 op.op0 = rhs;
2978 op.off = -1;
f1f41a6c 2979 vr->operands[1] = op;
77c7051b 2980 vr->hashcode = vn_reference_compute_hash (vr);
3918bd18 2981
950db54a 2982 /* Try folding the new reference to a constant. */
2983 tree val = fully_constant_vn_reference_p (vr);
2984 if (val)
2985 return vn_reference_lookup_or_insert_for_pieces
2986 (vuse, vr->set, vr->type, vr->operands, val);
2987
3918bd18 2988 /* Adjust *ref from the new operands. */
2989 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
d8021dea 2990 return (void *)-1;
8f15ba15 2991 /* This can happen with bitfields. */
fe60c82c 2992 if (maybe_ne (ref->size, r.size))
8f15ba15 2993 return (void *)-1;
3918bd18 2994 *ref = r;
d8021dea 2995
4a83fadb 2996 /* Do not update last seen VUSE after translating. */
f52fbd56 2997 data->last_vuse_ptr = NULL;
f359a95b 2998 /* Invalidate the original access path since it now contains
2999 the wrong base. */
3000 data->orig_ref.ref = NULL_TREE;
4a83fadb 3001
d8021dea 3002 /* Keep looking for the adjusted *REF / VR pair. */
3003 return NULL;
3004 }
3005
3006 /* Bail out and stop walking. */
3007 return (void *)-1;
3008}
3009
e1bba980 3010/* Return a reference op vector from OP that can be used for
3011 vn_reference_lookup_pieces. The caller is responsible for releasing
3012 the vector. */
3013
3014vec<vn_reference_op_s>
3015vn_reference_operands_for_lookup (tree op)
3016{
3017 bool valueized;
3018 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3019}
3020
f6c33c78 3021/* Lookup a reference operation by its parts, in the current hash table.
3022 Returns the resulting value number if it exists in the hash table,
3023 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3024 vn_reference_t stored in the hashtable if something is found. */
9e9e6e3e 3025
3026tree
3918bd18 3027vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 3028 vec<vn_reference_op_s> operands,
8ecc6b38 3029 vn_reference_t *vnresult, vn_lookup_kind kind)
f6c33c78 3030{
3031 struct vn_reference_s vr1;
dd277d48 3032 vn_reference_t tmp;
c26ce8a9 3033 tree cst;
dd277d48 3034
3035 if (!vnresult)
3036 vnresult = &tmp;
3037 *vnresult = NULL;
d8021dea 3038
b8a2283e 3039 vr1.vuse = vuse_ssa_val (vuse);
f1f41a6c 3040 shared_lookup_references.truncate (0);
3041 shared_lookup_references.safe_grow (operands.length ());
3042 memcpy (shared_lookup_references.address (),
3043 operands.address (),
d8021dea 3044 sizeof (vn_reference_op_s)
f1f41a6c 3045 * operands.length ());
d8021dea 3046 vr1.operands = operands = shared_lookup_references
3047 = valueize_refs (shared_lookup_references);
3918bd18 3048 vr1.type = type;
3049 vr1.set = set;
f6c33c78 3050 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 3051 if ((cst = fully_constant_vn_reference_p (&vr1)))
3052 return cst;
f6c33c78 3053
c26ce8a9 3054 vn_reference_lookup_1 (&vr1, vnresult);
dd277d48 3055 if (!*vnresult
8ecc6b38 3056 && kind != VN_NOWALK
dd277d48 3057 && vr1.vuse)
02067dc5 3058 {
3918bd18 3059 ao_ref r;
1f510793 3060 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
f359a95b 3061 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
3918bd18 3062 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
d8021dea 3063 *vnresult =
7dde7294 3064 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
d8021dea 3065 vn_reference_lookup_2,
46816709 3066 vn_reference_lookup_3,
f52fbd56 3067 vuse_valueize, limit, &data);
2fd3ecff 3068 gcc_checking_assert (vr1.operands == shared_lookup_references);
02067dc5 3069 }
3070
dd277d48 3071 if (*vnresult)
3072 return (*vnresult)->result;
3073
3074 return NULL_TREE;
f6c33c78 3075}
3076
3077/* Lookup OP in the current hash table, and return the resulting value
3078 number if it exists in the hash table. Return NULL_TREE if it does
3079 not exist in the hash table or if the result field of the structure
 3080   was NULL.  VNRESULT will be filled in with the vn_reference_t
0b3619e6 3081 stored in the hashtable if one exists. When TBAA_P is false assume
f52fbd56 3082 we are looking up a store and treat it as having alias-set zero.
 3083   *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded.  */
f6c33c78 3084
3085tree
8ecc6b38 3086vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
f52fbd56 3087 vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
9e9e6e3e 3088{
f1f41a6c 3089 vec<vn_reference_op_s> operands;
9e9e6e3e 3090 struct vn_reference_s vr1;
c26ce8a9 3091 tree cst;
882f8b55 3092  bool valueized_anything;
dd277d48 3093
f6c33c78 3094 if (vnresult)
3095 *vnresult = NULL;
9e9e6e3e 3096
b8a2283e 3097 vr1.vuse = vuse_ssa_val (vuse);
882f8b55 3098 vr1.operands = operands
 3099    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3918bd18 3100 vr1.type = TREE_TYPE (op);
7dde7294 3101 vr1.set = get_alias_set (op);
9e9e6e3e 3102 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 3103 if ((cst = fully_constant_vn_reference_p (&vr1)))
3104 return cst;
404d6be4 3105
8ecc6b38 3106 if (kind != VN_NOWALK
dd277d48 3107 && vr1.vuse)
3108 {
3109 vn_reference_t wvnresult;
3918bd18 3110 ao_ref r;
1f510793 3111 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
882f8b55 3112 /* Make sure to use a valueized reference if we valueized anything.
3113 Otherwise preserve the full reference for advanced TBAA. */
 3114      if (!valueized_anything
3115 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
db133a52 3116 vr1.operands))
2be90eed 3117 ao_ref_init (&r, op);
f359a95b 3118 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3119 last_vuse_ptr, kind, tbaa_p);
dd277d48 3120 wvnresult =
7dde7294 3121 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
d8021dea 3122 vn_reference_lookup_2,
46816709 3123 vn_reference_lookup_3,
f52fbd56 3124 vuse_valueize, limit, &data);
2fd3ecff 3125 gcc_checking_assert (vr1.operands == shared_lookup_references);
dd277d48 3126 if (wvnresult)
3127 {
3128 if (vnresult)
3129 *vnresult = wvnresult;
3130 return wvnresult->result;
3131 }
3132
3133 return NULL_TREE;
404d6be4 3134 }
9e9e6e3e 3135
dd277d48 3136 return vn_reference_lookup_1 (&vr1, vnresult);
9e9e6e3e 3137}
3138
2fd3ecff 3139/* Lookup CALL in the current hash table and return the entry in
3140 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3141
3142void
1a91d914 3143vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2fd3ecff 3144 vn_reference_t vr)
3145{
72e693ed 3146 if (vnresult)
3147 *vnresult = NULL;
3148
2fd3ecff 3149 tree vuse = gimple_vuse (call);
3150
3151 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3152 vr->operands = valueize_shared_reference_ops_from_call (call);
3153 vr->type = gimple_expr_type (call);
3154 vr->set = 0;
3155 vr->hashcode = vn_reference_compute_hash (vr);
3156 vn_reference_lookup_1 (vr, vnresult);
3157}
f6c33c78 3158
51e85e64 3159/* Insert OP into the current hash table with a value number of RESULT. */
9e9e6e3e 3160
51e85e64 3161static void
39215e09 3162vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
9e9e6e3e 3163{
3e871d4d 3164 vn_reference_s **slot;
9e9e6e3e 3165 vn_reference_t vr1;
75aefb7b 3166 bool tem;
9e9e6e3e 3167
ca5aa39a 3168 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
f6c33c78 3169 if (TREE_CODE (result) == SSA_NAME)
3170 vr1->value_id = VN_INFO (result)->value_id;
3171 else
3172 vr1->value_id = get_or_alloc_constant_value_id (result);
51e85e64 3173 vr1->vuse = vuse_ssa_val (vuse);
75aefb7b 3174 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3918bd18 3175 vr1->type = TREE_TYPE (op);
3176 vr1->set = get_alias_set (op);
9e9e6e3e 3177 vr1->hashcode = vn_reference_compute_hash (vr1);
3178 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
39215e09 3179 vr1->result_vdef = vdef;
9e9e6e3e 3180
c42ece58 3181 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
51e85e64 3182 INSERT);
3183
3184 /* Because IL walking on reference lookup can end up visiting
3185 a def that is only to be visited later in iteration order
 3186     when we are about to make an irreducible region reducible,
 3187     the def can be effectively processed and its ref inserted
 3188     by vn_reference_lookup_3 already.  So we cannot assert (!*slot),
 3189     but we save a lookup if we deal with already inserted refs here.  */
12661815 3190 if (*slot)
51e85e64 3191 {
881d2048 3192 /* We cannot assert that we have the same value either because
3193 when disentangling an irreducible region we may end up visiting
3194 a use before the corresponding def. That's a missed optimization
3195 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3196 if (dump_file && (dump_flags & TDF_DETAILS)
3197 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3198 {
3199 fprintf (dump_file, "Keeping old value ");
3200 print_generic_expr (dump_file, (*slot)->result);
3201 fprintf (dump_file, " because of collision\n");
3202 }
51e85e64 3203 free_reference (vr1);
3204 obstack_free (&vn_tables_obstack, vr1);
3205 return;
3206 }
9e9e6e3e 3207
3208 *slot = vr1;
c42ece58 3209 vr1->next = last_inserted_ref;
3210 last_inserted_ref = vr1;
f6c33c78 3211}
3212
 3213/* Insert a reference by its pieces into the current hash table with
3214 a value number of RESULT. Return the resulting reference
3215 structure we created. */
3216
3217vn_reference_t
3918bd18 3218vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 3219 vec<vn_reference_op_s> operands,
f6c33c78 3220 tree result, unsigned int value_id)
3221
3222{
3e871d4d 3223 vn_reference_s **slot;
f6c33c78 3224 vn_reference_t vr1;
3225
ca5aa39a 3226 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
dd277d48 3227 vr1->value_id = value_id;
51e85e64 3228 vr1->vuse = vuse_ssa_val (vuse);
f6c33c78 3229 vr1->operands = valueize_refs (operands);
3918bd18 3230 vr1->type = type;
3231 vr1->set = set;
f6c33c78 3232 vr1->hashcode = vn_reference_compute_hash (vr1);
3233 if (result && TREE_CODE (result) == SSA_NAME)
3234 result = SSA_VAL (result);
3235 vr1->result = result;
3236
c42ece58 3237 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
51e85e64 3238 INSERT);
48e1416a 3239
f6c33c78 3240 /* At this point we should have all the things inserted that we have
dd277d48 3241 seen before, and we should never try inserting something that
3242 already exists. */
f6c33c78 3243 gcc_assert (!*slot);
f6c33c78 3244
3245 *slot = vr1;
c42ece58 3246 vr1->next = last_inserted_ref;
3247 last_inserted_ref = vr1;
f6c33c78 3248 return vr1;
9e9e6e3e 3249}
3250
51a23cfc 3251/* Compute and return the hash value for nary operation VNO1.  */
9e9e6e3e 3252
2fd3ecff 3253static hashval_t
51a23cfc 3254vn_nary_op_compute_hash (const vn_nary_op_t vno1)
9e9e6e3e 3255{
f32e91d5 3256 inchash::hash hstate;
51a23cfc 3257 unsigned i;
9e9e6e3e 3258
51a23cfc 3259 for (i = 0; i < vno1->length; ++i)
3260 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3261 vno1->op[i] = SSA_VAL (vno1->op[i]);
9e9e6e3e 3262
42b45e81 3263 if (((vno1->length == 2
3264 && commutative_tree_code (vno1->opcode))
3265 || (vno1->length == 3
3266 && commutative_ternary_tree_code (vno1->opcode)))
48baf518 3267 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
a4f59596 3268 std::swap (vno1->op[0], vno1->op[1]);
42b45e81 3269 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
48baf518 3270 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
42b45e81 3271 {
3272 std::swap (vno1->op[0], vno1->op[1]);
3273 vno1->opcode = swap_tree_comparison (vno1->opcode);
3274 }
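  /* For illustration (hypothetical operands): after the canonicalization
     above, a_1 + b_2 and b_2 + a_1 end up with one canonical operand
     order, and a comparison and its operand-swapped form (say a_1 < b_2
     and b_2 > a_1) are rewritten to the same opcode and operand order,
     so both spellings hash and compare equal below.  */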
9e9e6e3e 3275
f32e91d5 3276 hstate.add_int (vno1->opcode);
51a23cfc 3277 for (i = 0; i < vno1->length; ++i)
f32e91d5 3278 inchash::add_expr (vno1->op[i], hstate);
9e9e6e3e 3279
f32e91d5 3280 return hstate.end ();
9e9e6e3e 3281}
3282
3e871d4d 3283/* Compare nary operations VNO1 and VNO2 and return true if they are
9e9e6e3e 3284 equivalent. */
3285
3e871d4d 3286bool
3287vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
9e9e6e3e 3288{
51a23cfc 3289 unsigned i;
3290
3d2d7de7 3291 if (vno1->hashcode != vno2->hashcode)
3292 return false;
3293
7384c678 3294 if (vno1->length != vno2->length)
3295 return false;
3296
51a23cfc 3297 if (vno1->opcode != vno2->opcode
c477520d 3298 || !types_compatible_p (vno1->type, vno2->type))
51a23cfc 3299 return false;
3300
3301 for (i = 0; i < vno1->length; ++i)
3302 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3303 return false;
3304
2c422815 3305  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3306 of op1. Need to check to make sure they are the same. */
3307 if (vno1->opcode == BIT_INSERT_EXPR
3308 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3309 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3310 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3311 return false;
3312
51a23cfc 3313 return true;
9e9e6e3e 3314}
3315
f8ce304c 3316/* Initialize VNO from the pieces provided. */
9e9e6e3e 3317
f8ce304c 3318static void
3319init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
7384c678 3320 enum tree_code code, tree type, tree *ops)
f8ce304c 3321{
3322 vno->opcode = code;
3323 vno->length = length;
3324 vno->type = type;
7384c678 3325 memcpy (&vno->op[0], ops, sizeof (tree) * length);
f8ce304c 3326}
3327
3328/* Initialize VNO from OP. */
3329
3330static void
3331init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
3332{
3333 unsigned i;
3334
3335 vno->opcode = TREE_CODE (op);
3336 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
3337 vno->type = TREE_TYPE (op);
3338 for (i = 0; i < vno->length; ++i)
3339 vno->op[i] = TREE_OPERAND (op, i);
3340}
3341
7384c678 3342/* Return the number of operands for a vn_nary ops structure from STMT. */
3343
3344static unsigned int
42acab1c 3345vn_nary_length_from_stmt (gimple *stmt)
7384c678 3346{
3347 switch (gimple_assign_rhs_code (stmt))
3348 {
3349 case REALPART_EXPR:
3350 case IMAGPART_EXPR:
3351 case VIEW_CONVERT_EXPR:
3352 return 1;
3353
70cd63a3 3354 case BIT_FIELD_REF:
3355 return 3;
3356
7384c678 3357 case CONSTRUCTOR:
3358 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3359
3360 default:
3361 return gimple_num_ops (stmt) - 1;
3362 }
3363}
3364
f8ce304c 3365/* Initialize VNO from STMT. */
3366
3367static void
42acab1c 3368init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
f8ce304c 3369{
3370 unsigned i;
3371
3372 vno->opcode = gimple_assign_rhs_code (stmt);
f8ce304c 3373 vno->type = gimple_expr_type (stmt);
7384c678 3374 switch (vno->opcode)
3375 {
3376 case REALPART_EXPR:
3377 case IMAGPART_EXPR:
3378 case VIEW_CONVERT_EXPR:
3379 vno->length = 1;
3380 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3381 break;
3382
70cd63a3 3383 case BIT_FIELD_REF:
3384 vno->length = 3;
3385 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3386 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3387 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3388 break;
3389
7384c678 3390 case CONSTRUCTOR:
3391 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3392 for (i = 0; i < vno->length; ++i)
3393 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3394 break;
3395
3396 default:
70cd63a3 3397 gcc_checking_assert (!gimple_assign_single_p (stmt));
7384c678 3398 vno->length = gimple_num_ops (stmt) - 1;
3399 for (i = 0; i < vno->length; ++i)
3400 vno->op[i] = gimple_op (stmt, i + 1);
3401 }
f8ce304c 3402}
3403
3404/* Compute the hashcode for VNO and look for it in the hash table;
3405 return the resulting value number if it exists in the hash table.
3406 Return NULL_TREE if it does not exist in the hash table or if the
3407 result field of the operation is NULL. VNRESULT will contain the
3408 vn_nary_op_t from the hashtable if it exists. */
3409
3410static tree
3411vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
f6c33c78 3412{
3e871d4d 3413 vn_nary_op_s **slot;
f8ce304c 3414
f6c33c78 3415 if (vnresult)
3416 *vnresult = NULL;
f8ce304c 3417
3418 vno->hashcode = vn_nary_op_compute_hash (vno);
51e85e64 3419 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
f6c33c78 3420 if (!slot)
3421 return NULL_TREE;
3422 if (vnresult)
3e871d4d 3423 *vnresult = *slot;
51e85e64 3424 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
f6c33c78 3425}
3426
f8ce304c 3427/* Lookup an n-ary operation by its pieces and return the resulting value
3428 number if it exists in the hash table. Return NULL_TREE if it does
3429 not exist in the hash table or if the result field of the operation
3430 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3431 if it exists. */
3432
3433tree
3434vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
7384c678 3435 tree type, tree *ops, vn_nary_op_t *vnresult)
f8ce304c 3436{
7384c678 3437 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3438 sizeof_vn_nary_op (length));
3439 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3440 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 3441}
3442
f6c33c78 3443/* Lookup OP in the current hash table, and return the resulting value
3444 number if it exists in the hash table. Return NULL_TREE if it does
3445 not exist in the hash table or if the result field of the operation
3446 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3447 if it exists. */
3448
3449tree
3450vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
9e9e6e3e 3451{
7384c678 3452 vn_nary_op_t vno1
3453 = XALLOCAVAR (struct vn_nary_op_s,
3454 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3455 init_vn_nary_op_from_op (vno1, op);
3456 return vn_nary_op_lookup_1 (vno1, vnresult);
9e9e6e3e 3457}
3458
75a70cf9 3459/* Lookup the rhs of STMT in the current hash table, and return the resulting
3460 value number if it exists in the hash table. Return NULL_TREE if
3461 it does not exist in the hash table. VNRESULT will contain the
3462 vn_nary_op_t from the hashtable if it exists. */
3463
3464tree
42acab1c 3465vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
75a70cf9 3466{
7384c678 3467 vn_nary_op_t vno1
3468 = XALLOCAVAR (struct vn_nary_op_s,
3469 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3470 init_vn_nary_op_from_stmt (vno1, stmt);
3471 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 3472}
3473
3474/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3475
3476static vn_nary_op_t
3477alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3478{
3479 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3480}
3481
3482/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3483 obstack. */
3484
3485static vn_nary_op_t
3486alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3487{
ca5aa39a 3488 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
f8ce304c 3489
3490 vno1->value_id = value_id;
3491 vno1->length = length;
51e85e64 3492 vno1->predicated_values = 0;
3493 vno1->u.result = result;
f8ce304c 3494
3495 return vno1;
3496}
3497
3498/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3499 VNO->HASHCODE first. */
3500
3501static vn_nary_op_t
c1f445d2 3502vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3e871d4d 3503 bool compute_hash)
f8ce304c 3504{
3e871d4d 3505 vn_nary_op_s **slot;
f8ce304c 3506
3507 if (compute_hash)
51e85e64 3508 {
3509 vno->hashcode = vn_nary_op_compute_hash (vno);
3510 gcc_assert (! vno->predicated_values
3511 || (! vno->u.values->next
72b40bde 3512 && vno->u.values->n == 1));
51e85e64 3513 }
f8ce304c 3514
c1f445d2 3515 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
51e85e64 3516 vno->unwind_to = *slot;
3517 if (*slot)
3518 {
3519 /* Prefer non-predicated values.
3520 ??? Only if those are constant, otherwise, with constant predicated
3521 value, turn them into predicated values with entry-block validity
3522 (??? but we always find the first valid result currently). */
3523 if ((*slot)->predicated_values
3524 && ! vno->predicated_values)
3525 {
3526 /* ??? We cannot remove *slot from the unwind stack list.
3527 For the moment we deal with this by skipping not found
3528 entries but this isn't ideal ... */
3529 *slot = vno;
3530 /* ??? Maintain a stack of states we can unwind in
3531 vn_nary_op_s? But how far do we unwind? In reality
3532 we need to push change records somewhere... Or not
3533 unwind vn_nary_op_s and linking them but instead
3534 unwind the results "list", linking that, which also
3535 doesn't move on hashtable resize. */
3536 /* We can also have a ->unwind_to recording *slot there.
3537 That way we can make u.values a fixed size array with
3538 recording the number of entries but of course we then
3539 have always N copies for each unwind_to-state. Or we
3540 make sure to only ever append and each unwinding will
3541 pop off one entry (but how to deal with predicated
3542 replaced with non-predicated here?) */
3543 vno->next = last_inserted_nary;
3544 last_inserted_nary = vno;
3545 return vno;
3546 }
3547 else if (vno->predicated_values
3548 && ! (*slot)->predicated_values)
3549 return *slot;
3550 else if (vno->predicated_values
3551 && (*slot)->predicated_values)
3552 {
3553	      /* ??? Factor this all into an insert_single_predicated_value
3554 routine. */
3555 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3556 basic_block vno_bb
3557 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3558 vn_pval *nval = vno->u.values;
3559 vn_pval **next = &vno->u.values;
3560 bool found = false;
3561 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3562 {
3563 if (expressions_equal_p (val->result, vno->u.values->result))
3564 {
3565 found = true;
3566 for (unsigned i = 0; i < val->n; ++i)
3567 {
3568 basic_block val_bb
3569 = BASIC_BLOCK_FOR_FN (cfun,
3570 val->valid_dominated_by_p[i]);
3571 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3572 /* Value registered with more generic predicate. */
3573 return *slot;
3574 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3575 /* Shouldn't happen, we insert in RPO order. */
3576 gcc_unreachable ();
3577 }
3578 /* Append value. */
3579 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3580 sizeof (vn_pval)
3581 + val->n * sizeof (int));
3582 (*next)->next = NULL;
3583 (*next)->result = val->result;
3584 (*next)->n = val->n + 1;
3585 memcpy ((*next)->valid_dominated_by_p,
3586 val->valid_dominated_by_p,
3587 val->n * sizeof (int));
3588 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3589 next = &(*next)->next;
3590 if (dump_file && (dump_flags & TDF_DETAILS))
3591 fprintf (dump_file, "Appending predicate to value.\n");
3592 continue;
3593 }
3594 /* Copy other predicated values. */
3595 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3596 sizeof (vn_pval)
3597 + (val->n-1) * sizeof (int));
3598 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3599 (*next)->next = NULL;
3600 next = &(*next)->next;
3601 }
3602 if (!found)
3603 *next = nval;
3604
3605 *slot = vno;
3606 vno->next = last_inserted_nary;
3607 last_inserted_nary = vno;
3608 return vno;
3609 }
3610
3611      /* While we do not want to insert things twice, it's awkward to
3612 avoid it in the case where visit_nary_op pattern-matches stuff
3613 and ends up simplifying the replacement to itself. We then
3614 get two inserts, one from visit_nary_op and one from
3615 vn_nary_build_or_lookup.
3616 So allow inserts with the same value number. */
3617 if ((*slot)->u.result == vno->u.result)
3618 return *slot;
3619 }
3620
3621  /* ??? There's also optimistic vs. previous committed state merging
3622 that is problematic for the case of unwinding. */
b49e8ef9 3623
51e85e64 3624 /* ??? We should return NULL if we do not use 'vno' and have the
3625 caller release it. */
f8ce304c 3626 gcc_assert (!*slot);
3627
3628 *slot = vno;
c42ece58 3629 vno->next = last_inserted_nary;
3630 last_inserted_nary = vno;
f8ce304c 3631 return vno;
75a70cf9 3632}
3633
f6c33c78 3634/* Insert an n-ary operation into the current hash table using its
3635 pieces. Return the vn_nary_op_t structure we created and put in
3636 the hashtable. */
3637
3638vn_nary_op_t
3639vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
7384c678 3640 tree type, tree *ops,
3641 tree result, unsigned int value_id)
f6c33c78 3642{
7384c678 3643 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3644 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
c42ece58 3645 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
f6c33c78 3646}
3647
51e85e64 3648static vn_nary_op_t
3649vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3650 tree type, tree *ops,
3651 tree result, unsigned int value_id,
3652 edge pred_e)
3653{
3654 /* ??? Currently tracking BBs. */
3655 if (! single_pred_p (pred_e->dest))
3656 {
3657 /* Never record for backedges. */
3658 if (pred_e->flags & EDGE_DFS_BACK)
3659 return NULL;
3660 edge_iterator ei;
3661 edge e;
3662 int cnt = 0;
3663 /* Ignore backedges. */
3664 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3665 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3666 cnt++;
3667 if (cnt != 1)
3668 return NULL;
3669 }
3670 if (dump_file && (dump_flags & TDF_DETAILS)
3671 /* ??? Fix dumping, but currently we only get comparisons. */
3672 && TREE_CODE_CLASS (code) == tcc_comparison)
3673 {
3674 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3675 pred_e->dest->index);
3676 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3677 fprintf (dump_file, " %s ", get_tree_code_name (code));
3678 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3679 fprintf (dump_file, " == %s\n",
3680 integer_zerop (result) ? "false" : "true");
3681 }
3682 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3683 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3684 vno1->predicated_values = 1;
3685 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3686 sizeof (vn_pval));
3687 vno1->u.values->next = NULL;
3688 vno1->u.values->result = result;
3689 vno1->u.values->n = 1;
3690 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
51e85e64 3691 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3692}
3693
3694static bool
3695dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3696
3697static tree
3698vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3699{
3700 if (! vno->predicated_values)
3701 return vno->u.result;
3702 for (vn_pval *val = vno->u.values; val; val = val->next)
3703 for (unsigned i = 0; i < val->n; ++i)
3704 if (dominated_by_p_w_unex (bb,
3705 BASIC_BLOCK_FOR_FN
3706 (cfun, val->valid_dominated_by_p[i])))
3707 return val->result;
3708 return NULL_TREE;
3709}
3710
9e9e6e3e 3711/* Insert OP into the current hash table with a value number of
f6c33c78 3712 RESULT. Return the vn_nary_op_t structure we created and put in
3713 the hashtable. */
9e9e6e3e 3714
f6c33c78 3715vn_nary_op_t
51a23cfc 3716vn_nary_op_insert (tree op, tree result)
9e9e6e3e 3717{
51a23cfc 3718 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
51a23cfc 3719 vn_nary_op_t vno1;
51a23cfc 3720
f8ce304c 3721 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3722 init_vn_nary_op_from_op (vno1, op);
c42ece58 3723 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
9e9e6e3e 3724}
3725
75a70cf9 3726/* Insert the rhs of STMT into the current hash table with a value number of
3727 RESULT. */
3728
24500bba 3729static vn_nary_op_t
42acab1c 3730vn_nary_op_insert_stmt (gimple *stmt, tree result)
75a70cf9 3731{
7384c678 3732 vn_nary_op_t vno1
3733 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3734 result, VN_INFO (result)->value_id);
f8ce304c 3735 init_vn_nary_op_from_stmt (vno1, stmt);
c42ece58 3736 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
75a70cf9 3737}
3738
9e9e6e3e 3739/* Compute a hashcode for PHI operation VP1 and return it. */
3740
3741static inline hashval_t
3742vn_phi_compute_hash (vn_phi_t vp1)
3743{
ca5aa39a 3744 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3745 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
9e9e6e3e 3746 tree phi1op;
9a7beb5f 3747 tree type;
85249fd1 3748 edge e;
3749 edge_iterator ei;
9e9e6e3e 3750
9a7beb5f 3751 /* If all PHI arguments are constants we need to distinguish
3752 the PHI node via its type. */
82a7a70c 3753 type = vp1->type;
f32e91d5 3754 hstate.merge_hash (vn_hash_type (type));
9a7beb5f 3755
85249fd1 3756 FOR_EACH_EDGE (e, ei, vp1->block->preds)
9e9e6e3e 3757 {
85249fd1 3758      /* Don't hash backedge values; they need to be handled as VN_TOP
3759 for optimistic value-numbering. */
3760 if (e->flags & EDGE_DFS_BACK)
3761 continue;
3762
3763 phi1op = vp1->phiargs[e->dest_idx];
9e9e6e3e 3764 if (phi1op == VN_TOP)
3765 continue;
f32e91d5 3766 inchash::add_expr (phi1op, hstate);
9e9e6e3e 3767 }
3768
f32e91d5 3769 return hstate.end ();
9e9e6e3e 3770}
3771
df10fba0 3772
2578db52 3773/* Return true if COND1 and COND2 represent the same condition, set
3774 *INVERTED_P if one needs to be inverted to make it the same as
3775 the other. */
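
/* For illustration (hypothetical operands): with LHS1/RHS1 being a_1/b_2,
   a_1 < b_2 and b_2 > a_1 are the same condition with *INVERTED_P false,
   while a_1 < b_2 and a_1 >= b_2 are the same condition with *INVERTED_P
   set to true (subject to the HONOR_NANS checks below).  */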
3776
3777static bool
15edd328 3778cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3779 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2578db52 3780{
3781 enum tree_code code1 = gimple_cond_code (cond1);
3782 enum tree_code code2 = gimple_cond_code (cond2);
2578db52 3783
3784 *inverted_p = false;
3785 if (code1 == code2)
3786 ;
3787 else if (code1 == swap_tree_comparison (code2))
3788 std::swap (lhs2, rhs2);
3789 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3790 *inverted_p = true;
3791 else if (code1 == invert_tree_comparison
3792 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3793 {
3794 std::swap (lhs2, rhs2);
3795 *inverted_p = true;
3796 }
3797 else
3798 return false;
3799
4e916062 3800 return ((expressions_equal_p (lhs1, lhs2)
3801 && expressions_equal_p (rhs1, rhs2))
3802 || (commutative_tree_code (code1)
3803 && expressions_equal_p (lhs1, rhs2)
3804 && expressions_equal_p (rhs1, lhs2)));
2578db52 3805}
3806
9e9e6e3e 3807/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
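
/* For illustration (a hypothetical CFG): the different-block case below
   allows two PHIs such as

     if (a_1 < b_2) ...  x_3 = PHI <c_4 (true edge), d_5 (false edge)>
     if (b_2 > a_1) ...  y_6 = PHI <c_4 (true edge), d_5 (false edge)>

   to compare equal when the controlling conditions match according to
   cond_stmts_equal_p and the arguments on the corresponding true/false
   controlled edges are equal.  */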
3808
3809static int
3e871d4d 3810vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
9e9e6e3e 3811{
3d2d7de7 3812 if (vp1->hashcode != vp2->hashcode)
3813 return false;
3814
df10fba0 3815 if (vp1->block != vp2->block)
9e9e6e3e 3816 {
ca5aa39a 3817 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
9a7beb5f 3818 return false;
3819
ca5aa39a 3820 switch (EDGE_COUNT (vp1->block->preds))
9e9e6e3e 3821 {
df10fba0 3822 case 1:
3823 /* Single-arg PHIs are just copies. */
3824 break;
3825
3826 case 2:
3827 {
3828 /* Rule out backedges into the PHI. */
3829 if (vp1->block->loop_father->header == vp1->block
3830 || vp2->block->loop_father->header == vp2->block)
3831 return false;
3832
3833 /* If the PHI nodes do not have compatible types
3834 they are not the same. */
3835 if (!types_compatible_p (vp1->type, vp2->type))
3836 return false;
3837
3838 basic_block idom1
3839 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3840 basic_block idom2
3841 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3842	    /* If the immediate dominators end in switch stmts, multiple
3843 values may end up in the same PHI arg via intermediate
3844 CFG merges. */
3845 if (EDGE_COUNT (idom1->succs) != 2
3846 || EDGE_COUNT (idom2->succs) != 2)
3847 return false;
3848
3849 /* Verify the controlling stmt is the same. */
a8daa86c 3850 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3851 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3852 if (! last1 || ! last2)
df10fba0 3853 return false;
2578db52 3854 bool inverted_p;
a8daa86c 3855 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3856 last2, vp2->cclhs, vp2->ccrhs,
15edd328 3857 &inverted_p))
df10fba0 3858 return false;
3859
3860 /* Get at true/false controlled edges into the PHI. */
3861 edge te1, te2, fe1, fe2;
3862 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3863 &te1, &fe1)
3864 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3865 &te2, &fe2))
3866 return false;
3867
2578db52 3868 /* Swap edges if the second condition is the inverted of the
3869 first. */
3870 if (inverted_p)
3871 std::swap (te2, fe2);
3872
df10fba0 3873 /* ??? Handle VN_TOP specially. */
3874 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3875 vp2->phiargs[te2->dest_idx])
3876 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3877 vp2->phiargs[fe2->dest_idx]))
3878 return false;
3879
3880 return true;
3881 }
3882
3883 default:
3884 return false;
9e9e6e3e 3885 }
9e9e6e3e 3886 }
df10fba0 3887
3888 /* If the PHI nodes do not have compatible types
3889 they are not the same. */
3890 if (!types_compatible_p (vp1->type, vp2->type))
3891 return false;
3892
3893  /* Any phi in the same block will have its arguments in the
3894 same edge order, because of how we store phi nodes. */
ca5aa39a 3895 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
df10fba0 3896 {
ca5aa39a 3897 tree phi1op = vp1->phiargs[i];
df10fba0 3898 tree phi2op = vp2->phiargs[i];
3899 if (phi1op == VN_TOP || phi2op == VN_TOP)
3900 continue;
3901 if (!expressions_equal_p (phi1op, phi2op))
3902 return false;
3903 }
3904
3905 return true;
9e9e6e3e 3906}
3907
9e9e6e3e 3908/* Lookup PHI in the current hash table, and return the resulting
3909 value number if it exists in the hash table. Return NULL_TREE if
3910 it does not exist in the hash table. */
3911
3dc4c394 3912static tree
51e85e64 3913vn_phi_lookup (gimple *phi, bool backedges_varying_p)
9e9e6e3e 3914{
3e871d4d 3915 vn_phi_s **slot;
ca5aa39a 3916 struct vn_phi_s *vp1;
85249fd1 3917 edge e;
3918 edge_iterator ei;
9e9e6e3e 3919
ca5aa39a 3920 vp1 = XALLOCAVAR (struct vn_phi_s,
3921 sizeof (struct vn_phi_s)
3922 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
9e9e6e3e 3923
3924 /* Canonicalize the SSA_NAME's to their value number. */
85249fd1 3925 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
9e9e6e3e 3926 {
85249fd1 3927 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
51e85e64 3928 if (TREE_CODE (def) == SSA_NAME
3929 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3930 def = SSA_VAL (def);
ca5aa39a 3931 vp1->phiargs[e->dest_idx] = def;
9e9e6e3e 3932 }
ca5aa39a 3933 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3934 vp1->block = gimple_bb (phi);
15edd328 3935 /* Extract values of the controlling condition. */
ca5aa39a 3936 vp1->cclhs = NULL_TREE;
3937 vp1->ccrhs = NULL_TREE;
3938 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
15edd328 3939 if (EDGE_COUNT (idom1->succs) == 2)
a8daa86c 3940 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
15edd328 3941 {
51e85e64 3942 /* ??? We want to use SSA_VAL here. But possibly not
3943 allow VN_TOP. */
ca5aa39a 3944 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3945 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
15edd328 3946 }
ca5aa39a 3947 vp1->hashcode = vn_phi_compute_hash (vp1);
51e85e64 3948 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
9e9e6e3e 3949 if (!slot)
3950 return NULL_TREE;
3e871d4d 3951 return (*slot)->result;
9e9e6e3e 3952}
3953
3954/* Insert PHI into the current hash table with a value number of
3955 RESULT. */
3956
f6c33c78 3957static vn_phi_t
51e85e64 3958vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
9e9e6e3e 3959{
3e871d4d 3960 vn_phi_s **slot;
ca5aa39a 3961 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3962 sizeof (vn_phi_s)
3963 + ((gimple_phi_num_args (phi) - 1)
3964 * sizeof (tree)));
85249fd1 3965 edge e;
3966 edge_iterator ei;
3967
9e9e6e3e 3968 /* Canonicalize the SSA_NAME's to their value number. */
85249fd1 3969 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
9e9e6e3e 3970 {
85249fd1 3971 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
51e85e64 3972 if (TREE_CODE (def) == SSA_NAME
3973 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3974 def = SSA_VAL (def);
ca5aa39a 3975 vp1->phiargs[e->dest_idx] = def;
9e9e6e3e 3976 }
f6c33c78 3977 vp1->value_id = VN_INFO (result)->value_id;
82a7a70c 3978 vp1->type = TREE_TYPE (gimple_phi_result (phi));
75a70cf9 3979 vp1->block = gimple_bb (phi);
15edd328 3980 /* Extract values of the controlling condition. */
3981 vp1->cclhs = NULL_TREE;
3982 vp1->ccrhs = NULL_TREE;
3983 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3984 if (EDGE_COUNT (idom1->succs) == 2)
a8daa86c 3985 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
15edd328 3986 {
51e85e64 3987 /* ??? We want to use SSA_VAL here. But possibly not
3988 allow VN_TOP. */
15edd328 3989 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3990 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3991 }
9e9e6e3e 3992 vp1->result = result;
3993 vp1->hashcode = vn_phi_compute_hash (vp1);
3994
c42ece58 3995 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
51e85e64 3996 gcc_assert (!*slot);
9e9e6e3e 3997
9e9e6e3e 3998 *slot = vp1;
c42ece58 3999 vp1->next = last_inserted_phi;
4000 last_inserted_phi = vp1;
f6c33c78 4001 return vp1;
9e9e6e3e 4002}
4003
4004
8d6b2506 4005/* Return true if BB1 is dominated by BB2 taking into account edges
4006 that are not executable. */
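
/* For illustration (a hypothetical CFG): if BB1 has two predecessors but
   only one incoming edge is marked EDGE_EXECUTABLE, the dominance check
   is redone with BB1 replaced by that single executable predecessor;
   similarly BB2 may be advanced to its single executable successor.  So
   in a diamond whose one arm is not executable the join block is still
   considered dominated by the block on the executable arm.  */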
4007
4008static bool
4009dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
4010{
4011 edge_iterator ei;
4012 edge e;
4013
4014 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4015 return true;
4016
4017  /* Before iterating we'd like to know if there exists an
4018     (executable) path from bb2 to bb1 at all; if not we can
4019 directly return false. For now simply iterate once. */
4020
4021 /* Iterate to the single executable bb1 predecessor. */
4022 if (EDGE_COUNT (bb1->preds) > 1)
4023 {
4024 edge prede = NULL;
4025 FOR_EACH_EDGE (e, ei, bb1->preds)
4026 if (e->flags & EDGE_EXECUTABLE)
4027 {
4028 if (prede)
4029 {
4030 prede = NULL;
4031 break;
4032 }
4033 prede = e;
4034 }
4035 if (prede)
4036 {
4037 bb1 = prede->src;
4038
4039 /* Re-do the dominance check with changed bb1. */
4040 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4041 return true;
4042 }
4043 }
4044
4045 /* Iterate to the single executable bb2 successor. */
4046 edge succe = NULL;
4047 FOR_EACH_EDGE (e, ei, bb2->succs)
4048 if (e->flags & EDGE_EXECUTABLE)
4049 {
4050 if (succe)
4051 {
4052 succe = NULL;
4053 break;
4054 }
4055 succe = e;
4056 }
4057 if (succe)
4058 {
4059 /* Verify the reached block is only reached through succe.
4060 If there is only one edge we can spare us the dominator
4061 check and iterate directly. */
4062 if (EDGE_COUNT (succe->dest->preds) > 1)
4063 {
4064 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4065 if (e != succe
4066 && (e->flags & EDGE_EXECUTABLE))
4067 {
4068 succe = NULL;
4069 break;
4070 }
4071 }
4072 if (succe)
4073 {
4074 bb2 = succe->dest;
4075
4076 /* Re-do the dominance check with changed bb2. */
4077 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4078 return true;
4079 }
4080 }
4081
4082 /* We could now iterate updating bb1 / bb2. */
4083 return false;
4084}
4085
9e9e6e3e 4086/* Set the value number of FROM to TO, return true if it has changed
4087 as a result. */
4088
4089static inline bool
4090set_ssa_val_to (tree from, tree to)
4091{
51e85e64 4092 vn_ssa_aux_t from_info = VN_INFO (from);
4093 tree currval = from_info->valnum; // SSA_VAL (from)
773078cb 4094 poly_int64 toff, coff;
9e9e6e3e 4095
85e9a542 4096  /* The only things we allow as value numbers are ssa_names
4097 and invariants. So assert that here. We don't allow VN_TOP
4098 as visiting a stmt should produce a value-number other than
4099 that.
4100 ??? Still VN_TOP can happen for unreachable code, so force
4101 it to varying in that case. Not all code is prepared to
4102 get VN_TOP on valueization. */
4103 if (to == VN_TOP)
4104 {
51e85e64 4105 /* ??? When iterating and visiting PHI <undef, backedge-value>
4106 for the first time we rightfully get VN_TOP and we need to
4107 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4108 With SCCVN we were simply lucky we iterated the other PHI
4109 cycles first and thus visited the backedge-value DEF. */
4110 if (currval == VN_TOP)
4111 goto set_and_exit;
85e9a542 4112 if (dump_file && (dump_flags & TDF_DETAILS))
4113 fprintf (dump_file, "Forcing value number to varying on "
4114 "receiving VN_TOP\n");
4115 to = from;
4116 }
4117
51e85e64 4118 gcc_checking_assert (to != NULL_TREE
4119 && ((TREE_CODE (to) == SSA_NAME
4120 && (to == from || SSA_VAL (to) == to))
4121 || is_gimple_min_invariant (to)));
85e9a542 4122
b81ffaee 4123 if (from != to)
4124 {
4125 if (currval == from)
4126 {
4127 if (dump_file && (dump_flags & TDF_DETAILS))
4128 {
4129 fprintf (dump_file, "Not changing value number of ");
1ffa4346 4130 print_generic_expr (dump_file, from);
b81ffaee 4131 fprintf (dump_file, " from VARYING to ");
1ffa4346 4132 print_generic_expr (dump_file, to);
b81ffaee 4133 fprintf (dump_file, "\n");
4134 }
4135 return false;
4136 }
fbdb74ac 4137 bool curr_invariant = is_gimple_min_invariant (currval);
4138 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
4139 && ssa_undefined_value_p (currval, false));
4140 if (currval != VN_TOP
4141 && !curr_invariant
4142 && !curr_undefined
4143 && is_gimple_min_invariant (to))
cb1f8eb4 4144 {
4145 if (dump_file && (dump_flags & TDF_DETAILS))
4146 {
4147 fprintf (dump_file, "Forcing VARYING instead of changing "
4148 "value number of ");
1ffa4346 4149 print_generic_expr (dump_file, from);
cb1f8eb4 4150 fprintf (dump_file, " from ");
1ffa4346 4151 print_generic_expr (dump_file, currval);
cb1f8eb4 4152 fprintf (dump_file, " (non-constant) to ");
1ffa4346 4153 print_generic_expr (dump_file, to);
cb1f8eb4 4154 fprintf (dump_file, " (constant)\n");
4155 }
4156 to = from;
4157 }
fbdb74ac 4158 else if (currval != VN_TOP
4159 && !curr_undefined
4160 && TREE_CODE (to) == SSA_NAME
4161 && ssa_undefined_value_p (to, false))
4162 {
4163 if (dump_file && (dump_flags & TDF_DETAILS))
4164 {
4165 fprintf (dump_file, "Forcing VARYING instead of changing "
4166 "value number of ");
4167 print_generic_expr (dump_file, from);
4168 fprintf (dump_file, " from ");
4169 print_generic_expr (dump_file, currval);
4170 fprintf (dump_file, " (non-undefined) to ");
4171 print_generic_expr (dump_file, to);
4172 fprintf (dump_file, " (undefined)\n");
4173 }
4174 to = from;
4175 }
b81ffaee 4176 else if (TREE_CODE (to) == SSA_NAME
4177 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4178 to = from;
4179 }
5dbdbadc 4180
51e85e64 4181set_and_exit:
9e9e6e3e 4182 if (dump_file && (dump_flags & TDF_DETAILS))
4183 {
4184 fprintf (dump_file, "Setting value number of ");
1ffa4346 4185 print_generic_expr (dump_file, from);
9e9e6e3e 4186 fprintf (dump_file, " to ");
1ffa4346 4187 print_generic_expr (dump_file, to);
9e9e6e3e 4188 }
4189
d68e9408 4190 if (currval != to
4191 && !operand_equal_p (currval, to, 0)
9d8dca24 4192 /* Different undefined SSA names are not actually different. See
4193	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
4194 && !(TREE_CODE (currval) == SSA_NAME
4195 && TREE_CODE (to) == SSA_NAME
4196 && ssa_undefined_value_p (currval, false)
4197 && ssa_undefined_value_p (to, false))
d68e9408 4198      /* ??? For addresses involving volatile objects or types, operand_equal_p
4199 does not reliably detect ADDR_EXPRs as equal. We know we are only
4200 getting invariant gimple addresses here, so can use
4201 get_addr_base_and_unit_offset to do this comparison. */
4202 && !(TREE_CODE (currval) == ADDR_EXPR
4203 && TREE_CODE (to) == ADDR_EXPR
4204 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4205 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
773078cb 4206 && known_eq (coff, toff)))
9e9e6e3e 4207 {
2a305737 4208 if (dump_file && (dump_flags & TDF_DETAILS))
4209 fprintf (dump_file, " (changed)\n");
51e85e64 4210 from_info->valnum = to;
9e9e6e3e 4211 return true;
4212 }
19744bd4 4213 if (dump_file && (dump_flags & TDF_DETAILS))
4214 fprintf (dump_file, "\n");
9e9e6e3e 4215 return false;
4216}
4217
4218/* Set all definitions in STMT to value number to themselves.
4219 Return true if a value number changed. */
4220
4221static bool
42acab1c 4222defs_to_varying (gimple *stmt)
9e9e6e3e 4223{
4224 bool changed = false;
4225 ssa_op_iter iter;
4226 def_operand_p defp;
4227
4228 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4229 {
4230 tree def = DEF_FROM_PTR (defp);
9e9e6e3e 4231 changed |= set_ssa_val_to (def, def);
4232 }
4233 return changed;
4234}
4235
4236/* Visit a copy between LHS and RHS, return true if the value number
4237 changed. */
4238
4239static bool
4240visit_copy (tree lhs, tree rhs)
4241{
eb074ef3 4242 /* Valueize. */
cc99c1e1 4243 rhs = SSA_VAL (rhs);
9e9e6e3e 4244
4245 return set_ssa_val_to (lhs, rhs);
4246}
4247
fa879112 4248/* Lookup a value for OP in type WIDE_TYPE where the value in the type of OP
4249 is the same. */
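
/* For illustration (hypothetical SSA names): with WIDE_TYPE being long
   and OP being i_1 of type int, an already value-numbered statement
   l_2 = (long) i_1 can make the NOP_EXPR lookup below succeed and return
   l_2; if instead i_1 was defined as i_1 = (int) l_3 with l_3 of type
   long, l_3 is returned; an INTEGER_CST OP is simply extended to
   WIDE_TYPE.  */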
4250
4251static tree
4252valueized_wider_op (tree wide_type, tree op)
4253{
4254 if (TREE_CODE (op) == SSA_NAME)
51e85e64 4255 op = vn_valueize (op);
fa879112 4256
4257 /* Either we have the op widened available. */
4258 tree ops[3] = {};
4259 ops[0] = op;
4260 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4261 wide_type, ops, NULL);
4262 if (tem)
4263 return tem;
4264
4265 /* Or the op is truncated from some existing value. */
4266 if (TREE_CODE (op) == SSA_NAME)
4267 {
4268 gimple *def = SSA_NAME_DEF_STMT (op);
4269 if (is_gimple_assign (def)
4270 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4271 {
4272 tem = gimple_assign_rhs1 (def);
4273 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4274 {
4275 if (TREE_CODE (tem) == SSA_NAME)
51e85e64 4276 tem = vn_valueize (tem);
fa879112 4277 return tem;
4278 }
4279 }
4280 }
4281
4282 /* For constants simply extend it. */
4283 if (TREE_CODE (op) == INTEGER_CST)
e3d0f65c 4284 return wide_int_to_tree (wide_type, wi::to_wide (op));
fa879112 4285
4286 return NULL_TREE;
4287}
4288
0fea623c 4289/* Visit a nary operator RHS, value number it, and return true if the
9e9e6e3e 4290 value number of LHS has changed as a result. */
4291
4292static bool
fa879112 4293visit_nary_op (tree lhs, gassign *stmt)
9e9e6e3e 4294{
51e85e64 4295 vn_nary_op_t vnresult;
4296 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4297 if (! result && vnresult)
4298 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
9e9e6e3e 4299 if (result)
fa879112 4300 return set_ssa_val_to (lhs, result);
4301
4302 /* Do some special pattern matching for redundancies of operations
4303 in different types. */
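  /* For illustration (hypothetical GIMPLE, assuming the wider variant was
     already value-numbered): given

       _1 = a_2 + b_3;		/* arithmetic done in a narrower type */
       lhs_4 = (long) _1;

     and an available _7 = _5 + _6 with _5 = (long) a_2 and _6 = (long) b_3,
     lhs_4 can reuse _7 directly for a sign-change, or _7 & mask for a
     zero-extension, as matched below.  */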
4304 enum tree_code code = gimple_assign_rhs_code (stmt);
4305 tree type = TREE_TYPE (lhs);
4306 tree rhs1 = gimple_assign_rhs1 (stmt);
4307 switch (code)
75a70cf9 4308 {
fa879112 4309 CASE_CONVERT:
4310 /* Match arithmetic done in a different type where we can easily
4311 substitute the result from some earlier sign-changed or widened
4312 operation. */
4313 if (INTEGRAL_TYPE_P (type)
4314 && TREE_CODE (rhs1) == SSA_NAME
4315 /* We only handle sign-changes or zero-extension -> & mask. */
4316 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4317 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4318 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4319 {
4320 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4321 if (def
4322 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4323 || gimple_assign_rhs_code (def) == MINUS_EXPR
4324 || gimple_assign_rhs_code (def) == MULT_EXPR))
4325 {
4326 tree ops[3] = {};
4327 /* Either we have the op widened available. */
4328 ops[0] = valueized_wider_op (type,
4329 gimple_assign_rhs1 (def));
4330 if (ops[0])
4331 ops[1] = valueized_wider_op (type,
4332 gimple_assign_rhs2 (def));
4333 if (ops[0] && ops[1])
4334 {
4335 ops[0] = vn_nary_op_lookup_pieces
4336 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4337 /* We have wider operation available. */
6b8ca7f3 4338 if (ops[0]
4339 /* If the leader is a wrapping operation we can
4340 insert it for code hoisting w/o introducing
4341			   undefined overflow.  If it is not, it has to
4342 be available. See PR86554. */
4343 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4344 || (rpo_avail && vn_context_bb
4345 && rpo_avail->eliminate_avail (vn_context_bb,
4346 ops[0]))))
fa879112 4347 {
4348 unsigned lhs_prec = TYPE_PRECISION (type);
4349 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4350 if (lhs_prec == rhs_prec)
4351 {
d8483dd1 4352 gimple_match_op match_op (gimple_match_cond::UNCOND,
4353 NOP_EXPR, type, ops[0]);
49446baa 4354 result = vn_nary_build_or_lookup (&match_op);
fa879112 4355 if (result)
b49e8ef9 4356 {
4357 bool changed = set_ssa_val_to (lhs, result);
4358 vn_nary_op_insert_stmt (stmt, result);
4359 return changed;
4360 }
fa879112 4361 }
4362 else
4363 {
49446baa 4364 tree mask = wide_int_to_tree
4365 (type, wi::mask (rhs_prec, false, lhs_prec));
d8483dd1 4366 gimple_match_op match_op (gimple_match_cond::UNCOND,
4367 BIT_AND_EXPR,
49446baa 4368 TREE_TYPE (lhs),
4369 ops[0], mask);
4370 result = vn_nary_build_or_lookup (&match_op);
fa879112 4371 if (result)
b49e8ef9 4372 {
4373 bool changed = set_ssa_val_to (lhs, result);
4374 vn_nary_op_insert_stmt (stmt, result);
4375 return changed;
4376 }
fa879112 4377 }
4378 }
4379 }
4380 }
4381 }
4382 default:;
75a70cf9 4383 }
4384
fa879112 4385 bool changed = set_ssa_val_to (lhs, lhs);
4386 vn_nary_op_insert_stmt (stmt, lhs);
75a70cf9 4387 return changed;
4388}
4389
4390/* Visit a call STMT storing into LHS. Return true if the value number
4391 of the LHS has changed as a result. */
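
/* For illustration (a hypothetical pure function foo): given two calls

     x_1 = foo (a_2);
     ...
     x_3 = foo (a_2);

   with the same valueized arguments and the same VUSE, the lookup finds
   the first call and x_3 is value-numbered to x_1; the VDEF of a call
   discovered to not clobber memory is value-numbered to its VUSE.  */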
4392
4393static bool
1a91d914 4394visit_reference_op_call (tree lhs, gcall *stmt)
9e9e6e3e 4395{
4396 bool changed = false;
75a70cf9 4397 struct vn_reference_s vr1;
b736e424 4398 vn_reference_t vnresult = NULL;
b736e424 4399 tree vdef = gimple_vdef (stmt);
9e9e6e3e 4400
7ec657ff 4401 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4402 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4403 lhs = NULL_TREE;
4404
2fd3ecff 4405 vn_reference_lookup_call (stmt, &vnresult, &vr1);
b736e424 4406 if (vnresult)
9e9e6e3e 4407 {
d1a94241 4408 if (vnresult->result_vdef && vdef)
b736e424 4409 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
0b77b2cf 4410 else if (vdef)
4411 /* If the call was discovered to be pure or const reflect
4412 that as far as possible. */
4413 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
b736e424 4414
4415 if (!vnresult->result && lhs)
4416 vnresult->result = lhs;
4417
4418 if (vnresult->result && lhs)
eb074ef3 4419 changed |= set_ssa_val_to (lhs, vnresult->result);
9e9e6e3e 4420 }
4421 else
4422 {
75a70cf9 4423 vn_reference_t vr2;
2fd3ecff 4424 vn_reference_s **slot;
81df50de 4425 tree vdef_val = vdef;
b736e424 4426 if (vdef)
81df50de 4427 {
4428	  /* If we value-numbered an indirect call's function to
4429	     one not clobbering memory, value-number its VDEF to its
4430	     VUSE.  */
4431 tree fn = gimple_call_fn (stmt);
4432 if (fn && TREE_CODE (fn) == SSA_NAME)
4433 {
4434 fn = SSA_VAL (fn);
4435 if (TREE_CODE (fn) == ADDR_EXPR
4436 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4437 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4438 & (ECF_CONST | ECF_PURE)))
4439 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4440 }
4441 changed |= set_ssa_val_to (vdef, vdef_val);
4442 }
b736e424 4443 if (lhs)
4444 changed |= set_ssa_val_to (lhs, lhs);
ca5aa39a 4445 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
dd277d48 4446 vr2->vuse = vr1.vuse;
2fd3ecff 4447 /* As we are not walking the virtual operand chain we know the
4448 shared_lookup_references are still original so we can re-use
4449 them here. */
4450 vr2->operands = vr1.operands.copy ();
3918bd18 4451 vr2->type = vr1.type;
4452 vr2->set = vr1.set;
75a70cf9 4453 vr2->hashcode = vr1.hashcode;
4454 vr2->result = lhs;
81df50de 4455 vr2->result_vdef = vdef_val;
dcf49a51 4456 vr2->value_id = 0;
c42ece58 4457 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
51e85e64 4458 INSERT);
2fd3ecff 4459 gcc_assert (!*slot);
75a70cf9 4460 *slot = vr2;
c42ece58 4461 vr2->next = last_inserted_ref;
4462 last_inserted_ref = vr2;
9e9e6e3e 4463 }
4464
4465 return changed;
4466}
4467
4468/* Visit a load from a reference operator RHS, part of STMT, value number it,
4469 and return true if the value number of the LHS has changed as a result. */
4470
4471static bool
42acab1c 4472visit_reference_op_load (tree lhs, tree op, gimple *stmt)
9e9e6e3e 4473{
4474 bool changed = false;
4a83fadb 4475 tree last_vuse;
4476 tree result;
4477
4478 last_vuse = gimple_vuse (stmt);
8f190c8a 4479 result = vn_reference_lookup (op, gimple_vuse (stmt),
f52fbd56 4480 default_vn_walk_kind, NULL, true, &last_vuse);
9e9e6e3e 4481
1d9353f3 4482 /* We handle type-punning through unions by value-numbering based
4483 on offset and size of the access. Be prepared to handle a
4484 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
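  /* For illustration (a hypothetical union): with
     union { int i; float f; } u;  a store u.i = j_1 followed by a load
     of u.f can be found by the offset/size based lookup, and the type
     mismatch is reconciled by valuing the load as
     VIEW_CONVERT_EXPR <float> (j_1).  */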
4485 if (result
4486 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4487 {
4488 /* We will be setting the value number of lhs to the value number
4489 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4490 So first simplify and lookup this expression to see if it
4491 is already available. */
d8483dd1 4492 gimple_match_op res_op (gimple_match_cond::UNCOND,
4493 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
49446baa 4494 result = vn_nary_build_or_lookup (&res_op);
51e85e64 4495      /* When building the conversion fails, avoid inserting the reference
4496 again. */
4497 if (!result)
4498 return set_ssa_val_to (lhs, lhs);
1d9353f3 4499 }
4500
9e9e6e3e 4501 if (result)
eb074ef3 4502 changed = set_ssa_val_to (lhs, result);
9e9e6e3e 4503 else
4504 {
4505 changed = set_ssa_val_to (lhs, lhs);
39215e09 4506 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
9e9e6e3e 4507 }
4508
4509 return changed;
4510}
4511
4512
4513/* Visit a store to a reference operator LHS, part of STMT, value number it,
4514 and return true if the value number of the LHS has changed as a result. */
4515
4516static bool
42acab1c 4517visit_reference_op_store (tree lhs, tree op, gimple *stmt)
9e9e6e3e 4518{
4519 bool changed = false;
39215e09 4520 vn_reference_t vnresult = NULL;
002b742b 4521 tree assign;
9e9e6e3e 4522 bool resultsame = false;
39215e09 4523 tree vuse = gimple_vuse (stmt);
4524 tree vdef = gimple_vdef (stmt);
9e9e6e3e 4525
9251bb6f 4526 if (TREE_CODE (op) == SSA_NAME)
4527 op = SSA_VAL (op);
4528
9e9e6e3e 4529 /* First we want to lookup using the *vuses* from the store and see
4530     whether the last store to this location with the same address
4531 had the same value.
4532
4533 The vuses represent the memory state before the store. If the
4534 memory state, address, and value of the store is the same as the
4535 last store to this location, then this store will produce the
4536 same memory state as that store.
4537
4538 In this case the vdef versions for this store are value numbered to those
4539 vuse versions, since they represent the same memory state after
4540 this store.
4541
4542 Otherwise, the vdefs for the store are used when inserting into
4543 the table, since the store generates a new memory state. */
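
  /* For illustration (hypothetical GIMPLE): for two stores of the same
     valueized value to the same location,

       # .MEM_3 = VDEF <.MEM_2>
       a.x = j_1;
       ...
       # .MEM_5 = VDEF <.MEM_3>
       a.x = j_1;

     the second store matches the first, .MEM_5 is value-numbered to its
     VUSE .MEM_3, and the store can later be recognized as redundant,
     provided the TBAA state remains compatible.  */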
4544
002b742b 4545 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4546 if (vnresult
4547 && vnresult->result)
9e9e6e3e 4548 {
002b742b 4549 tree result = vnresult->result;
51e85e64 4550 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4551 || result == SSA_VAL (result));
9e9e6e3e 4552 resultsame = expressions_equal_p (result, op);
1ef012e4 4553 if (resultsame)
4554 {
4555 /* If the TBAA state isn't compatible for downstream reads
4556 we cannot value-number the VDEFs the same. */
4557 alias_set_type set = get_alias_set (lhs);
4558 if (vnresult->set != set
4559 && ! alias_set_subset_of (set, vnresult->set))
4560 resultsame = false;
4561 }
9e9e6e3e 4562 }
4563
002b742b 4564 if (!resultsame)
4565 {
2fd3ecff 4566 /* Only perform the following when being called from PRE
4567 which embeds tail merging. */
002b742b 4568 if (default_vn_walk_kind == VN_WALK)
39215e09 4569 {
002b742b 4570 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4571 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4572 if (vnresult)
4573 {
51e85e64 4574 VN_INFO (vdef)->visited = true;
002b742b 4575 return set_ssa_val_to (vdef, vnresult->result_vdef);
4576 }
39215e09 4577 }
9e9e6e3e 4578
4579 if (dump_file && (dump_flags & TDF_DETAILS))
4580 {
4581 fprintf (dump_file, "No store match\n");
4582 fprintf (dump_file, "Value numbering store ");
1ffa4346 4583 print_generic_expr (dump_file, lhs);
9e9e6e3e 4584 fprintf (dump_file, " to ");
1ffa4346 4585 print_generic_expr (dump_file, op);
9e9e6e3e 4586 fprintf (dump_file, "\n");
4587 }
4588 /* Have to set value numbers before insert, since insert is
4589 going to valueize the references in-place. */
39215e09 4590 if (vdef)
002b742b 4591 changed |= set_ssa_val_to (vdef, vdef);
9e9e6e3e 4592
802d9f2f 4593 /* Do not insert structure copies into the tables. */
4594 if (is_gimple_min_invariant (op)
4595 || is_gimple_reg (op))
39215e09 4596 vn_reference_insert (lhs, op, vdef, NULL);
4597
2fd3ecff 4598 /* Only perform the following when being called from PRE
4599 which embeds tail merging. */
4600 if (default_vn_walk_kind == VN_WALK)
4601 {
4602 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4603 vn_reference_insert (assign, lhs, vuse, vdef);
4604 }
9e9e6e3e 4605 }
4606 else
4607 {
dd277d48 4608 /* We had a match, so value number the vdef to have the value
4609 number of the vuse it came from. */
9e9e6e3e 4610
4611 if (dump_file && (dump_flags & TDF_DETAILS))
19efce70 4612 fprintf (dump_file, "Store matched earlier value, "
9e9e6e3e 4613 "value numbering store vdefs to matching vuses.\n");
4614
39215e09 4615 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
9e9e6e3e 4616 }
4617
4618 return changed;
4619}
4620
4621/* Visit and value number PHI, return true if the value number
51e85e64 4622 changed. When BACKEDGES_VARYING_P is true then assume all
4623 backedge values are varying. When INSERTED is not NULL then
4624   this is just an ahead query for a possible iteration; set INSERTED
4625 to true if we'd insert into the hashtable. */
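
/* For illustration (hypothetical GIMPLE): a PHI whose executable
   arguments all have the same value, e.g.

     x_4 = PHI <a_1 (2), a_1 (3), VN_TOP (4)>

   is value-numbered to a_1 (VN_TOP and arguments on non-executable edges
   are ignored), unless an equivalent PHI was already recorded, in which
   case it is CSEd to that.  A PHI with differing arguments gets itself
   as value number and is entered into the PHI hash table.  */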
9e9e6e3e 4626
4627static bool
51e85e64 4628visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
9e9e6e3e 4629{
6ba51c38 4630 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
ffb47768 4631 tree backedge_val = NULL_TREE;
4632 bool seen_non_backedge = false;
69859909 4633 tree sameval_base = NULL_TREE;
4634 poly_int64 soff, doff;
4e916062 4635 unsigned n_executable = 0;
6ba51c38 4636 edge_iterator ei;
4637 edge e;
9e9e6e3e 4638
51e85e64 4639 /* TODO: We could check for this in initialization, and replace this
5f6261a7 4640 with a gcc_assert. */
4641 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4642 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4643
51e85e64 4644  /* We track whether a PHI was CSEd to, in order to avoid excessive iterations
4645 that would be necessary only because the PHI changed arguments
4646 but not value. */
4647 if (!inserted)
4648 gimple_set_plf (phi, GF_PLF_1, false);
4649
9e9e6e3e 4650 /* See if all non-TOP arguments have the same value. TOP is
4651 equivalent to everything, so we can ignore it. */
85e9a542 4652 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4653 if (e->flags & EDGE_EXECUTABLE)
4654 {
4655 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
9e9e6e3e 4656
4e916062 4657 ++n_executable;
fcf59b73 4658 if (TREE_CODE (def) == SSA_NAME)
4659 {
fcf59b73 4660 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4661 def = SSA_VAL (def);
ffb47768 4662 if (e->flags & EDGE_DFS_BACK)
4663 backedge_val = def;
fcf59b73 4664 }
ffb47768 4665 if (!(e->flags & EDGE_DFS_BACK))
4666 seen_non_backedge = true;
85e9a542 4667 if (def == VN_TOP)
6ba51c38 4668 ;
4669 /* Ignore undefined defs for sameval but record one. */
4670 else if (TREE_CODE (def) == SSA_NAME
51e85e64 4671 && ! virtual_operand_p (def)
6ba51c38 4672 && ssa_undefined_value_p (def, false))
4673 seen_undef = def;
4674 else if (sameval == VN_TOP)
85249fd1 4675 sameval = def;
4676 else if (!expressions_equal_p (def, sameval))
85e9a542 4677 {
69859909 4678	    /* We know we're arriving only with invariant addresses here;
4679 try harder comparing them. We can do some caching here
4680 which we cannot do in expressions_equal_p. */
4681 if (TREE_CODE (def) == ADDR_EXPR
4682 && TREE_CODE (sameval) == ADDR_EXPR
4683 && sameval_base != (void *)-1)
4684 {
4685 if (!sameval_base)
4686 sameval_base = get_addr_base_and_unit_offset
4687 (TREE_OPERAND (sameval, 0), &soff);
4688 if (!sameval_base)
4689 sameval_base = (tree)(void *)-1;
4690 else if ((get_addr_base_and_unit_offset
4691 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4692 && known_eq (soff, doff))
4693 continue;
4694 }
ffb47768 4695 sameval = NULL_TREE;
85249fd1 4696 break;
85e9a542 4697 }
4698 }
9e9e6e3e 4699
8973f96c 4700  /* If the value we want to use is flowing over the backedge and we
4701     should take it as VARYING but it has a non-VARYING value, drop to
4702     VARYING.
4703     If we value-number a virtual operand, never value-number it to the
ffb47768 4704     value from the backedge, as that confuses the alias-walking code;
4705     see gcc.dg/torture/pr87176.c.  If the value is the same on a
4706     non-backedge everything is OK though.  */
2fb4181e 4707 bool visited_p;
4708 if ((backedge_val
4709 && !seen_non_backedge
4710 && TREE_CODE (backedge_val) == SSA_NAME
4711 && sameval == backedge_val
4712 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4713 || SSA_VAL (backedge_val) != backedge_val))
4714      /* Do not value-number a virtual operand to something not visited
4715	 though, given that allows us to escape a region in alias walking.  */
4716 || (sameval
4717 && TREE_CODE (sameval) == SSA_NAME
c2ec998b 4718 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
2fb4181e 4719 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4720 && (SSA_VAL (sameval, &visited_p), !visited_p)))
ffb47768 4721 /* Note this just drops to VARYING without inserting the PHI into
4722 the hashes. */
fcf59b73 4723 result = PHI_RESULT (phi);
6ba51c38 4724  /* If none of the edges was executable keep the value-number at VN_TOP;
4725     if only a single edge is executable use its value.  */
fcf59b73 4726 else if (n_executable <= 1)
6ba51c38 4727 result = seen_undef ? seen_undef : sameval;
0340b0d4 4728  /* If we saw only undefined values and VN_TOP, use one of the
4729 undefined values. */
6ba51c38 4730 else if (sameval == VN_TOP)
0340b0d4 4731 result = seen_undef ? seen_undef : sameval;
85249fd1 4732 /* First see if it is equivalent to a phi node in this block. We prefer
4733 this as it allows IV elimination - see PRs 66502 and 67167. */
51e85e64 4734 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4735 {
4736 if (!inserted
4737 && TREE_CODE (result) == SSA_NAME
4738 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4739 {
4740 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4741 if (dump_file && (dump_flags & TDF_DETAILS))
4742 {
4743 fprintf (dump_file, "Marking CSEd to PHI node ");
4744 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4745 0, TDF_SLIM);
4746 fprintf (dump_file, "\n");
4747 }
4748 }
4749 }
6ba51c38 4750 /* If all values are the same use that, unless we've seen undefined
4751 values as well and the value isn't constant.
4752 CCP/copyprop have the same restriction to not remove uninit warnings. */
ffb47768 4753 else if (sameval
6ba51c38 4754 && (! seen_undef || is_gimple_min_invariant (sameval)))
4755 result = sameval;
9e9e6e3e 4756 else
4757 {
6ba51c38 4758 result = PHI_RESULT (phi);
4759      /* Only insert PHIs that are varying; for constant value numbers
4760	 we would mess up equivalences otherwise, as we are only comparing
4761	 the immediate controlling predicates.  */
51e85e64 4762 vn_phi_insert (phi, result, backedges_varying_p);
4763 if (inserted)
4764 *inserted = true;
9e9e6e3e 4765 }
4766
6ba51c38 4767 return set_ssa_val_to (PHI_RESULT (phi), result);
9e9e6e3e 4768}
4769
9e9e6e3e 4770/* Try to simplify RHS using equivalences and constant folding. */
4771
4772static tree
1a91d914 4773try_to_simplify (gassign *stmt)
9e9e6e3e 4774{
ce993cc2 4775 enum tree_code code = gimple_assign_rhs_code (stmt);
e004838d 4776 tree tem;
4777
d4cdfd27 4778  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
4779     in this case; there is no point in doing extra work.  */
ce993cc2 4780 if (code == SSA_NAME)
75a70cf9 4781 return NULL_TREE;
e004838d 4782
1d0b727d 4783 /* First try constant folding based on our current lattice. */
eb074ef3 4784 mprts_hook = vn_lookup_simplify_result;
ef8cb3d3 4785 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
eb074ef3 4786 mprts_hook = NULL;
ce993cc2 4787 if (tem
4788 && (TREE_CODE (tem) == SSA_NAME
4789 || is_gimple_min_invariant (tem)))
1d0b727d 4790 return tem;
4791
75a70cf9 4792 return NULL_TREE;
9e9e6e3e 4793}
4794
51e85e64 4795/* Visit and value number STMT, return true if the value number
4796 changed. */
9e9e6e3e 4797
4798static bool
51e85e64 4799visit_stmt (gimple *stmt, bool backedges_varying_p = false)
9e9e6e3e 4800{
4801 bool changed = false;
1253e89f 4802
4803 if (dump_file && (dump_flags & TDF_DETAILS))
9e9e6e3e 4804 {
51e85e64 4805 fprintf (dump_file, "Value numbering stmt = ");
1ffa4346 4806 print_gimple_stmt (dump_file, stmt, 0);
9e9e6e3e 4807 }
4808
1253e89f 4809 if (gimple_code (stmt) == GIMPLE_PHI)
51e85e64 4810 changed = visit_phi (stmt, NULL, backedges_varying_p);
afb92221 4811 else if (gimple_has_volatile_ops (stmt))
4812 changed = defs_to_varying (stmt);
4813 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4814 {
4815 enum tree_code code = gimple_assign_rhs_code (ass);
4816 tree lhs = gimple_assign_lhs (ass);
4817 tree rhs1 = gimple_assign_rhs1 (ass);
4818 tree simplified;
4819
4820 /* Shortcut for copies. Simplifying copies is pointless,
4821 since we copy the expression and value they represent. */
4822 if (code == SSA_NAME
4823 && TREE_CODE (lhs) == SSA_NAME)
4824 {
4825 changed = visit_copy (lhs, rhs1);
4826 goto done;
4827 }
4828 simplified = try_to_simplify (ass);
4829 if (simplified)
9e9e6e3e 4830 {
afb92221 4831 if (dump_file && (dump_flags & TDF_DETAILS))
2a922cb6 4832 {
afb92221 4833 fprintf (dump_file, "RHS ");
1ffa4346 4834 print_gimple_expr (dump_file, ass, 0);
afb92221 4835 fprintf (dump_file, " simplified to ");
1ffa4346 4836 print_generic_expr (dump_file, simplified);
afb92221 4837 fprintf (dump_file, "\n");
4838 }
4839 }
4840 /* Setting value numbers to constants will occasionally
4841 screw up phi congruence because constants are not
4842 uniquely associated with a single ssa name that can be
4843 looked up. */
4844 if (simplified
4845 && is_gimple_min_invariant (simplified)
4846 && TREE_CODE (lhs) == SSA_NAME)
4847 {
4848 changed = set_ssa_val_to (lhs, simplified);
4849 goto done;
4850 }
4851 else if (simplified
4852 && TREE_CODE (simplified) == SSA_NAME
4853 && TREE_CODE (lhs) == SSA_NAME)
4854 {
4855 changed = visit_copy (lhs, simplified);
4856 goto done;
4857 }
4858
4859 if ((TREE_CODE (lhs) == SSA_NAME
4860 /* We can substitute SSA_NAMEs that are live over
4861 abnormal edges with their constant value. */
4862 && !(gimple_assign_copy_p (ass)
4863 && is_gimple_min_invariant (rhs1))
4864 && !(simplified
4865 && is_gimple_min_invariant (simplified))
4866 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4867 /* Stores or copies from SSA_NAMEs that are live over
4868 abnormal edges are a problem. */
4869 || (code == SSA_NAME
4870 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4871 changed = defs_to_varying (ass);
4872 else if (REFERENCE_CLASS_P (lhs)
4873 || DECL_P (lhs))
4874 changed = visit_reference_op_store (lhs, rhs1, ass);
4875 else if (TREE_CODE (lhs) == SSA_NAME)
4876 {
4877 if ((gimple_assign_copy_p (ass)
4878 && is_gimple_min_invariant (rhs1))
4879 || (simplified
4880 && is_gimple_min_invariant (simplified)))
4881 {
4882 if (simplified)
4883 changed = set_ssa_val_to (lhs, simplified);
4884 else
4885 changed = set_ssa_val_to (lhs, rhs1);
4886 }
4887 else
4888 {
4889 /* Visit the original statement. */
4890 switch (vn_get_stmt_kind (ass))
4891 {
4892 case VN_NARY:
4893 changed = visit_nary_op (lhs, ass);
4894 break;
4895 case VN_REFERENCE:
4896 changed = visit_reference_op_load (lhs, rhs1, ass);
4897 break;
4898 default:
4899 changed = defs_to_varying (ass);
4900 break;
4901 }
2a922cb6 4902 }
afb92221 4903 }
4904 else
4905 changed = defs_to_varying (ass);
4906 }
4907 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4908 {
4909 tree lhs = gimple_call_lhs (call_stmt);
4910 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4911 {
4912 /* Try constant folding based on our current lattice. */
4913 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4914 vn_valueize);
75a70cf9 4915 if (simplified)
9e9e6e3e 4916 {
4917 if (dump_file && (dump_flags & TDF_DETAILS))
4918 {
afb92221 4919 fprintf (dump_file, "call ");
1ffa4346 4920 print_gimple_expr (dump_file, call_stmt, 0);
9e9e6e3e 4921 fprintf (dump_file, " simplified to ");
1ffa4346 4922 print_generic_expr (dump_file, simplified);
eb074ef3 4923 fprintf (dump_file, "\n");
9e9e6e3e 4924 }
4925 }
4926 /* Setting value numbers to constants will occasionally
4927 screw up phi congruence because constants are not
4928 uniquely associated with a single ssa name that can be
4929 looked up. */
75a70cf9 4930 if (simplified
afb92221 4931 && is_gimple_min_invariant (simplified))
9e9e6e3e 4932 {
9e9e6e3e 4933 changed = set_ssa_val_to (lhs, simplified);
afb92221 4934 if (gimple_vdef (call_stmt))
4935 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4936 SSA_VAL (gimple_vuse (call_stmt)));
9e9e6e3e 4937 goto done;
4938 }
75a70cf9 4939 else if (simplified
afb92221 4940 && TREE_CODE (simplified) == SSA_NAME)
9e9e6e3e 4941 {
4942 changed = visit_copy (lhs, simplified);
afb92221 4943 if (gimple_vdef (call_stmt))
4944 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4945 SSA_VAL (gimple_vuse (call_stmt)));
9e9e6e3e 4946 goto done;
4947 }
afb92221 4948 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
9e9e6e3e 4949 {
afb92221 4950 changed = defs_to_varying (call_stmt);
4951 goto done;
9e9e6e3e 4952 }
9e9e6e3e 4953 }
75a70cf9 4954
81df50de 4955 /* Pick up flags from a devirtualization target. */
4956 tree fn = gimple_call_fn (stmt);
4957 int extra_fnflags = 0;
4958 if (fn && TREE_CODE (fn) == SSA_NAME)
4959 {
4960 fn = SSA_VAL (fn);
4961 if (TREE_CODE (fn) == ADDR_EXPR
4962 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4963 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4964 }
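      /* An illustrative sketch (assumed source, not a testcase): given

	     int f (int) __attribute__((const));
	     ...
	     x_1 = f (a_2);
	     y_3 = f (a_2);

	 ECF_CONST guarantees the result depends only on the arguments, so
	 both calls receive the same value number and the second one
	 becomes redundant.  */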
afb92221 4965 if (!gimple_call_internal_p (call_stmt)
4966 && (/* Calls to the same function with the same vuse
4967 and the same operands do not necessarily return the same
4968 value, unless they're pure or const. */
81df50de 4969 ((gimple_call_flags (call_stmt) | extra_fnflags)
4970 & (ECF_PURE | ECF_CONST))
afb92221 4971 /* If calls have a vdef, subsequent calls won't have
4972 the same incoming vuse. So, if 2 calls with vdef have the
4973 same vuse, we know they're not subsequent.
4974 We can value number 2 calls to the same function with the
4975 same vuse and the same operands which are not subsequent
4976 the same, because there is no code in the program that can
4977 compare the 2 values... */
4978 || (gimple_vdef (call_stmt)
4979 /* ... unless the call returns a pointer which does
4980 not alias with anything else. In which case the
 4981		 information that the values are distinct is encoded
4982 in the IL. */
4983 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4984 /* Only perform the following when being called from PRE
4985 which embeds tail merging. */
4986 && default_vn_walk_kind == VN_WALK)))
4987 changed = visit_reference_op_call (lhs, call_stmt);
b736e424 4988 else
afb92221 4989 changed = defs_to_varying (call_stmt);
9e9e6e3e 4990 }
afb92221 4991 else
4992 changed = defs_to_varying (stmt);
9e9e6e3e 4993 done:
4994 return changed;
4995}
4996
9e9e6e3e 4997
51e85e64 4998/* Allocate a value number table. */
9e9e6e3e 4999
5000static void
51e85e64 5001allocate_vn_table (vn_tables_t table, unsigned size)
9e9e6e3e 5002{
51e85e64 5003 table->phis = new vn_phi_table_type (size);
5004 table->nary = new vn_nary_op_table_type (size);
5005 table->references = new vn_reference_table_type (size);
9e9e6e3e 5006}
5007
51e85e64 5008/* Free a value number table. */
9e9e6e3e 5009
5010static void
51e85e64 5011free_vn_table (vn_tables_t table)
9e9e6e3e 5012{
ca5aa39a 5013 /* Walk over elements and release vectors. */
5014 vn_reference_iterator_type hir;
5015 vn_reference_t vr;
5016 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5017 vr->operands.release ();
c1f445d2 5018 delete table->phis;
5019 table->phis = NULL;
5020 delete table->nary;
5021 table->nary = NULL;
5022 delete table->references;
5023 table->references = NULL;
9e9e6e3e 5024}
5025
63628665 5026/* Set *ID according to RESULT. */
f8ce304c 5027
5028static void
5029set_value_id_for_result (tree result, unsigned int *id)
5030{
63628665 5031 if (result && TREE_CODE (result) == SSA_NAME)
5032 *id = VN_INFO (result)->value_id;
5033 else if (result && is_gimple_min_invariant (result))
5034 *id = get_or_alloc_constant_value_id (result);
5035 else
5036 *id = get_next_value_id ();
f8ce304c 5037}
5038
8883e700 5039/* Set the value ids in the valid hash tables. */
f6c33c78 5040
5041static void
5042set_hashtable_value_ids (void)
5043{
3e871d4d 5044 vn_nary_op_iterator_type hin;
5045 vn_phi_iterator_type hip;
5046 vn_reference_iterator_type hir;
f6c33c78 5047 vn_nary_op_t vno;
5048 vn_reference_t vr;
5049 vn_phi_t vp;
8883e700 5050
f6c33c78 5051 /* Now set the value ids of the things we had put in the hash
5052 table. */
5053
c1f445d2 5054 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
51e85e64 5055 if (! vno->predicated_values)
5056 set_value_id_for_result (vno->u.result, &vno->value_id);
f6c33c78 5057
c1f445d2 5058 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
f8ce304c 5059 set_value_id_for_result (vp->result, &vp->value_id);
f6c33c78 5060
c1f445d2 5061 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5062 hir)
f8ce304c 5063 set_value_id_for_result (vr->result, &vr->value_id);
f6c33c78 5064}
5065
51e85e64 5066/* Return the maximum value id we have ever seen. */
42b45e81 5067
51e85e64 5068unsigned int
5069get_max_value_id (void)
42b45e81 5070{
51e85e64 5071 return next_value_id;
5072}
42b45e81 5073
51e85e64 5074/* Return the next unique value id. */
42b45e81 5075
51e85e64 5076unsigned int
5077get_next_value_id (void)
5078{
5079 return next_value_id++;
5080}
42b45e81 5081
42b45e81 5082
51e85e64 5083/* Compare two expressions E1 and E2 and return true if they are equal. */
42b45e81 5084
51e85e64 5085bool
5086expressions_equal_p (tree e1, tree e2)
42b45e81 5087{
51e85e64 5088 /* The obvious case. */
5089 if (e1 == e2)
5090 return true;
42b45e81 5091
51e85e64 5092 /* If either one is VN_TOP consider them equal. */
5093 if (e1 == VN_TOP || e2 == VN_TOP)
5094 return true;
db981500 5095
51e85e64 5096 /* If only one of them is null, they cannot be equal. */
5097 if (!e1 || !e2)
5098 return false;
089af8be 5099
51e85e64 5100 /* Now perform the actual comparison. */
5101 if (TREE_CODE (e1) == TREE_CODE (e2)
5102 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5103 return true;
f6c33c78 5104
5105 return false;
5106}
5107
2ac47fdf 5108
5109/* Return true if the nary operation NARY may trap. This is a copy
5110 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
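/* An illustrative sketch (not from a testcase): a signed division
   x_1 / y_2 with an SSA_NAME divisor may trap on division by zero, and
   with -ftrapv a signed x_1 + y_2 may trap on overflow; this predicate
   reports that so callers (e.g. PRE insertion) can avoid moving such
   operations past the points that guard them.  */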
5111
5112bool
5113vn_nary_may_trap (vn_nary_op_t nary)
5114{
5115 tree type;
888b74b6 5116 tree rhs2 = NULL_TREE;
2ac47fdf 5117 bool honor_nans = false;
5118 bool honor_snans = false;
5119 bool fp_operation = false;
5120 bool honor_trapv = false;
5121 bool handled, ret;
5122 unsigned i;
5123
5124 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5125 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5126 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5127 {
5128 type = nary->type;
5129 fp_operation = FLOAT_TYPE_P (type);
5130 if (fp_operation)
5131 {
5132 honor_nans = flag_trapping_math && !flag_finite_math_only;
5133 honor_snans = flag_signaling_nans != 0;
5134 }
5135 else if (INTEGRAL_TYPE_P (type)
5136 && TYPE_OVERFLOW_TRAPS (type))
5137 honor_trapv = true;
5138 }
888b74b6 5139 if (nary->length >= 2)
5140 rhs2 = nary->op[1];
2ac47fdf 5141 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5142 honor_trapv,
5143 honor_nans, honor_snans, rhs2,
5144 &handled);
5145 if (handled
5146 && ret)
5147 return true;
5148
5149 for (i = 0; i < nary->length; ++i)
5150 if (tree_could_trap_p (nary->op[i]))
5151 return true;
5152
5153 return false;
948ac165 5154}
5155
5156/* Return true if the reference operation REF may trap. */
5157
5158bool
5159vn_reference_may_trap (vn_reference_t ref)
5160{
5161 switch (ref->operands[0].opcode)
5162 {
5163 case MODIFY_EXPR:
5164 case CALL_EXPR:
5165 /* We do not handle calls. */
5166 case ADDR_EXPR:
5167 /* And toplevel address computations never trap. */
5168 return false;
5169 default:;
5170 }
5171
5172 vn_reference_op_t op;
5173 unsigned i;
5174 FOR_EACH_VEC_ELT (ref->operands, i, op)
5175 {
5176 switch (op->opcode)
5177 {
5178 case WITH_SIZE_EXPR:
5179 case TARGET_MEM_REF:
5180 /* Always variable. */
5181 return true;
5182 case COMPONENT_REF:
5183 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5184 return true;
5185 break;
5186 case ARRAY_RANGE_REF:
5187 case ARRAY_REF:
5188 if (TREE_CODE (op->op0) == SSA_NAME)
5189 return true;
5190 break;
5191 case MEM_REF:
5192 /* Nothing interesting in itself, the base is separate. */
5193 break;
5194 /* The following are the address bases. */
5195 case SSA_NAME:
5196 return true;
5197 case ADDR_EXPR:
5198 if (op->op0)
5199 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5200 return false;
5201 default:;
5202 }
5203 }
5204 return false;
2ac47fdf 5205}
2201c330 5206
2201c330 5207eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5208 bitmap inserted_exprs_)
5209 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5210 el_todo (0), eliminations (0), insertions (0),
5211 inserted_exprs (inserted_exprs_)
5212{
5213 need_eh_cleanup = BITMAP_ALLOC (NULL);
5214 need_ab_cleanup = BITMAP_ALLOC (NULL);
5215}
5216
5217eliminate_dom_walker::~eliminate_dom_walker ()
5218{
5219 BITMAP_FREE (need_eh_cleanup);
5220 BITMAP_FREE (need_ab_cleanup);
5221}
5222
5223/* Return a leader for OP that is available at the current point of the
5224 eliminate domwalk. */
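/* An illustrative sketch (assumed SSA names, not from a testcase): if
   x_1 and y_2 value-number to the same value and x_1's definition was
   encountered first during the dominator walk, eliminate_push_avail
   recorded x_1 as the leader, so eliminate_avail (bb, y_2) returns x_1
   and dominated uses of y_2 can be replaced by it.  */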
5225
5226tree
51e85e64 5227eliminate_dom_walker::eliminate_avail (basic_block, tree op)
2201c330 5228{
5229 tree valnum = VN_INFO (op)->valnum;
5230 if (TREE_CODE (valnum) == SSA_NAME)
5231 {
5232 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5233 return valnum;
5234 if (avail.length () > SSA_NAME_VERSION (valnum))
5235 return avail[SSA_NAME_VERSION (valnum)];
5236 }
5237 else if (is_gimple_min_invariant (valnum))
5238 return valnum;
5239 return NULL_TREE;
5240}
5241
5242/* At the current point of the eliminate domwalk make OP available. */
5243
5244void
51e85e64 5245eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
2201c330 5246{
5247 tree valnum = VN_INFO (op)->valnum;
5248 if (TREE_CODE (valnum) == SSA_NAME)
5249 {
5250 if (avail.length () <= SSA_NAME_VERSION (valnum))
5251 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5252 tree pushop = op;
5253 if (avail[SSA_NAME_VERSION (valnum)])
5254 pushop = avail[SSA_NAME_VERSION (valnum)];
5255 avail_stack.safe_push (pushop);
5256 avail[SSA_NAME_VERSION (valnum)] = op;
5257 }
5258}
5259
5260/* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5261 the leader for the expression if insertion was successful. */
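/* An illustrative sketch (assumed names): if VAL is recorded with the
   expression (int) x_1 and x_1 has an available leader l_2, a new
   statement t_3 = (int) l_2 is inserted before *GSI and t_3, now
   valued the same as VAL, is returned.  */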
5262
5263tree
51e85e64 5264eliminate_dom_walker::eliminate_insert (basic_block bb,
5265 gimple_stmt_iterator *gsi, tree val)
2201c330 5266{
5267 /* We can insert a sequence with a single assignment only. */
5268 gimple_seq stmts = VN_INFO (val)->expr;
5269 if (!gimple_seq_singleton_p (stmts))
5270 return NULL_TREE;
5271 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5272 if (!stmt
5273 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5274 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5275 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5276 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5277 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5278 return NULL_TREE;
5279
5280 tree op = gimple_assign_rhs1 (stmt);
5281 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5282 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5283 op = TREE_OPERAND (op, 0);
51e85e64 5284 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
2201c330 5285 if (!leader)
5286 return NULL_TREE;
5287
5288 tree res;
5289 stmts = NULL;
5290 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5291 res = gimple_build (&stmts, BIT_FIELD_REF,
5292 TREE_TYPE (val), leader,
5293 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5294 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5295 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5296 res = gimple_build (&stmts, BIT_AND_EXPR,
5297 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5298 else
5299 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5300 TREE_TYPE (val), leader);
5301 if (TREE_CODE (res) != SSA_NAME
5302 || SSA_NAME_IS_DEFAULT_DEF (res)
5303 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5304 {
5305 gimple_seq_discard (stmts);
5306
5307 /* During propagation we have to treat SSA info conservatively
5308 and thus we can end up simplifying the inserted expression
5309 at elimination time to sth not defined in stmts. */
5310 /* But then this is a redundancy we failed to detect. Which means
5311 res now has two values. That doesn't play well with how
5312 we track availability here, so give up. */
5313 if (dump_file && (dump_flags & TDF_DETAILS))
5314 {
5315 if (TREE_CODE (res) == SSA_NAME)
51e85e64 5316 res = eliminate_avail (bb, res);
2201c330 5317 if (res)
5318 {
5319 fprintf (dump_file, "Failed to insert expression for value ");
5320 print_generic_expr (dump_file, val);
5321 fprintf (dump_file, " which is really fully redundant to ");
5322 print_generic_expr (dump_file, res);
5323 fprintf (dump_file, "\n");
5324 }
5325 }
5326
5327 return NULL_TREE;
5328 }
5329 else
5330 {
5331 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
51e85e64 5332 VN_INFO (res)->valnum = val;
5333 VN_INFO (res)->visited = true;
2201c330 5334 }
5335
5336 insertions++;
5337 if (dump_file && (dump_flags & TDF_DETAILS))
5338 {
5339 fprintf (dump_file, "Inserted ");
5340 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5341 }
5342
5343 return res;
5344}
5345
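/* Eliminate redundancies in the statement at *GSI in basic block B:
   replace the LHS value and remaining uses with available leaders,
   queue redundant stores and statements for removal, fold the
   statement when operands changed, and record needed EH/AB/CFG
   cleanups.  */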
51e85e64 5346void
5347eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
2201c330 5348{
51e85e64 5349 tree sprime = NULL_TREE;
5350 gimple *stmt = gsi_stmt (*gsi);
5351 tree lhs = gimple_get_lhs (stmt);
5352 if (lhs && TREE_CODE (lhs) == SSA_NAME
5353 && !gimple_has_volatile_ops (stmt)
5354 /* See PR43491. Do not replace a global register variable when
 5355	 it is the RHS of an assignment.  Do replace local register
5356 variables since gcc does not guarantee a local variable will
 5357	 be allocated in a register.
5358 ??? The fix isn't effective here. This should instead
5359 be ensured by not value-numbering them the same but treating
5360 them like volatiles? */
5361 && !(gimple_assign_single_p (stmt)
5362 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5363 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5364 && is_global_var (gimple_assign_rhs1 (stmt)))))
2201c330 5365 {
51e85e64 5366 sprime = eliminate_avail (b, lhs);
5367 if (!sprime)
2201c330 5368 {
51e85e64 5369 /* If there is no existing usable leader but SCCVN thinks
5370 it has an expression it wants to use as replacement,
5371 insert that. */
5372 tree val = VN_INFO (lhs)->valnum;
5373 if (val != VN_TOP
5374 && TREE_CODE (val) == SSA_NAME
5375 && VN_INFO (val)->needs_insertion
5376 && VN_INFO (val)->expr != NULL
5377 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5378 eliminate_push_avail (b, sprime);
2201c330 5379 }
5380
51e85e64 5381      /* If this now constitutes a copy, duplicate points-to
 5382	 and range info appropriately.  This is especially
5383 important for inserted code. See tree-ssa-copy.c
5384 for similar code. */
2201c330 5385 if (sprime
51e85e64 5386 && TREE_CODE (sprime) == SSA_NAME)
2201c330 5387 {
51e85e64 5388 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5389 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5390 && SSA_NAME_PTR_INFO (lhs)
5391 && ! SSA_NAME_PTR_INFO (sprime))
2201c330 5392 {
51e85e64 5393 duplicate_ssa_name_ptr_info (sprime,
5394 SSA_NAME_PTR_INFO (lhs));
5395 if (b != sprime_b)
5396 mark_ptr_info_alignment_unknown
5397 (SSA_NAME_PTR_INFO (sprime));
2201c330 5398 }
51e85e64 5399 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5400 && SSA_NAME_RANGE_INFO (lhs)
5401 && ! SSA_NAME_RANGE_INFO (sprime)
5402 && b == sprime_b)
5403 duplicate_ssa_name_range_info (sprime,
5404 SSA_NAME_RANGE_TYPE (lhs),
5405 SSA_NAME_RANGE_INFO (lhs));
2201c330 5406 }
5407
51e85e64 5408 /* Inhibit the use of an inserted PHI on a loop header when
5409 the address of the memory reference is a simple induction
5410 variable. In other cases the vectorizer won't do anything
5411 anyway (either it's loop invariant or a complicated
5412 expression). */
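      /* An illustrative sketch (not from a testcase): for a load
	 ... = a[i_1] in a loop where i_1 is a simple induction
	 variable, using a PRE-inserted PHI that carries the previous
	 iteration's value would add a loop-carried dependence and
	 defeat vectorization of the loop.  */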
5413 if (sprime
5414 && TREE_CODE (sprime) == SSA_NAME
5415 && do_pre
5416 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5417 && loop_outer (b->loop_father)
5418 && has_zero_uses (sprime)
5419 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5420 && gimple_assign_load_p (stmt))
2201c330 5421 {
51e85e64 5422 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5423 basic_block def_bb = gimple_bb (def_stmt);
5424 if (gimple_code (def_stmt) == GIMPLE_PHI
5425 && def_bb->loop_father->header == def_bb)
2201c330 5426 {
51e85e64 5427 loop_p loop = def_bb->loop_father;
5428 ssa_op_iter iter;
5429 tree op;
5430 bool found = false;
5431 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
2201c330 5432 {
51e85e64 5433 affine_iv iv;
5434 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5435 if (def_bb
5436 && flow_bb_inside_loop_p (loop, def_bb)
5437 && simple_iv (loop, loop, op, &iv, true))
2201c330 5438 {
51e85e64 5439 found = true;
5440 break;
2201c330 5441 }
51e85e64 5442 }
5443 if (found)
5444 {
5445 if (dump_file && (dump_flags & TDF_DETAILS))
2201c330 5446 {
51e85e64 5447 fprintf (dump_file, "Not replacing ");
5448 print_gimple_expr (dump_file, stmt, 0);
5449 fprintf (dump_file, " with ");
5450 print_generic_expr (dump_file, sprime);
5451 fprintf (dump_file, " which would add a loop"
5452 " carried dependence to loop %d\n",
5453 loop->num);
2201c330 5454 }
51e85e64 5455 /* Don't keep sprime available. */
5456 sprime = NULL_TREE;
2201c330 5457 }
5458 }
51e85e64 5459 }
2201c330 5460
51e85e64 5461 if (sprime)
5462 {
5463 /* If we can propagate the value computed for LHS into
5464 all uses don't bother doing anything with this stmt. */
5465 if (may_propagate_copy (lhs, sprime))
2201c330 5466 {
51e85e64 5467 /* Mark it for removal. */
5468 to_remove.safe_push (stmt);
5469
5470 /* ??? Don't count copy/constant propagations. */
5471 if (gimple_assign_single_p (stmt)
5472 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5473 || gimple_assign_rhs1 (stmt) == sprime))
5474 return;
5475
5476 if (dump_file && (dump_flags & TDF_DETAILS))
2201c330 5477 {
51e85e64 5478 fprintf (dump_file, "Replaced ");
5479 print_gimple_expr (dump_file, stmt, 0);
5480 fprintf (dump_file, " with ");
5481 print_generic_expr (dump_file, sprime);
5482 fprintf (dump_file, " in all uses of ");
5483 print_gimple_stmt (dump_file, stmt, 0);
5484 }
2201c330 5485
51e85e64 5486 eliminations++;
5487 return;
5488 }
2201c330 5489
51e85e64 5490 /* If this is an assignment from our leader (which
5491 happens in the case the value-number is a constant)
5492 then there is nothing to do. */
5493 if (gimple_assign_single_p (stmt)
5494 && sprime == gimple_assign_rhs1 (stmt))
5495 return;
5496
5497 /* Else replace its RHS. */
51e85e64 5498 if (dump_file && (dump_flags & TDF_DETAILS))
5499 {
5500 fprintf (dump_file, "Replaced ");
5501 print_gimple_expr (dump_file, stmt, 0);
5502 fprintf (dump_file, " with ");
5503 print_generic_expr (dump_file, sprime);
5504 fprintf (dump_file, " in ");
5505 print_gimple_stmt (dump_file, stmt, 0);
5506 }
51e85e64 5507 eliminations++;
64e56341 5508
5509 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5510 && stmt_can_make_abnormal_goto (stmt));
51e85e64 5511 gimple *orig_stmt = stmt;
5512 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5513 TREE_TYPE (sprime)))
64e56341 5514 {
5515 /* We preserve conversions to but not from function or method
5516 types. This asymmetry makes it necessary to re-instantiate
5517 conversions here. */
5518 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5519 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5520 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5521 else
5522 gcc_unreachable ();
5523 }
51e85e64 5524 tree vdef = gimple_vdef (stmt);
5525 tree vuse = gimple_vuse (stmt);
5526 propagate_tree_value_into_stmt (gsi, sprime);
5527 stmt = gsi_stmt (*gsi);
5528 update_stmt (stmt);
51ebce8c 5529 /* In case the VDEF on the original stmt was released, value-number
5530 it to the VUSE. This is to make vuse_ssa_val able to skip
5531 released virtual operands. */
51e85e64 5532 if (vdef != gimple_vdef (stmt))
51ebce8c 5533 {
5534 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5535 VN_INFO (vdef)->valnum = vuse;
5536 }
51e85e64 5537
5538 /* If we removed EH side-effects from the statement, clean
5539 its EH information. */
5540 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5541 {
5542 bitmap_set_bit (need_eh_cleanup,
5543 gimple_bb (stmt)->index);
5544 if (dump_file && (dump_flags & TDF_DETAILS))
5545 fprintf (dump_file, " Removed EH side-effects.\n");
5546 }
5547
5548 /* Likewise for AB side-effects. */
5549 if (can_make_abnormal_goto
5550 && !stmt_can_make_abnormal_goto (stmt))
5551 {
5552 bitmap_set_bit (need_ab_cleanup,
5553 gimple_bb (stmt)->index);
5554 if (dump_file && (dump_flags & TDF_DETAILS))
5555 fprintf (dump_file, " Removed AB side-effects.\n");
5556 }
5557
5558 return;
5559 }
5560 }
5561
5562 /* If the statement is a scalar store, see if the expression
5563 has the same value number as its rhs. If so, the store is
5564 dead. */
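  /* An illustrative sketch (assumed GIMPLE, not a testcase):

	 a.x = i_1;
	 ...
	 a.x = i_1;

     If the second store is reached by the same value for a.x under its
     VUSE, it stores what is already there and can be removed.  */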
5565 if (gimple_assign_single_p (stmt)
5566 && !gimple_has_volatile_ops (stmt)
5567 && !is_gimple_reg (gimple_assign_lhs (stmt))
5568 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5569 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5570 {
5571 tree val;
5572 tree rhs = gimple_assign_rhs1 (stmt);
5573 vn_reference_t vnresult;
5574 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5575 &vnresult, false);
5576 if (TREE_CODE (rhs) == SSA_NAME)
5577 rhs = VN_INFO (rhs)->valnum;
5578 if (val
5579 && operand_equal_p (val, rhs, 0))
5580 {
5581 /* We can only remove the later store if the former aliases
5582 at least all accesses the later one does or if the store
5583 was to readonly memory storing the same value. */
5584 alias_set_type set = get_alias_set (lhs);
5585 if (! vnresult
5586 || vnresult->set == set
5587 || alias_set_subset_of (set, vnresult->set))
5588 {
5589 if (dump_file && (dump_flags & TDF_DETAILS))
5590 {
5591 fprintf (dump_file, "Deleted redundant store ");
5592 print_gimple_stmt (dump_file, stmt, 0);
5593 }
5594
5595 /* Queue stmt for removal. */
5596 to_remove.safe_push (stmt);
5597 return;
5598 }
5599 }
5600 }
5601
 5602  /* If this is a control statement for which value numbering left one
 5603     of the outgoing edges unexecutable, force the condition to be
 5604     consistent with that.  */
5605 if (gcond *cond = dyn_cast <gcond *> (stmt))
5606 {
5607 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5608 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5609 {
5610 if (dump_file && (dump_flags & TDF_DETAILS))
5611 {
5612 fprintf (dump_file, "Removing unexecutable edge from ");
5613 print_gimple_stmt (dump_file, stmt, 0);
5614 }
5615 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5616 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5617 gimple_cond_make_true (cond);
5618 else
5619 gimple_cond_make_false (cond);
5620 update_stmt (cond);
5621 el_todo |= TODO_cleanup_cfg;
5622 return;
5623 }
5624 }
5625
5626 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5627 bool was_noreturn = (is_gimple_call (stmt)
5628 && gimple_call_noreturn_p (stmt));
5629 tree vdef = gimple_vdef (stmt);
5630 tree vuse = gimple_vuse (stmt);
5631
5632 /* If we didn't replace the whole stmt (or propagate the result
5633 into all uses), replace all uses on this stmt with their
5634 leaders. */
5635 bool modified = false;
5636 use_operand_p use_p;
5637 ssa_op_iter iter;
5638 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5639 {
5640 tree use = USE_FROM_PTR (use_p);
5641 /* ??? The call code above leaves stmt operands un-updated. */
5642 if (TREE_CODE (use) != SSA_NAME)
5643 continue;
5644 tree sprime;
5645 if (SSA_NAME_IS_DEFAULT_DEF (use))
5646 /* ??? For default defs BB shouldn't matter, but we have to
5647 solve the inconsistency between rpo eliminate and
5648 dom eliminate avail valueization first. */
5649 sprime = eliminate_avail (b, use);
5650 else
5651 /* Look for sth available at the definition block of the argument.
5652 This avoids inconsistencies between availability there which
5653 decides if the stmt can be removed and availability at the
5654 use site. The SSA property ensures that things available
5655 at the definition are also available at uses. */
5656 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5657 if (sprime && sprime != use
5658 && may_propagate_copy (use, sprime)
5659 /* We substitute into debug stmts to avoid excessive
5660 debug temporaries created by removed stmts, but we need
5661 to avoid doing so for inserted sprimes as we never want
5662 to create debug temporaries for them. */
5663 && (!inserted_exprs
5664 || TREE_CODE (sprime) != SSA_NAME
5665 || !is_gimple_debug (stmt)
5666 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5667 {
5668 propagate_value (use_p, sprime);
5669 modified = true;
5670 }
5671 }
5672
 5673  /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
 5674     into, which is a requirement for the IPA devirt machinery.  */
5675 gimple *old_stmt = stmt;
5676 if (modified)
5677 {
5678 /* If a formerly non-invariant ADDR_EXPR is turned into an
5679 invariant one it was on a separate stmt. */
5680 if (gimple_assign_single_p (stmt)
5681 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5682 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5683 gimple_stmt_iterator prev = *gsi;
5684 gsi_prev (&prev);
5685 if (fold_stmt (gsi))
5686 {
 5687	  /* fold_stmt may have created new stmts in between
5688 the previous stmt and the folded stmt. Mark
5689 all defs created there as varying to not confuse
5690 the SCCVN machinery as we're using that even during
5691 elimination. */
5692 if (gsi_end_p (prev))
5693 prev = gsi_start_bb (b);
5694 else
5695 gsi_next (&prev);
5696 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5697 do
5698 {
5699 tree def;
5700 ssa_op_iter dit;
5701 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5702 dit, SSA_OP_ALL_DEFS)
5703 /* As existing DEFs may move between stmts
5704 only process new ones. */
5705 if (! has_VN_INFO (def))
5706 {
5707 VN_INFO (def)->valnum = def;
5708 VN_INFO (def)->visited = true;
5709 }
5710 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5711 break;
5712 gsi_next (&prev);
5713 }
5714 while (1);
5715 }
5716 stmt = gsi_stmt (*gsi);
5717 /* In case we folded the stmt away schedule the NOP for removal. */
5718 if (gimple_nop_p (stmt))
5719 to_remove.safe_push (stmt);
5720 }
5721
5722 /* Visit indirect calls and turn them into direct calls if
5723 possible using the devirtualization machinery. Do this before
 5724     checking for required EH/abnormal/noreturn cleanup as devirt
5725 may expose more of those. */
5726 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5727 {
5728 tree fn = gimple_call_fn (call_stmt);
5729 if (fn
5730 && flag_devirtualize
5731 && virtual_method_call_p (fn))
5732 {
5733 tree otr_type = obj_type_ref_class (fn);
5734 unsigned HOST_WIDE_INT otr_tok
5735 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5736 tree instance;
5737 ipa_polymorphic_call_context context (current_function_decl,
5738 fn, stmt, &instance);
5739 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
915df3d8 5740 otr_type, stmt, NULL);
51e85e64 5741 bool final;
5742 vec <cgraph_node *> targets
5743 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5744 otr_tok, context, &final);
5745 if (dump_file)
5746 dump_possible_polymorphic_call_targets (dump_file,
5747 obj_type_ref_class (fn),
5748 otr_tok, context);
5749 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5750 {
5751 tree fn;
5752 if (targets.length () == 1)
5753 fn = targets[0]->decl;
5754 else
5755 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5756 if (dump_enabled_p ())
5757 {
5758 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5759 "converting indirect call to "
5760 "function %s\n",
5761 lang_hooks.decl_printable_name (fn, 2));
5762 }
5763 gimple_call_set_fndecl (call_stmt, fn);
5764 /* If changing the call to __builtin_unreachable
5765 or similar noreturn function, adjust gimple_call_fntype
5766 too. */
5767 if (gimple_call_noreturn_p (call_stmt)
5768 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5769 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5770 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5771 == void_type_node))
5772 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5773 maybe_remove_unused_call_args (cfun, call_stmt);
5774 modified = true;
5775 }
5776 }
5777 }
5778
5779 if (modified)
5780 {
5781 /* When changing a call into a noreturn call, cfg cleanup
5782 is needed to fix up the noreturn call. */
5783 if (!was_noreturn
5784 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5785 to_fixup.safe_push (stmt);
5786 /* When changing a condition or switch into one we know what
5787 edge will be executed, schedule a cfg cleanup. */
5788 if ((gimple_code (stmt) == GIMPLE_COND
5789 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5790 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5791 || (gimple_code (stmt) == GIMPLE_SWITCH
5792 && TREE_CODE (gimple_switch_index
5793 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5794 el_todo |= TODO_cleanup_cfg;
5795 /* If we removed EH side-effects from the statement, clean
5796 its EH information. */
5797 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5798 {
5799 bitmap_set_bit (need_eh_cleanup,
5800 gimple_bb (stmt)->index);
5801 if (dump_file && (dump_flags & TDF_DETAILS))
5802 fprintf (dump_file, " Removed EH side-effects.\n");
5803 }
5804 /* Likewise for AB side-effects. */
5805 if (can_make_abnormal_goto
5806 && !stmt_can_make_abnormal_goto (stmt))
5807 {
5808 bitmap_set_bit (need_ab_cleanup,
5809 gimple_bb (stmt)->index);
5810 if (dump_file && (dump_flags & TDF_DETAILS))
5811 fprintf (dump_file, " Removed AB side-effects.\n");
5812 }
5813 update_stmt (stmt);
51ebce8c 5814 /* In case the VDEF on the original stmt was released, value-number
5815 it to the VUSE. This is to make vuse_ssa_val able to skip
5816 released virtual operands. */
5817 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
51e85e64 5818 VN_INFO (vdef)->valnum = vuse;
5819 }
5820
5821 /* Make new values available - for fully redundant LHS we
5822 continue with the next stmt above and skip this. */
5823 def_operand_p defp;
5824 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5825 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5826}
5827
5828/* Perform elimination for the basic-block B during the domwalk. */
5829
5830edge
5831eliminate_dom_walker::before_dom_children (basic_block b)
5832{
5833 /* Mark new bb. */
5834 avail_stack.safe_push (NULL_TREE);
5835
5836 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5837 if (!(b->flags & BB_EXECUTABLE))
5838 return NULL;
5839
5840 vn_context_bb = b;
5841
5842 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5843 {
5844 gphi *phi = gsi.phi ();
5845 tree res = PHI_RESULT (phi);
5846
5847 if (virtual_operand_p (res))
5848 {
5849 gsi_next (&gsi);
5850 continue;
5851 }
5852
5853 tree sprime = eliminate_avail (b, res);
5854 if (sprime
5855 && sprime != res)
5856 {
5857 if (dump_file && (dump_flags & TDF_DETAILS))
5858 {
5859 fprintf (dump_file, "Replaced redundant PHI node defining ");
5860 print_generic_expr (dump_file, res);
5861 fprintf (dump_file, " with ");
5862 print_generic_expr (dump_file, sprime);
5863 fprintf (dump_file, "\n");
5864 }
5865
5866 /* If we inserted this PHI node ourself, it's not an elimination. */
5867 if (! inserted_exprs
5868 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5869 eliminations++;
5870
5871 /* If we will propagate into all uses don't bother to do
5872 anything. */
5873 if (may_propagate_copy (res, sprime))
5874 {
5875 /* Mark the PHI for removal. */
5876 to_remove.safe_push (phi);
5877 gsi_next (&gsi);
5878 continue;
5879 }
5880
5881 remove_phi_node (&gsi, false);
5882
5883 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5884 sprime = fold_convert (TREE_TYPE (res), sprime);
5885 gimple *stmt = gimple_build_assign (res, sprime);
5886 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5887 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5888 continue;
5889 }
5890
5891 eliminate_push_avail (b, res);
5892 gsi_next (&gsi);
5893 }
5894
5895 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5896 !gsi_end_p (gsi);
5897 gsi_next (&gsi))
5898 eliminate_stmt (b, &gsi);
5899
5900 /* Replace destination PHI arguments. */
5901 edge_iterator ei;
5902 edge e;
5903 FOR_EACH_EDGE (e, ei, b->succs)
5904 if (e->flags & EDGE_EXECUTABLE)
5905 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5906 !gsi_end_p (gsi);
5907 gsi_next (&gsi))
5908 {
5909 gphi *phi = gsi.phi ();
5910 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5911 tree arg = USE_FROM_PTR (use_p);
5912 if (TREE_CODE (arg) != SSA_NAME
5913 || virtual_operand_p (arg))
5914 continue;
5915 tree sprime = eliminate_avail (b, arg);
5916 if (sprime && may_propagate_copy (arg, sprime))
5917 propagate_value (use_p, sprime);
5918 }
5919
5920 vn_context_bb = NULL;
5921
5922 return NULL;
5923}
5924
5925/* Make no longer available leaders no longer available. */
5926
5927void
5928eliminate_dom_walker::after_dom_children (basic_block)
5929{
5930 tree entry;
5931 while ((entry = avail_stack.pop ()) != NULL_TREE)
5932 {
5933 tree valnum = VN_INFO (entry)->valnum;
5934 tree old = avail[SSA_NAME_VERSION (valnum)];
5935 if (old == entry)
5936 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5937 else
5938 avail[SSA_NAME_VERSION (valnum)] = entry;
5939 }
5940}
5941
5942/* Remove queued stmts and perform delayed cleanups. */
5943
5944unsigned
5945eliminate_dom_walker::eliminate_cleanup (bool region_p)
5946{
5947 statistics_counter_event (cfun, "Eliminated", eliminations);
5948 statistics_counter_event (cfun, "Insertions", insertions);
5949
5950 /* We cannot remove stmts during BB walk, especially not release SSA
5951 names there as this confuses the VN machinery. The stmts ending
5952 up in to_remove are either stores or simple copies.
5953 Remove stmts in reverse order to make debug stmt creation possible. */
5954 while (!to_remove.is_empty ())
5955 {
5956 bool do_release_defs = true;
5957 gimple *stmt = to_remove.pop ();
5958
5959 /* When we are value-numbering a region we do not require exit PHIs to
5960 be present so we have to make sure to deal with uses outside of the
5961 region of stmts that we thought are eliminated.
5962 ??? Note we may be confused by uses in dead regions we didn't run
5963 elimination on. Rather than checking individual uses we accept
5964 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
 5965     contains such an example).  */
5966 if (region_p)
5967 {
5968 if (gphi *phi = dyn_cast <gphi *> (stmt))
5969 {
5970 tree lhs = gimple_phi_result (phi);
5971 if (!has_zero_uses (lhs))
5972 {
5973 if (dump_file && (dump_flags & TDF_DETAILS))
5974 fprintf (dump_file, "Keeping eliminated stmt live "
5975 "as copy because of out-of-region uses\n");
5976 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5977 gimple *copy = gimple_build_assign (lhs, sprime);
5978 gimple_stmt_iterator gsi
5979 = gsi_after_labels (gimple_bb (stmt));
5980 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5981 do_release_defs = false;
5982 }
5983 }
21461a79 5984 else if (tree lhs = gimple_get_lhs (stmt))
5985 if (TREE_CODE (lhs) == SSA_NAME
5986 && !has_zero_uses (lhs))
5987 {
5988 if (dump_file && (dump_flags & TDF_DETAILS))
5989 fprintf (dump_file, "Keeping eliminated stmt live "
5990 "as copy because of out-of-region uses\n");
5991 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5992 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5993 if (is_gimple_assign (stmt))
5994 {
5995 gimple_assign_set_rhs_from_tree (&gsi, sprime);
12df02d5 5996 stmt = gsi_stmt (gsi);
5997 update_stmt (stmt);
5998 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
5999 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
21461a79 6000 continue;
6001 }
6002 else
6003 {
6004 gimple *copy = gimple_build_assign (lhs, sprime);
6005 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6006 do_release_defs = false;
6007 }
6008 }
51e85e64 6009 }
6010
6011 if (dump_file && (dump_flags & TDF_DETAILS))
6012 {
6013 fprintf (dump_file, "Removing dead stmt ");
6014 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6015 }
6016
6017 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6018 if (gimple_code (stmt) == GIMPLE_PHI)
6019 remove_phi_node (&gsi, do_release_defs);
6020 else
6021 {
6022 basic_block bb = gimple_bb (stmt);
6023 unlink_stmt_vdef (stmt);
6024 if (gsi_remove (&gsi, true))
6025 bitmap_set_bit (need_eh_cleanup, bb->index);
6026 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6027 bitmap_set_bit (need_ab_cleanup, bb->index);
6028 if (do_release_defs)
6029 release_defs (stmt);
6030 }
6031
6032 /* Removing a stmt may expose a forwarder block. */
6033 el_todo |= TODO_cleanup_cfg;
6034 }
6035
6036 /* Fixup stmts that became noreturn calls. This may require splitting
6037 blocks and thus isn't possible during the dominator walk. Do this
 6038     in reverse order so we don't inadvertently remove a stmt we want to
6039 fixup by visiting a dominating now noreturn call first. */
6040 while (!to_fixup.is_empty ())
6041 {
6042 gimple *stmt = to_fixup.pop ();
6043
6044 if (dump_file && (dump_flags & TDF_DETAILS))
6045 {
6046 fprintf (dump_file, "Fixing up noreturn call ");
6047 print_gimple_stmt (dump_file, stmt, 0);
6048 }
6049
6050 if (fixup_noreturn_call (stmt))
6051 el_todo |= TODO_cleanup_cfg;
6052 }
6053
6054 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6055 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6056
6057 if (do_eh_cleanup)
6058 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6059
6060 if (do_ab_cleanup)
6061 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6062
6063 if (do_eh_cleanup || do_ab_cleanup)
6064 el_todo |= TODO_cleanup_cfg;
6065
6066 return el_todo;
6067}
6068
6069/* Eliminate fully redundant computations. */
6070
6071unsigned
6072eliminate_with_rpo_vn (bitmap inserted_exprs)
6073{
6074 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6075
6076 walker.walk (cfun->cfg->x_entry_block_ptr);
6077 return walker.eliminate_cleanup ();
6078}
6079
6080static unsigned
6081do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6082 bool iterate, bool eliminate);
6083
6084void
6085run_rpo_vn (vn_lookup_kind kind)
6086{
6087 default_vn_walk_kind = kind;
6088 do_rpo_vn (cfun, NULL, NULL, true, false);
6089
6090 /* ??? Prune requirement of these. */
6091 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6092 constant_value_ids = BITMAP_ALLOC (NULL);
6093
6094 /* Initialize the value ids and prune out remaining VN_TOPs
6095 from dead code. */
6096 tree name;
6097 unsigned i;
6098 FOR_EACH_SSA_NAME (i, name, cfun)
6099 {
6100 vn_ssa_aux_t info = VN_INFO (name);
6101 if (!info->visited
6102 || info->valnum == VN_TOP)
6103 info->valnum = name;
6104 if (info->valnum == name)
6105 info->value_id = get_next_value_id ();
6106 else if (is_gimple_min_invariant (info->valnum))
6107 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6108 }
6109
6110 /* Propagate. */
6111 FOR_EACH_SSA_NAME (i, name, cfun)
6112 {
6113 vn_ssa_aux_t info = VN_INFO (name);
6114 if (TREE_CODE (info->valnum) == SSA_NAME
6115 && info->valnum != name
6116 && info->value_id != VN_INFO (info->valnum)->value_id)
6117 info->value_id = VN_INFO (info->valnum)->value_id;
6118 }
6119
6120 set_hashtable_value_ids ();
6121
6122 if (dump_file && (dump_flags & TDF_DETAILS))
6123 {
6124 fprintf (dump_file, "Value numbers:\n");
6125 FOR_EACH_SSA_NAME (i, name, cfun)
6126 {
6127 if (VN_INFO (name)->visited
6128 && SSA_VAL (name) != name)
6129 {
6130 print_generic_expr (dump_file, name);
6131 fprintf (dump_file, " = ");
6132 print_generic_expr (dump_file, SSA_VAL (name));
6133 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6134 }
6135 }
6136 }
6137}
6138
6139/* Free VN associated data structures. */
6140
6141void
6142free_rpo_vn (void)
6143{
6144 free_vn_table (valid_info);
6145 XDELETE (valid_info);
6146 obstack_free (&vn_tables_obstack, NULL);
6147 obstack_free (&vn_tables_insert_obstack, NULL);
6148
51e85e64 6149 vn_ssa_aux_iterator_type it;
6150 vn_ssa_aux_t info;
6151 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6152 if (info->needs_insertion)
6153 release_ssa_name (info->name);
6154 obstack_free (&vn_ssa_aux_obstack, NULL);
6155 delete vn_ssa_aux_hash;
6156
6157 delete constant_to_value_id;
6158 constant_to_value_id = NULL;
6159 BITMAP_FREE (constant_value_ids);
6160}
6161
51e85e64 6162/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
6163
6164static tree
6165vn_lookup_simplify_result (gimple_match_op *res_op)
6166{
6167 if (!res_op->code.is_tree_code ())
6168 return NULL_TREE;
6169 tree *ops = res_op->ops;
6170 unsigned int length = res_op->num_ops;
6171 if (res_op->code == CONSTRUCTOR
 6172      /* ??? We're arriving here with SCCVN's view, decomposed CONSTRUCTOR
6173 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
6174 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6175 {
6176 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6177 ops = XALLOCAVEC (tree, length);
6178 for (unsigned i = 0; i < length; ++i)
6179 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6180 }
6181 vn_nary_op_t vnresult = NULL;
6182 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6183 res_op->type, ops, &vnresult);
6184 /* If this is used from expression simplification make sure to
6185 return an available expression. */
da3890a5 6186 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
51e85e64 6187 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6188 return res;
6189}
6190
51e85e64 6191/* Return a leader for OP's value that is valid at BB.  */
6192
6193tree
6194rpo_elim::eliminate_avail (basic_block bb, tree op)
6195{
2a06e47d 6196 bool visited;
6197 tree valnum = SSA_VAL (op, &visited);
6198 /* If we didn't visit OP then it must be defined outside of the
6199 region we process and also dominate it. So it is available. */
6200 if (!visited)
6201 return op;
51e85e64 6202 if (TREE_CODE (valnum) == SSA_NAME)
6203 {
6204 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6205 return valnum;
21ffc389 6206 vn_avail *av = VN_INFO (valnum)->avail;
6207 if (!av)
51e85e64 6208 return NULL_TREE;
21ffc389 6209 if (av->location == bb->index)
51e85e64 6210 /* On tramp3d 90% of the cases are here. */
21ffc389 6211 return ssa_name (av->leader);
51e85e64 6212 do
6213 {
21ffc389 6214 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
51e85e64 6215 /* ??? During elimination we have to use availability at the
6216 definition site of a use we try to replace. This
6217 is required to not run into inconsistencies because
6218 of dominated_by_p_w_unex behavior and removing a definition
6219 while not replacing all uses.
6220 ??? We could try to consistently walk dominators
6221 ignoring non-executable regions. The nearest common
6222 dominator of bb and abb is where we can stop walking. We
6223 may also be able to "pre-compute" (bits of) the next immediate
6224 (non-)dominator during the RPO walk when marking edges as
6225 executable. */
6226 if (dominated_by_p_w_unex (bb, abb))
6227 {
21ffc389 6228 tree leader = ssa_name (av->leader);
51e85e64 6229 /* Prevent eliminations that break loop-closed SSA. */
6230 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6231 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6232 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6233 (leader))->loop_father,
6234 bb))
6235 return NULL_TREE;
6236 if (dump_file && (dump_flags & TDF_DETAILS))
6237 {
6238 print_generic_expr (dump_file, leader);
6239 fprintf (dump_file, " is available for ");
6240 print_generic_expr (dump_file, valnum);
6241 fprintf (dump_file, "\n");
6242 }
6243 /* On tramp3d 99% of the _remaining_ cases succeed at
 6244	     the first entry.  */
6245 return leader;
6246 }
6247 /* ??? Can we somehow skip to the immediate dominator
 6248	     RPO index (bb_to_rpo)?  Again, maybe not worth it, on
6249 tramp3d the worst number of elements in the vector is 9. */
21ffc389 6250 av = av->next;
51e85e64 6251 }
21ffc389 6252 while (av);
51e85e64 6253 }
6254 else if (valnum != VN_TOP)
6255 /* valnum is is_gimple_min_invariant. */
6256 return valnum;
6257 return NULL_TREE;
6258}
6259
6260/* Make LEADER a leader for its value at BB. */
6261
6262void
6263rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6264{
6265 tree valnum = VN_INFO (leader)->valnum;
1dc6fdb5 6266 if (valnum == VN_TOP
6267 || is_gimple_min_invariant (valnum))
51e85e64 6268 return;
6269 if (dump_file && (dump_flags & TDF_DETAILS))
6270 {
6271 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6272 print_generic_expr (dump_file, leader);
6273 fprintf (dump_file, " for value ");
6274 print_generic_expr (dump_file, valnum);
6275 fprintf (dump_file, "\n");
6276 }
21ffc389 6277 vn_ssa_aux_t value = VN_INFO (valnum);
6278 vn_avail *av;
6279 if (m_avail_freelist)
51e85e64 6280 {
21ffc389 6281 av = m_avail_freelist;
6282 m_avail_freelist = m_avail_freelist->next;
51e85e64 6283 }
21ffc389 6284 else
6285 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6286 av->location = bb->index;
6287 av->leader = SSA_NAME_VERSION (leader);
6288 av->next = value->avail;
6289 value->avail = av;
51e85e64 6290}
6291
6292/* Valueization hook for RPO VN plus required state. */
6293
6294tree
6295rpo_vn_valueize (tree name)
6296{
6297 if (TREE_CODE (name) == SSA_NAME)
6298 {
6299 vn_ssa_aux_t val = VN_INFO (name);
51e85e64 6300 if (val)
6301 {
6302 tree tem = val->valnum;
6303 if (tem != VN_TOP && tem != name)
6304 {
6305 if (TREE_CODE (tem) != SSA_NAME)
6306 return tem;
6307 /* For all values we only valueize to an available leader
6308 which means we can use SSA name info without restriction. */
6309 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6310 if (tem)
6311 return tem;
6312 }
6313 }
6314 }
6315 return name;
6316}
2201c330 6317
51e85e64 6318/* Insert on PRED_E predicates derived from CODE OPS being true besides the
6319 inverted condition. */
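/* An illustrative sketch (assumed SSA names): on the true edge of
   if (a_1 < b_2) the predicates a_1 != b_2 and a_1 <= b_2 are recorded
   as true and a_1 > b_2 and a_1 == b_2 as false, so a dominated
   if (a_1 == b_2) can later be simplified without iterating.  */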
2201c330 6320
51e85e64 6321static void
6322insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6323{
6324 switch (code)
6325 {
6326 case LT_EXPR:
6327 /* a < b -> a {!,<}= b */
6328 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6329 ops, boolean_true_node, 0, pred_e);
6330 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6331 ops, boolean_true_node, 0, pred_e);
6332 /* a < b -> ! a {>,=} b */
6333 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6334 ops, boolean_false_node, 0, pred_e);
6335 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6336 ops, boolean_false_node, 0, pred_e);
6337 break;
6338 case GT_EXPR:
6339 /* a > b -> a {!,>}= b */
6340 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6341 ops, boolean_true_node, 0, pred_e);
6342 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6343 ops, boolean_true_node, 0, pred_e);
6344 /* a > b -> ! a {<,=} b */
6345 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6346 ops, boolean_false_node, 0, pred_e);
6347 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6348 ops, boolean_false_node, 0, pred_e);
6349 break;
6350 case EQ_EXPR:
6351 /* a == b -> ! a {<,>} b */
6352 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6353 ops, boolean_false_node, 0, pred_e);
6354 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6355 ops, boolean_false_node, 0, pred_e);
6356 break;
6357 case LE_EXPR:
6358 case GE_EXPR:
6359 case NE_EXPR:
6360 /* Nothing besides inverted condition. */
6361 break;
6362 default:;
6363 }
6364}
2201c330 6365
51e85e64 6366/* Main stmt worker for RPO VN, process BB. */
2201c330 6367
51e85e64 6368static unsigned
6369process_bb (rpo_elim &avail, basic_block bb,
6370 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6d26c698 6371 bool do_region, bitmap exit_bbs, bool skip_phis)
51e85e64 6372{
6373 unsigned todo = 0;
6374 edge_iterator ei;
6375 edge e;
2201c330 6376
51e85e64 6377 vn_context_bb = bb;
2201c330 6378
51e85e64 6379 /* If we are in loop-closed SSA preserve this state. This is
6380 relevant when called on regions from outside of FRE/PRE. */
6381 bool lc_phi_nodes = false;
6d26c698 6382 if (!skip_phis
6383 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
51e85e64 6384 FOR_EACH_EDGE (e, ei, bb->preds)
6385 if (e->src->loop_father != e->dest->loop_father
6386 && flow_loop_nested_p (e->dest->loop_father,
6387 e->src->loop_father))
6388 {
6389 lc_phi_nodes = true;
6390 break;
6391 }
2201c330 6392
b51523c4 6393 /* When we visit a loop header substitute into loop info. */
6394 if (!iterate && eliminate && bb->loop_father->header == bb)
6395 {
6396 /* Keep fields in sync with substitute_in_loop_info. */
6397 if (bb->loop_father->nb_iterations)
6398 bb->loop_father->nb_iterations
6399 = simplify_replace_tree (bb->loop_father->nb_iterations,
6400 NULL_TREE, NULL_TREE, vn_valueize);
6401 }
6402
51e85e64 6403 /* Value-number all defs in the basic-block. */
6d26c698 6404 if (!skip_phis)
6405 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6406 gsi_next (&gsi))
6407 {
6408 gphi *phi = gsi.phi ();
6409 tree res = PHI_RESULT (phi);
6410 vn_ssa_aux_t res_info = VN_INFO (res);
6411 if (!bb_visited)
6412 {
6413 gcc_assert (!res_info->visited);
6414 res_info->valnum = VN_TOP;
6415 res_info->visited = true;
6416 }
2201c330 6417
6d26c698 6418 /* When not iterating force backedge values to varying. */
6419 visit_stmt (phi, !iterate_phis);
6420 if (virtual_operand_p (res))
6421 continue;
51e85e64 6422
6d26c698 6423 /* Eliminate */
6424 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
6425 how we handle backedges and availability.
6426 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
6427 tree val = res_info->valnum;
6428 if (res != val && !iterate && eliminate)
6429 {
6430 if (tree leader = avail.eliminate_avail (bb, res))
6431 {
6432 if (leader != res
6433 /* Preserve loop-closed SSA form. */
6434 && (! lc_phi_nodes
6435 || is_gimple_min_invariant (leader)))
6436 {
6437 if (dump_file && (dump_flags & TDF_DETAILS))
6438 {
6439 fprintf (dump_file, "Replaced redundant PHI node "
6440 "defining ");
6441 print_generic_expr (dump_file, res);
6442 fprintf (dump_file, " with ");
6443 print_generic_expr (dump_file, leader);
6444 fprintf (dump_file, "\n");
6445 }
6446 avail.eliminations++;
2201c330 6447
6d26c698 6448 if (may_propagate_copy (res, leader))
6449 {
6450 /* Schedule for removal. */
6451 avail.to_remove.safe_push (phi);
6452 continue;
6453 }
6454 /* ??? Else generate a copy stmt. */
6455 }
6456 }
6457 }
 6458	/* Only make defs available that are not already.  But make
6459 sure loop-closed SSA PHI node defs are picked up for
6460 downstream uses. */
6461 if (lc_phi_nodes
6462 || res == val
6463 || ! avail.eliminate_avail (bb, res))
6464 avail.eliminate_push_avail (bb, res);
6465 }
2201c330 6466
51e85e64 6467 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6468 we do this when processing the last stmt as we have to do this
6469 before elimination which otherwise forces GIMPLE_CONDs to
6470 if (1 != 0) style when seeing non-executable edges. */
6471 if (gsi_end_p (gsi_start_bb (bb)))
6472 {
6473 FOR_EACH_EDGE (e, ei, bb->succs)
2201c330 6474 {
c73fc2a8 6475 if (!(e->flags & EDGE_EXECUTABLE))
6476 {
6477 if (dump_file && (dump_flags & TDF_DETAILS))
6478 fprintf (dump_file,
6479 "marking outgoing edge %d -> %d executable\n",
6480 e->src->index, e->dest->index);
c73fc2a8 6481 e->flags |= EDGE_EXECUTABLE;
6482 e->dest->flags |= BB_EXECUTABLE;
6483 }
6484 else if (!(e->dest->flags & BB_EXECUTABLE))
6485 {
6486 if (dump_file && (dump_flags & TDF_DETAILS))
6487 fprintf (dump_file,
6488 "marking destination block %d reachable\n",
6489 e->dest->index);
6490 e->dest->flags |= BB_EXECUTABLE;
6491 }
2201c330 6492 }
51e85e64 6493 }
6494 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6495 !gsi_end_p (gsi); gsi_next (&gsi))
6496 {
6497 ssa_op_iter i;
6498 tree op;
6499 if (!bb_visited)
2201c330 6500 {
51e85e64 6501 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
2201c330 6502 {
51e85e64 6503 vn_ssa_aux_t op_info = VN_INFO (op);
6504 gcc_assert (!op_info->visited);
6505 op_info->valnum = VN_TOP;
6506 op_info->visited = true;
2201c330 6507 }
51e85e64 6508
6509 /* We somehow have to deal with uses that are not defined
6510 in the processed region. Forcing unvisited uses to
6511 varying here doesn't play well with def-use following during
6512 expression simplification, so we deal with this by checking
6513 the visited flag in SSA_VAL. */
2201c330 6514 }
6515
51e85e64 6516 visit_stmt (gsi_stmt (gsi));
6517
6518 gimple *last = gsi_stmt (gsi);
6519 e = NULL;
6520 switch (gimple_code (last))
2201c330 6521 {
51e85e64 6522 case GIMPLE_SWITCH:
6523 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6524 (as_a <gswitch *> (last))));
6525 break;
6526 case GIMPLE_COND:
6527 {
6528 tree lhs = vn_valueize (gimple_cond_lhs (last));
6529 tree rhs = vn_valueize (gimple_cond_rhs (last));
6530 tree val = gimple_simplify (gimple_cond_code (last),
6531 boolean_type_node, lhs, rhs,
6532 NULL, vn_valueize);
 6533	    /* If the condition didn't simplify, see if we have recorded
 6534	       an expression from the edges taken so far.  */
6535 if (! val || TREE_CODE (val) != INTEGER_CST)
6536 {
6537 vn_nary_op_t vnresult;
6538 tree ops[2];
6539 ops[0] = lhs;
6540 ops[1] = rhs;
6541 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6542 boolean_type_node, ops,
6543 &vnresult);
6544 /* Did we get a predicated value? */
6545 if (! val && vnresult && vnresult->predicated_values)
2201c330 6546 {
51e85e64 6547 val = vn_nary_op_get_predicated_value (vnresult, bb);
6548 if (val && dump_file && (dump_flags & TDF_DETAILS))
6549 {
6550 fprintf (dump_file, "Got predicated value ");
6551 print_generic_expr (dump_file, val, TDF_NONE);
6552 fprintf (dump_file, " for ");
6553 print_gimple_stmt (dump_file, last, TDF_SLIM);
6554 }
2201c330 6555 }
51e85e64 6556 }
6557 if (val)
6558 e = find_taken_edge (bb, val);
6559 if (! e)
6560 {
6561 /* If we didn't manage to compute the taken edge then
6562 push predicated expressions for the condition itself
6563 and related conditions to the hashtables. This allows
6564 simplification of redundant conditions which is
6565 important as early cleanup. */
6566 edge true_e, false_e;
6567 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6568 enum tree_code code = gimple_cond_code (last);
6569 enum tree_code icode
6570 = invert_tree_comparison (code, HONOR_NANS (lhs));
6571 tree ops[2];
6572 ops[0] = lhs;
6573 ops[1] = rhs;
6574 if (do_region
6575 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6576 true_e = NULL;
6577 if (do_region
6578 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6579 false_e = NULL;
6580 if (true_e)
6581 vn_nary_op_insert_pieces_predicated
6582 (2, code, boolean_type_node, ops,
6583 boolean_true_node, 0, true_e);
6584 if (false_e)
6585 vn_nary_op_insert_pieces_predicated
6586 (2, code, boolean_type_node, ops,
6587 boolean_false_node, 0, false_e);
6588 if (icode != ERROR_MARK)
6589 {
6590 if (true_e)
6591 vn_nary_op_insert_pieces_predicated
6592 (2, icode, boolean_type_node, ops,
6593 boolean_false_node, 0, true_e);
6594 if (false_e)
6595 vn_nary_op_insert_pieces_predicated
6596 (2, icode, boolean_type_node, ops,
6597 boolean_true_node, 0, false_e);
6598 }
	 6599		      /* Only insert related predicates for integral types;
	 6600		         the inverted condition was handled above.  */
6601 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6602 {
6603 if (true_e)
6604 insert_related_predicates_on_edge (code, ops, true_e);
6605 if (false_e)
6606 insert_related_predicates_on_edge (icode, ops, false_e);
6607 }
6608 }
6609 break;
6610 }
6611 case GIMPLE_GOTO:
6612 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6613 break;
6614 default:
6615 e = NULL;
2201c330 6616 }
51e85e64 6617 if (e)
2201c330 6618 {
51e85e64 6619 todo = TODO_cleanup_cfg;
6620 if (!(e->flags & EDGE_EXECUTABLE))
2201c330 6621 {
51e85e64 6622 if (dump_file && (dump_flags & TDF_DETAILS))
6623 fprintf (dump_file,
6624 "marking known outgoing %sedge %d -> %d executable\n",
6625 e->flags & EDGE_DFS_BACK ? "back-" : "",
6626 e->src->index, e->dest->index);
51e85e64 6627 e->flags |= EDGE_EXECUTABLE;
6628 e->dest->flags |= BB_EXECUTABLE;
2201c330 6629 }
c73fc2a8 6630 else if (!(e->dest->flags & BB_EXECUTABLE))
6631 {
6632 if (dump_file && (dump_flags & TDF_DETAILS))
6633 fprintf (dump_file,
6634 "marking destination block %d reachable\n",
6635 e->dest->index);
6636 e->dest->flags |= BB_EXECUTABLE;
6637 }
2201c330 6638 }
51e85e64 6639 else if (gsi_one_before_end_p (gsi))
2201c330 6640 {
51e85e64 6641 FOR_EACH_EDGE (e, ei, bb->succs)
2201c330 6642 {
c73fc2a8 6643 if (!(e->flags & EDGE_EXECUTABLE))
6644 {
6645 if (dump_file && (dump_flags & TDF_DETAILS))
6646 fprintf (dump_file,
6647 "marking outgoing edge %d -> %d executable\n",
6648 e->src->index, e->dest->index);
c73fc2a8 6649 e->flags |= EDGE_EXECUTABLE;
6650 e->dest->flags |= BB_EXECUTABLE;
6651 }
6652 else if (!(e->dest->flags & BB_EXECUTABLE))
6653 {
6654 if (dump_file && (dump_flags & TDF_DETAILS))
6655 fprintf (dump_file,
6656 "marking destination block %d reachable\n",
6657 e->dest->index);
6658 e->dest->flags |= BB_EXECUTABLE;
6659 }
2201c330 6660 }
2201c330 6661 }
6662
51e85e64 6663 /* Eliminate. That also pushes to avail. */
6664 if (eliminate && ! iterate)
6665 avail.eliminate_stmt (bb, &gsi);
6666 else
6667 /* If not eliminating, make all not already available defs
6668 available. */
6669 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6670 if (! avail.eliminate_avail (bb, op))
6671 avail.eliminate_push_avail (bb, op);
2201c330 6672 }
6673
51e85e64 6674 /* Eliminate in destination PHI arguments. Always substitute in dest
6675 PHIs, even for non-executable edges. This handles region
6676 exits PHIs. */
6677 if (!iterate && eliminate)
6678 FOR_EACH_EDGE (e, ei, bb->succs)
2201c330 6679 for (gphi_iterator gsi = gsi_start_phis (e->dest);
51e85e64 6680 !gsi_end_p (gsi); gsi_next (&gsi))
2201c330 6681 {
6682 gphi *phi = gsi.phi ();
6683 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6684 tree arg = USE_FROM_PTR (use_p);
6685 if (TREE_CODE (arg) != SSA_NAME
6686 || virtual_operand_p (arg))
6687 continue;
51e85e64 6688 tree sprime;
6689 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6690 {
6691 sprime = SSA_VAL (arg);
6692 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6693 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6694 }
6695 else
	 6696	      /* Look for something available at the definition block of the argument.
6697 This avoids inconsistencies between availability there which
6698 decides if the stmt can be removed and availability at the
6699 use site. The SSA property ensures that things available
6700 at the definition are also available at uses. */
6701 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6702 arg);
6703 if (sprime
6704 && sprime != arg
6705 && may_propagate_copy (arg, sprime))
2201c330 6706 propagate_value (use_p, sprime);
6707 }
51e85e64 6708
6709 vn_context_bb = NULL;
6710 return todo;
2201c330 6711}
6712
51e85e64 6713/* Unwind state per basic-block. */
2201c330 6714
51e85e64 6715struct unwind_state
2201c330 6716{
51e85e64 6717 /* Times this block has been visited. */
6718 unsigned visited;
6719 /* Whether to handle this as iteration point or whether to treat
6720 incoming backedge PHI values as varying. */
6721 bool iterate;
2ac8e016 6722 /* Maximum RPO index this block is reachable from. */
6723 int max_rpo;
6724 /* Unwind state. */
51e85e64 6725 void *ob_top;
6726 vn_reference_t ref_top;
6727 vn_phi_t phi_top;
6728 vn_nary_op_t nary_top;
6729};
6730
6731/* Unwind the RPO VN state for iteration. */
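/* Entries inserted into the nary, phi and reference tables after the
   recorded *_TOP markers in TO are removed again (predicated nary entries
   are restored to their previous state), the tables obstack is released
   back to OB_TOP and avail entries recorded at or after RPO_IDX are
   pruned.  */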
6732
6733static void
6734do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6735{
6736 gcc_assert (to->iterate);
6737 for (; last_inserted_nary != to->nary_top;
6738 last_inserted_nary = last_inserted_nary->next)
2201c330 6739 {
51e85e64 6740 vn_nary_op_t *slot;
6741 slot = valid_info->nary->find_slot_with_hash
6742 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6743 /* Predication causes the need to restore previous state. */
6744 if ((*slot)->unwind_to)
6745 *slot = (*slot)->unwind_to;
2201c330 6746 else
51e85e64 6747 valid_info->nary->clear_slot (slot);
6748 }
6749 for (; last_inserted_phi != to->phi_top;
6750 last_inserted_phi = last_inserted_phi->next)
6751 {
6752 vn_phi_t *slot;
6753 slot = valid_info->phis->find_slot_with_hash
6754 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6755 valid_info->phis->clear_slot (slot);
6756 }
6757 for (; last_inserted_ref != to->ref_top;
6758 last_inserted_ref = last_inserted_ref->next)
6759 {
6760 vn_reference_t *slot;
6761 slot = valid_info->references->find_slot_with_hash
6762 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6763 (*slot)->operands.release ();
6764 valid_info->references->clear_slot (slot);
6765 }
6766 obstack_free (&vn_tables_obstack, to->ob_top);
6767
	 6768  /* Prune [rpo_idx, end] from avail.  */
6769 /* ??? This is O(number-of-values-in-region) which is
6770 O(region-size) rather than O(iteration-piece). */
21ffc389 6771 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
6772 i != vn_ssa_aux_hash->end (); ++i)
51e85e64 6773 {
21ffc389 6774 while ((*i)->avail)
51e85e64 6775 {
21ffc389 6776 if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
51e85e64 6777 break;
21ffc389 6778 vn_avail *av = (*i)->avail;
6779 (*i)->avail = (*i)->avail->next;
6780 av->next = avail.m_avail_freelist;
6781 avail.m_avail_freelist = av;
51e85e64 6782 }
2201c330 6783 }
6784}
6785
51e85e64 6786/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6787 If ITERATE is true then treat backedges optimistically as not
6788 executed and iterate. If ELIMINATE is true then perform
6789 elimination, otherwise leave that to the caller. */
2201c330 6790
51e85e64 6791static unsigned
6792do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6793 bool iterate, bool eliminate)
2201c330 6794{
51e85e64 6795 unsigned todo = 0;
2201c330 6796
51e85e64 6797 /* We currently do not support region-based iteration when
6798 elimination is requested. */
6799 gcc_assert (!entry || !iterate || !eliminate);
6800 /* When iterating we need loop info up-to-date. */
6801 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
2201c330 6802
51e85e64 6803 bool do_region = entry != NULL;
6804 if (!do_region)
2201c330 6805 {
51e85e64 6806 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6807 exit_bbs = BITMAP_ALLOC (NULL);
6808 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6809 }
2201c330 6810
6d26c698 6811  /* Clear EDGE_DFS_BACK on "all" entry edges; the RPO order computation
	 6812     will re-mark those that are contained in the region.  */
6813 edge_iterator ei;
6814 edge e;
6815 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6816 e->flags &= ~EDGE_DFS_BACK;
6817
51e85e64 6818 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
2ac8e016 6819 int n = rev_post_order_and_mark_dfs_back_seme
6820 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
51e85e64 6821 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6822 for (int i = 0; i < n / 2; ++i)
6823 std::swap (rpo[i], rpo[n-i-1]);
6824
6825 if (!do_region)
6826 BITMAP_FREE (exit_bbs);
6827
6d26c698 6828 /* If there are any non-DFS_BACK edges into entry->dest skip
6829 processing PHI nodes for that block. This supports
6830 value-numbering loop bodies w/o the actual loop. */
6831 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6832 if (e != entry
6833 && !(e->flags & EDGE_DFS_BACK))
6834 break;
6835 bool skip_entry_phis = e != NULL;
6836 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
6837 fprintf (dump_file, "Region does not contain all edges into "
6838 "the entry block, skipping its PHIs.\n");
6839
51e85e64 6840 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6841 for (int i = 0; i < n; ++i)
6842 bb_to_rpo[rpo[i]] = i;
6843
6844 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6845
6846 rpo_elim avail (entry->dest);
6847 rpo_avail = &avail;
6848
6849 /* Verify we have no extra entries into the region. */
6850 if (flag_checking && do_region)
6851 {
6852 auto_bb_flag bb_in_region (fn);
6853 for (int i = 0; i < n; ++i)
2201c330 6854 {
51e85e64 6855 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6856 bb->flags |= bb_in_region;
6857 }
6858 /* We can't merge the first two loops because we cannot rely
6859 on EDGE_DFS_BACK for edges not within the region. But if
6860 we decide to always have the bb_in_region flag we can
6861 do the checking during the RPO walk itself (but then it's
6862 also easy to handle MEME conservatively). */
6863 for (int i = 0; i < n; ++i)
6864 {
6865 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6866 edge e;
6867 edge_iterator ei;
6868 FOR_EACH_EDGE (e, ei, bb->preds)
6d26c698 6869 gcc_assert (e == entry
6870 || (skip_entry_phis && bb == entry->dest)
6871 || (e->src->flags & bb_in_region));
51e85e64 6872 }
6873 for (int i = 0; i < n; ++i)
6874 {
6875 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6876 bb->flags &= ~bb_in_region;
2201c330 6877 }
51e85e64 6878 }
2201c330 6879
51e85e64 6880 /* Create the VN state. For the initial size of the various hashtables
6881 use a heuristic based on region size and number of SSA names. */
6882 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6883 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
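  /* That is, scale the total number of SSA names by the fraction of the
     function's blocks that belong to the region.  */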
6884 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
dcf49a51 6885 next_value_id = 1;
51e85e64 6886
6887 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6888 gcc_obstack_init (&vn_ssa_aux_obstack);
6889
6890 gcc_obstack_init (&vn_tables_obstack);
6891 gcc_obstack_init (&vn_tables_insert_obstack);
6892 valid_info = XCNEW (struct vn_tables_s);
6893 allocate_vn_table (valid_info, region_size);
6894 last_inserted_ref = NULL;
6895 last_inserted_phi = NULL;
6896 last_inserted_nary = NULL;
6897
6898 vn_valueize = rpo_vn_valueize;
6899
	 6900  /* Initialize the unwind state, the edge/BB executable state and the per-block max_rpo.  */
bd686db6 6901 bool need_max_rpo_iterate = false;
51e85e64 6902 for (int i = 0; i < n; ++i)
6903 {
6904 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6905 rpo_state[i].visited = 0;
2ac8e016 6906 rpo_state[i].max_rpo = i;
fcf59b73 6907 bb->flags &= ~BB_EXECUTABLE;
51e85e64 6908 bool has_backedges = false;
6909 edge e;
6910 edge_iterator ei;
6911 FOR_EACH_EDGE (e, ei, bb->preds)
2201c330 6912 {
51e85e64 6913 if (e->flags & EDGE_DFS_BACK)
6914 has_backedges = true;
dd7ed3c2 6915 e->flags &= ~EDGE_EXECUTABLE;
6d26c698 6916 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
2ac8e016 6917 continue;
6918 if (bb_to_rpo[e->src->index] > i)
bd686db6 6919 {
6920 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6921 bb_to_rpo[e->src->index]);
6922 need_max_rpo_iterate = true;
6923 }
2ac8e016 6924 else
6925 rpo_state[i].max_rpo
6926 = MAX (rpo_state[i].max_rpo,
6927 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
2201c330 6928 }
51e85e64 6929 rpo_state[i].iterate = iterate && has_backedges;
51e85e64 6930 }
6931 entry->flags |= EDGE_EXECUTABLE;
6932 entry->dest->flags |= BB_EXECUTABLE;
2201c330 6933
bd686db6 6934 /* When there are irreducible regions the simplistic max_rpo computation
6935 above for the case of backedges doesn't work and we need to iterate
6936 until there are no more changes. */
6937 unsigned nit = 0;
6938 while (need_max_rpo_iterate)
6939 {
6940 nit++;
6941 need_max_rpo_iterate = false;
6942 for (int i = 0; i < n; ++i)
6943 {
6944 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6945 edge e;
6946 edge_iterator ei;
6947 FOR_EACH_EDGE (e, ei, bb->preds)
6948 {
6d26c698 6949 if (e == entry || (skip_entry_phis && bb == entry->dest))
bd686db6 6950 continue;
6951 int max_rpo = MAX (rpo_state[i].max_rpo,
6952 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6953 if (rpo_state[i].max_rpo != max_rpo)
6954 {
6955 rpo_state[i].max_rpo = max_rpo;
6956 need_max_rpo_iterate = true;
6957 }
6958 }
6959 }
6960 }
6961 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
6962
51e85e64 6963  /* As a heuristic to improve compile-time we handle only the N innermost
	 6964     loops and the outermost one optimistically.  */
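  /* Concretely, headers of loops nested between the outermost level and the
     PARAM_RPO_VN_MAX_LOOP_DEPTH innermost levels have iteration disabled and
     their latch backedges pre-marked executable, which treats their backedge
     PHI values conservatively.  A non-latch backedge into such a header (an
     irreducible region) still forces iteration.  */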
6965 if (iterate)
6966 {
6967 loop_p loop;
6968 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6969 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6970 if (loop_depth (loop) > max_depth)
6971 for (unsigned i = 2;
6972 i < loop_depth (loop) - max_depth; ++i)
6973 {
6974 basic_block header = superloop_at_depth (loop, i)->header;
6f5bdb34 6975 bool non_latch_backedge = false;
51e85e64 6976 edge e;
6977 edge_iterator ei;
6978 FOR_EACH_EDGE (e, ei, header->preds)
6979 if (e->flags & EDGE_DFS_BACK)
6f5bdb34 6980 {
6f5bdb34 6981 /* There can be a non-latch backedge into the header
6982 which is part of an outer irreducible region. We
6983 cannot avoid iterating this block then. */
6984 if (!dominated_by_p (CDI_DOMINATORS,
6985 e->src, e->dest))
6986 {
6987 if (dump_file && (dump_flags & TDF_DETAILS))
6988 fprintf (dump_file, "non-latch backedge %d -> %d "
6989 "forces iteration of loop %d\n",
6990 e->src->index, e->dest->index, loop->num);
6991 non_latch_backedge = true;
6992 }
9b7b9b8d 6993 else
6994 e->flags |= EDGE_EXECUTABLE;
6f5bdb34 6995 }
6996 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
51e85e64 6997 }
2201c330 6998 }
6999
51e85e64 7000 uint64_t nblk = 0;
2ac8e016 7001 int idx = 0;
7002 if (iterate)
7003 /* Go and process all blocks, iterating as necessary. */
7004 do
7005 {
7006 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7007
7008 /* If the block has incoming backedges remember unwind state. This
7009 is required even for non-executable blocks since in irreducible
7010 regions we might reach them via the backedge and re-start iterating
7011 from there.
7012 Note we can individually mark blocks with incoming backedges to
7013 not iterate where we then handle PHIs conservatively. We do that
7014 heuristically to reduce compile-time for degenerate cases. */
7015 if (rpo_state[idx].iterate)
7016 {
7017 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7018 rpo_state[idx].ref_top = last_inserted_ref;
7019 rpo_state[idx].phi_top = last_inserted_phi;
7020 rpo_state[idx].nary_top = last_inserted_nary;
7021 }
7022
7023 if (!(bb->flags & BB_EXECUTABLE))
7024 {
7025 if (dump_file && (dump_flags & TDF_DETAILS))
7026 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7027 idx, bb->index);
7028 idx++;
7029 continue;
7030 }
7031
7032 if (dump_file && (dump_flags & TDF_DETAILS))
7033 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7034 nblk++;
7035 todo |= process_bb (avail, bb,
7036 rpo_state[idx].visited != 0,
7037 rpo_state[idx].iterate,
6d26c698 7038 iterate, eliminate, do_region, exit_bbs, false);
2ac8e016 7039 rpo_state[idx].visited++;
7040
	 7041	  /* Verify whether changed values flow over executable outgoing backedges
	 7042	     and change destination PHI values (that is what we can easily
	 7043	     verify).  Reduce over all such edges to the destination farthest
	 7044	     back in the RPO order.  */
7045 int iterate_to = -1;
7046 edge_iterator ei;
7047 edge e;
7048 FOR_EACH_EDGE (e, ei, bb->succs)
7049 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7050 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7051 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7052 {
7053 int destidx = bb_to_rpo[e->dest->index];
7054 if (!rpo_state[destidx].visited)
7055 {
7056 if (dump_file && (dump_flags & TDF_DETAILS))
7057 fprintf (dump_file, "Unvisited destination %d\n",
7058 e->dest->index);
7059 if (iterate_to == -1 || destidx < iterate_to)
7060 iterate_to = destidx;
7061 continue;
7062 }
7063 if (dump_file && (dump_flags & TDF_DETAILS))
7064 fprintf (dump_file, "Looking for changed values of backedge"
7065 " %d->%d destination PHIs\n",
7066 e->src->index, e->dest->index);
7067 vn_context_bb = e->dest;
7068 gphi_iterator gsi;
7069 for (gsi = gsi_start_phis (e->dest);
7070 !gsi_end_p (gsi); gsi_next (&gsi))
7071 {
7072 bool inserted = false;
7073 /* While we'd ideally just iterate on value changes
7074 we CSE PHIs and do that even across basic-block
7075 boundaries. So even hashtable state changes can
7076 be important (which is roughly equivalent to
7077 PHI argument value changes). To not excessively
7078 iterate because of that we track whether a PHI
7079 was CSEd to with GF_PLF_1. */
7080 bool phival_changed;
7081 if ((phival_changed = visit_phi (gsi.phi (),
7082 &inserted, false))
7083 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7084 {
7085 if (!phival_changed
7086 && dump_file && (dump_flags & TDF_DETAILS))
7087 fprintf (dump_file, "PHI was CSEd and hashtable "
7088 "state (changed)\n");
7089 if (iterate_to == -1 || destidx < iterate_to)
7090 iterate_to = destidx;
7091 break;
7092 }
7093 }
7094 vn_context_bb = NULL;
7095 }
7096 if (iterate_to != -1)
7097 {
7098 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7099 idx = iterate_to;
7100 if (dump_file && (dump_flags & TDF_DETAILS))
7101 fprintf (dump_file, "Iterating to %d BB%d\n",
7102 iterate_to, rpo[iterate_to]);
7103 continue;
7104 }
7105
7106 idx++;
7107 }
7108 while (idx < n);
7109
7110 else /* !iterate */
2201c330 7111 {
2ac8e016 7112 /* Process all blocks greedily with a worklist that enforces RPO
7113 processing of reachable blocks. */
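	  /* The worklist is keyed by RPO index and we always extract the
	     smallest set bit, so reachable blocks are processed in RPO order
	     and each exactly once.  */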
7114 auto_bitmap worklist;
7115 bitmap_set_bit (worklist, 0);
7116 while (!bitmap_empty_p (worklist))
51e85e64 7117 {
2ac8e016 7118 int idx = bitmap_first_set_bit (worklist);
7119 bitmap_clear_bit (worklist, idx);
7120 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7121 gcc_assert ((bb->flags & BB_EXECUTABLE)
7122 && !rpo_state[idx].visited);
51e85e64 7123
51e85e64 7124 if (dump_file && (dump_flags & TDF_DETAILS))
2ac8e016 7125 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
2201c330 7126
2ac8e016 7127	      /* When we run into predecessor edges whose executable state we
	 7128	         cannot trust, mark them executable so PHI processing will be
	 7129	         conservative.
	 7130	         ??? Do we need to force arguments flowing over such an edge
	 7131	         to be varying, or will they always be?  */
51e85e64 7132 edge_iterator ei;
7133 edge e;
2ac8e016 7134 FOR_EACH_EDGE (e, ei, bb->preds)
7135 if (!(e->flags & EDGE_EXECUTABLE)
6d26c698 7136 && (bb == entry->dest
7137 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7138 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7139 >= (int)idx))))
51e85e64 7140 {
7141 if (dump_file && (dump_flags & TDF_DETAILS))
2ac8e016 7142 fprintf (dump_file, "Cannot trust state of predecessor "
7143 "edge %d -> %d, marking executable\n",
51e85e64 7144 e->src->index, e->dest->index);
2ac8e016 7145 e->flags |= EDGE_EXECUTABLE;
51e85e64 7146 }
2201c330 7147
2ac8e016 7148 nblk++;
7149 todo |= process_bb (avail, bb, false, false, false, eliminate,
6d26c698 7150 do_region, exit_bbs,
7151 skip_entry_phis && bb == entry->dest);
2ac8e016 7152 rpo_state[idx].visited++;
7153
7154 FOR_EACH_EDGE (e, ei, bb->succs)
7155 if ((e->flags & EDGE_EXECUTABLE)
7156 && e->dest->index != EXIT_BLOCK
7157 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7158 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7159 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7160 }
51e85e64 7161 }
51e85e64 7162
	 7163  /* Collect statistics about the walk, used when statistics or a dump file are active.  */
7164 int nex = 0;
7165 unsigned max_visited = 1;
7166 for (int i = 0; i < n; ++i)
7167 {
7168 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7169 if (bb->flags & BB_EXECUTABLE)
7170 nex++;
7171 statistics_histogram_event (cfun, "RPO block visited times",
7172 rpo_state[i].visited);
7173 if (rpo_state[i].visited > max_visited)
7174 max_visited = rpo_state[i].visited;
7175 }
7176 unsigned nvalues = 0, navail = 0;
21ffc389 7177 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7178 i != vn_ssa_aux_hash->end (); ++i)
51e85e64 7179 {
7180 nvalues++;
21ffc389 7181 vn_avail *av = (*i)->avail;
7182 while (av)
7183 {
7184 navail++;
7185 av = av->next;
7186 }
51e85e64 7187 }
7188 statistics_counter_event (cfun, "RPO blocks", n);
7189 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7190 statistics_counter_event (cfun, "RPO blocks executable", nex);
7191 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7192 statistics_histogram_event (cfun, "RPO num values", nvalues);
7193 statistics_histogram_event (cfun, "RPO num avail", navail);
7194 statistics_histogram_event (cfun, "RPO num lattice",
7195 vn_ssa_aux_hash->elements ());
7196 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7197 {
7198 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7199 " blocks in total discovering %d executable blocks iterating "
7200 "%d.%d times, a block was visited max. %u times\n",
7201 n, nblk, nex,
7202 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7203 max_visited);
7204 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7205 "and %" PRIu64 " lattice elements\n",
7206 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
2201c330 7207 }
7208
51e85e64 7209 if (eliminate)
7210 {
7211 /* When !iterate we already performed elimination during the RPO
7212 walk. */
7213 if (iterate)
7214 {
7215 /* Elimination for region-based VN needs to be done within the
7216 RPO walk. */
7217 gcc_assert (! do_region);
7218 /* Note we can't use avail.walk here because that gets confused
7219 by the existing availability and it will be less efficient
7220 as well. */
7221 todo |= eliminate_with_rpo_vn (NULL);
7222 }
7223 else
7224 todo |= avail.eliminate_cleanup (do_region);
7225 }
2201c330 7226
51e85e64 7227 vn_valueize = NULL;
7228 rpo_avail = NULL;
2201c330 7229
51e85e64 7230 XDELETEVEC (bb_to_rpo);
7231 XDELETEVEC (rpo);
43043015 7232 XDELETEVEC (rpo_state);
2201c330 7233
51e85e64 7234 return todo;
7235}
2201c330 7236
51e85e64 7237/* Region-based entry for RPO VN. Performs value-numbering and elimination
6d26c698 7238   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
	 7239   the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
	 7240   are not considered.  */
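/* A hypothetical caller could, purely for illustration, value-number just a
   transformed loop body (LOOP and the assumption of a single exit are part
   of this sketch, not of the interface):

     edge entry = loop_preheader_edge (loop);
     auto_bitmap exit_bbs;
     bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
     todo |= do_rpo_vn (cfun, entry, exit_bbs);  */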
2201c330 7241
51e85e64 7242unsigned
7243do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7244{
7245 default_vn_walk_kind = VN_WALKREWRITE;
7246 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7247 free_rpo_vn ();
7248 return todo;
2201c330 7249}
7250
7251
7252namespace {
7253
7254const pass_data pass_data_fre =
7255{
7256 GIMPLE_PASS, /* type */
7257 "fre", /* name */
7258 OPTGROUP_NONE, /* optinfo_flags */
7259 TV_TREE_FRE, /* tv_id */
7260 ( PROP_cfg | PROP_ssa ), /* properties_required */
7261 0, /* properties_provided */
7262 0, /* properties_destroyed */
7263 0, /* todo_flags_start */
7264 0, /* todo_flags_finish */
7265};
7266
7267class pass_fre : public gimple_opt_pass
7268{
7269public:
7270 pass_fre (gcc::context *ctxt)
ee5b48dc 7271 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
2201c330 7272 {}
7273
7274 /* opt_pass methods: */
7275 opt_pass * clone () { return new pass_fre (m_ctxt); }
ee5b48dc 7276 void set_pass_param (unsigned int n, bool param)
7277 {
7278 gcc_assert (n == 0);
7279 may_iterate = param;
7280 }
7281 virtual bool gate (function *)
7282 {
7283 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7284 }
2201c330 7285 virtual unsigned int execute (function *);
7286
ee5b48dc 7287private:
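	 /* Whether value-numbering may iterate; set via the pass parameter
	    and, per the gate and execute hooks, only effective when
	    optimize > 1.  */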
7288 bool may_iterate;
2201c330 7289}; // class pass_fre
7290
7291unsigned int
51e85e64 7292pass_fre::execute (function *fun)
2201c330 7293{
51e85e64 7294 unsigned todo = 0;
2201c330 7295
51e85e64 7296 /* At -O[1g] use the cheap non-iterating mode. */
ee5b48dc 7297 bool iterate_p = may_iterate && (optimize > 1);
51e85e64 7298 calculate_dominance_info (CDI_DOMINATORS);
ee5b48dc 7299 if (iterate_p)
51e85e64 7300 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2201c330 7301
51e85e64 7302 default_vn_walk_kind = VN_WALKREWRITE;
ee5b48dc 7303 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
51e85e64 7304 free_rpo_vn ();
2201c330 7305
ee5b48dc 7306 if (iterate_p)
51e85e64 7307 loop_optimizer_finalize ();
2201c330 7308
7309 return todo;
7310}
7311
7312} // anon namespace
7313
7314gimple_opt_pass *
7315make_pass_fre (gcc::context *ctxt)
7316{
7317 return new pass_fre (ctxt);
7318}
51e85e64 7319
7320#undef BB_EXECUTABLE