/* SCC value numbering for trees
   Copyright (C) 2006-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
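
/* For illustration only (not part of the implementation): in

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 1;

   the names i_1 and i_2 form an SSA-graph cycle and hence an SCC.
   The walk enters the SCC with every member valued VN_TOP, value
   numbers the members into the separate optimistic table, and
   re-iterates the SCC until no value number changes; only the
   stabilized results are then recorded in the valid table, so
   assumptions made during the iteration never leak into proven
   information.  */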


static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
bitmap const_parms;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
              || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            hstate.add_int (off);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  hstate.add_int (TREE_CODE (op));
                  inchash::add_expr (op, hstate);
                }
            }
          else
            vn_reference_op_compute_hash (vro, hstate);
        }
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          /* Do not look through a storage order barrier.  */
          else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
            return false;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          /* Do not look through a storage order barrier.  */
          else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
            return false;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          {
            offset_int off = mem_ref_offset (ref);
            if (wi::fits_shwi_p (off))
              temp.off = off.to_shwi ();
          }
          temp.clique = MR_DEPENDENCE_CLIQUE (ref);
          temp.base = MR_DEPENDENCE_BASE (ref);
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case BIT_FIELD_REF:
          /* Record bits, position and storage order.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
            {
              HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
              if (off % BITS_PER_UNIT == 0)
                temp.off = off / BITS_PER_UNIT;
            }
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    offset_int off
                      = (wi::to_offset (this_offset)
                         + wi::lrshift (wi::to_offset (bit_offset),
                                        LOG2_BITS_PER_UNIT));
                    if (wi::fits_shwi_p (off)
                        /* Prohibit value-numbering zero offset components
                           of addresses the same before the pass folding
                           __builtin_object_size had a chance to run
                           (checking cfun->after_inlining does the
                           trick here).  */
                        && (TREE_CODE (orig) != ADDR_EXPR
                            || off != 0
                            || cfun->after_inlining))
                      temp.off = off.to_shwi ();
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              offset_int off = ((wi::to_offset (temp.op0)
                                 - wi::to_offset (temp.op1))
                                * wi::to_offset (temp.op2));
              if (wi::fits_shwi_p (off))
                temp.off = off.to_shwi ();
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
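          /* For example (illustration only): a direct load from 'decl'
             and a load MEM[p_1] with p_1 = &decl then both yield the
             operand sequence {MEM_REF 0, ADDR_EXPR &decl} and therefore
             receive the same value number.  */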
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          break;
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
          temp.off = 0;
          break;
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          temp.reverse = storage_order_barrier_p (ref);
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  offset_int offset = 0;
  offset_int max_size;
  offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = int (GET_MODE_BITSIZE (mode));
    }
  if (size_tree != NULL_TREE
      && TREE_CODE (size_tree) == INTEGER_CST)
    size = wi::to_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
          MR_DEPENDENCE_BASE (*op0_p) = op->base;
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += wi::to_offset (op->op1);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */
            tree this_offset = DECL_FIELD_OFFSET (field);

            if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
              max_size = -1;
            else
              {
                offset_int woffset = wi::lshift (wi::to_offset (this_offset),
                                                 LOG2_BITS_PER_UNIT);
                woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
                offset += woffset;
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (TREE_CODE (op->op0) != INTEGER_CST
              || TREE_CODE (op->op1) != INTEGER_CST
              || TREE_CODE (op->op2) != INTEGER_CST)
            max_size = -1;
          else
            {
              offset_int woffset
                = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
                            TYPE_PRECISION (TREE_TYPE (op->op0)));
              woffset *= wi::to_offset (op->op2);
              woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
              offset += woffset;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!wi::fits_shwi_p (size) || wi::neg_p (size))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  ref->size = size.to_shwi ();

  if (!wi::fits_shwi_p (offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  ref->offset = offset.to_shwi ();

  if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
    ref->max_size = -1;
  else
    ref->max_size = max_size.to_shwi ();

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
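  /* For instance (illustration only): "s.x = foo ();" and "s.y = foo ();"
     look identical as calls; the MODIFY_EXPR operand pushed for the
     non-SSA lhs is what keeps their hashcodes, and thus the value
     numbers of their VDEFs, distinct.  */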
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is add the offset from &OBJ.foo.bar
     (i.e. the offset of .foo.bar) to the preceding MEM_REF offset and
     replace the address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
        mem_op->off = tree_to_shwi (mem_op->op0);
      else
        mem_op->off = -1;
      return true;
    }
  return false;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is add the offset from &OBJ.foo.bar
     (i.e. the offset of .foo.bar) to the preceding MEM_REF offset and
     replace the address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
         the reference tree from the address operation in case the current
         dereference isn't offsetted.  */
      if (!addr_base
          && *i_p == ops->length () - 1
          && off == 0
          /* This makes us disable this transform for PRE where the
             reference ops might be also used for code insertion which
             is invalid.  */
          && default_vn_walk_kind == VN_WALKREWRITE)
        {
          auto_vec<vn_reference_op_s, 32> tem;
          copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
          ops->pop ();
          ops->pop ();
          ops->safe_splice (tem);
          --*i_p;
          return true;
        }
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
           && is_gimple_reg_type (ref->type)
           && (!INTEGRAL_TYPE_P (ref->type)
               || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
        size = TYPE_PRECISION (ref->type);
      else
        size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
          || size > MAX_BITSIZE_MODE_ANY_MODE)
        return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
        {
          if (operands[i].off == -1)
            return NULL_TREE;
          off += operands[i].off;
          if (operands[i].opcode == MEM_REF)
            {
              ++i;
              break;
            }
        }
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
        ctor = base->op0;
      else if (base->opcode == MEM_REF
               && base[1].opcode == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
                   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
        {
          decl = TREE_OPERAND (base[1].op0, 0);
          ctor = ctor_for_folding (decl);
        }
      if (ctor == NULL_TREE)
        return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
        {
          if (decl)
            {
              tree res = fold_ctor_reference (ref->type, ctor,
                                              off * BITS_PER_UNIT,
                                              size * BITS_PER_UNIT, decl);
              if (res)
                {
                  STRIP_USELESS_TYPE_CONVERSION (res);
                  if (is_gimple_min_invariant (res))
                    return res;
                }
            }
          else
            {
              unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
              int len = native_encode_expr (ctor, buf, size, off);
              if (len > 0)
                return native_interpret_expr (ref->type, buf, len);
            }
        }
    }

  return NULL_TREE;
}

/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        {
          if (vn_reference_fold_indirect (&orig, &i))
            *valueized_anything = true;
        }
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        {
          if (vn_reference_maybe_forwprop_address (&orig, &i))
            *valueized_anything = true;
        }
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          offset_int off = ((wi::to_offset (vro->op0)
                             - wi::to_offset (vro->op1))
                            * wi::to_offset (vro->op2));
          if (wi::fits_shwi_p (off))
            vro->off = off.to_shwi ();
        }
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
404d6be4 1522/* Lookup a SCCVN reference operation VR in the current hash table.
1523 Returns the resulting value number if it exists in the hash table,
f6c33c78 1524 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1525 vn_reference_t stored in the hashtable if something is found. */
404d6be4 1526
1527static tree
f6c33c78 1528vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
404d6be4 1529{
3e871d4d 1530 vn_reference_s **slot;
404d6be4 1531 hashval_t hash;
1532
1533 hash = vr->hashcode;
c1f445d2 1534 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
404d6be4 1535 if (!slot && current_info == optimistic_info)
c1f445d2 1536 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
404d6be4 1537 if (slot)
f6c33c78 1538 {
1539 if (vnresult)
1540 *vnresult = (vn_reference_t)*slot;
1541 return ((vn_reference_t)*slot)->result;
1542 }
48e1416a 1543
404d6be4 1544 return NULL_TREE;
1545}
1546
dd277d48 1547/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1548 with the current VUSE and performs the expression lookup. */
1549
1550static void *
297a2110 1551vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1552 unsigned int cnt, void *vr_)
dd277d48 1553{
1554 vn_reference_t vr = (vn_reference_t)vr_;
3e871d4d 1555 vn_reference_s **slot;
dd277d48 1556 hashval_t hash;
1557
297a2110 1558 /* This bounds the stmt walks we perform on reference lookups
1559 to O(1) instead of O(N) where N is the number of dominating
1560 stores. */
1561 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1562 return (void *)-1;
1563
4a83fadb 1564 if (last_vuse_ptr)
1565 *last_vuse_ptr = vuse;
1566
dd277d48 1567 /* Fixup vuse and hash. */
84cd88b5 1568 if (vr->vuse)
1569 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
b8a2283e 1570 vr->vuse = vuse_ssa_val (vuse);
84cd88b5 1571 if (vr->vuse)
1572 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
dd277d48 1573
1574 hash = vr->hashcode;
c1f445d2 1575 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
dd277d48 1576 if (!slot && current_info == optimistic_info)
c1f445d2 1577 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
dd277d48 1578 if (slot)
1579 return *slot;
48e1416a 1580
dd277d48 1581 return NULL;
1582}
f6c33c78 1583
01fd46e3 1584/* Lookup an existing or insert a new vn_reference entry into the
1585 value table for the VUSE, SET, TYPE, OPERANDS reference which
a4f94d42 1586 has the value VALUE which is either a constant or an SSA name. */
01fd46e3 1587
1588static vn_reference_t
a4f94d42 1589vn_reference_lookup_or_insert_for_pieces (tree vuse,
1590 alias_set_type set,
1591 tree type,
f1f41a6c 1592 vec<vn_reference_op_s,
1593 va_heap> operands,
a4f94d42 1594 tree value)
01fd46e3 1595{
9251bb6f 1596 vn_reference_s vr1;
01fd46e3 1597 vn_reference_t result;
a4f94d42 1598 unsigned value_id;
01fd46e3 1599 vr1.vuse = vuse;
1600 vr1.operands = operands;
1601 vr1.type = type;
1602 vr1.set = set;
1603 vr1.hashcode = vn_reference_compute_hash (&vr1);
1604 if (vn_reference_lookup_1 (&vr1, &result))
1605 return result;
a4f94d42 1606 if (TREE_CODE (value) == SSA_NAME)
1607 value_id = VN_INFO (value)->value_id;
1608 else
1609 value_id = get_or_alloc_constant_value_id (value);
01fd46e3 1610 return vn_reference_insert_pieces (vuse, set, type,
f1f41a6c 1611 operands.copy (), value, value_id);
01fd46e3 1612}
1613
d8021dea 1614/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1615 from the statement defining VUSE and if not successful tries to
9d75589a 1616 translate *REFP and VR_ through an aggregate copy at the definition
dddafd79 1617 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1618 of *REF and *VR. If only disambiguation was performed then
1619 *DISAMBIGUATE_ONLY is set to true. */
d8021dea 1620
1621static void *
38168b16 1622vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
dddafd79 1623 bool *disambiguate_only)
d8021dea 1624{
1625 vn_reference_t vr = (vn_reference_t)vr_;
42acab1c 1626 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
dddafd79 1627 tree base = ao_ref_base (ref);
f018d957 1628 HOST_WIDE_INT offset, maxsize;
f1f41a6c 1629 static vec<vn_reference_op_s>
1e094109 1630 lhs_ops = vNULL;
66b86a74 1631 ao_ref lhs_ref;
1632 bool lhs_ref_ok = false;
d8021dea 1633
dddafd79 1634 /* If the reference is based on a parameter that was determined as
1635 pointing to readonly memory it doesn't change. */
1636 if (TREE_CODE (base) == MEM_REF
1637 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1638 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1639 && bitmap_bit_p (const_parms,
1640 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1641 {
1642 *disambiguate_only = true;
1643 return NULL;
1644 }
1645
180572f4 1646 /* First try to disambiguate after value-replacing in the definitions LHS. */
1647 if (is_gimple_assign (def_stmt))
1648 {
1649 tree lhs = gimple_assign_lhs (def_stmt);
b11771e1 1650 bool valueized_anything = false;
66b86a74 1651 /* Avoid re-allocation overhead. */
f1f41a6c 1652 lhs_ops.truncate (0);
66b86a74 1653 copy_reference_ops_from_ref (lhs, &lhs_ops);
b11771e1 1654 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
b11771e1 1655 if (valueized_anything)
1656 {
1657 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1658 get_alias_set (lhs),
1659 TREE_TYPE (lhs), lhs_ops);
1660 if (lhs_ref_ok
1661 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
dddafd79 1662 {
1663 *disambiguate_only = true;
1664 return NULL;
1665 }
b11771e1 1666 }
1667 else
1668 {
1669 ao_ref_init (&lhs_ref, lhs);
1670 lhs_ref_ok = true;
1671 }
180572f4 1672 }
38168b16 1673 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1674 && gimple_call_num_args (def_stmt) <= 4)
1675 {
1676 /* For builtin calls valueize its arguments and call the
1677 alias oracle again. Valueization may improve points-to
1678 info of pointers and constify size and position arguments.
1679 Originally this was motivated by PR61034 which has
1680 conditional calls to free falsely clobbering ref because
1681 of imprecise points-to info of the argument. */
1682 tree oldargs[4];
2eb57bc3 1683 bool valueized_anything = false;
38168b16 1684 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1685 {
1686 oldargs[i] = gimple_call_arg (def_stmt, i);
1687 if (TREE_CODE (oldargs[i]) == SSA_NAME
1688 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1689 {
1690 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1691 valueized_anything = true;
1692 }
1693 }
1694 if (valueized_anything)
1695 {
1a91d914 1696 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1697 ref);
38168b16 1698 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1699 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1700 if (!res)
dddafd79 1701 {
1702 *disambiguate_only = true;
1703 return NULL;
1704 }
38168b16 1705 }
1706 }
1707
dddafd79 1708 if (*disambiguate_only)
38168b16 1709 return (void *)-1;
180572f4 1710
3918bd18 1711 offset = ref->offset;
3918bd18 1712 maxsize = ref->max_size;
d8021dea 1713
1714 /* If we cannot constrain the size of the reference we cannot
1715 test if anything kills it. */
1716 if (maxsize == -1)
1717 return (void *)-1;
1718
3c25489e 1719 /* We can't deduce anything useful from clobbers. */
1720 if (gimple_clobber_p (def_stmt))
1721 return (void *)-1;
1722
d8021dea 1723 /* def_stmt may-defs *ref. See if we can derive a value for *ref
3c25489e 1724 from that definition.
d8021dea 1725 1) Memset. */
3918bd18 1726 if (is_gimple_reg_type (vr->type)
77c7051b 1727 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
d8021dea 1728 && integer_zerop (gimple_call_arg (def_stmt, 1))
e913b5cd 1729 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
d8021dea 1730 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1731 {
1732 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1733 tree base2;
1734 HOST_WIDE_INT offset2, size2, maxsize2;
292237f3 1735 bool reverse;
1736 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1737 &reverse);
e913b5cd 1738 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
d8021dea 1739 if ((unsigned HOST_WIDE_INT)size2 / 8
e913b5cd 1740 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
a7be40cc 1741 && maxsize2 != -1
d8021dea 1742 && operand_equal_p (base, base2, 0)
1743 && offset2 <= offset
1744 && offset2 + size2 >= offset + maxsize)
3918bd18 1745 {
385f3f36 1746 tree val = build_zero_cst (vr->type);
a4f94d42 1747 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1748 (vuse, vr->set, vr->type, vr->operands, val);
3918bd18 1749 }
d8021dea 1750 }
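      /* An illustrative sketch (invented names, not from this file):

	   memset (&a, 0, sizeof (a));
	   x_1 = a.i;

	 the load x_1 is fully covered by the zeroing memset and is
	 value-numbered to zero of its type.  */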
1751
1752 /* 2) Assignment from an empty CONSTRUCTOR. */
3918bd18 1753 else if (is_gimple_reg_type (vr->type)
d8021dea 1754 && gimple_assign_single_p (def_stmt)
1755 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1756 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1757 {
1758 tree base2;
1759 HOST_WIDE_INT offset2, size2, maxsize2;
292237f3 1760 bool reverse;
d8021dea 1761 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
292237f3 1762 &offset2, &size2, &maxsize2, &reverse);
a7be40cc 1763 if (maxsize2 != -1
1764 && operand_equal_p (base, base2, 0)
d8021dea 1765 && offset2 <= offset
1766 && offset2 + size2 >= offset + maxsize)
3918bd18 1767 {
385f3f36 1768 tree val = build_zero_cst (vr->type);
a4f94d42 1769 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1770 (vuse, vr->set, vr->type, vr->operands, val);
3918bd18 1771 }
d8021dea 1772 }
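      /* An illustrative sketch (invented names, not from this file):

	   a = {};
	   x_1 = a.i;

	 a covered load from the zero-initialized aggregate is likewise
	 value-numbered to zero.  */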
1773
87b53397 1774 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1775 routines to extract the assigned bits. */
824bbeb8 1776 else if (vn_walk_kind == VN_WALKREWRITE
1777 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
87b53397 1778 && ref->size == maxsize
1779 && maxsize % BITS_PER_UNIT == 0
1780 && offset % BITS_PER_UNIT == 0
1781 && is_gimple_reg_type (vr->type)
292237f3 1782 && !contains_storage_order_barrier_p (vr->operands)
87b53397 1783 && gimple_assign_single_p (def_stmt)
1784 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1785 {
1786 tree base2;
1787 HOST_WIDE_INT offset2, size2, maxsize2;
292237f3 1788 bool reverse;
87b53397 1789 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
292237f3 1790 &offset2, &size2, &maxsize2, &reverse);
1791 if (!reverse
1792 && maxsize2 != -1
87b53397 1793 && maxsize2 == size2
1794 && size2 % BITS_PER_UNIT == 0
1795 && offset2 % BITS_PER_UNIT == 0
1796 && operand_equal_p (base, base2, 0)
1797 && offset2 <= offset
1798 && offset2 + size2 >= offset + maxsize)
1799 {
1800 /* We support up to 512-bit values (for V8DFmode). */
1801 unsigned char buffer[64];
1802 int len;
1803
1804 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1805 buffer, sizeof (buffer));
1806 if (len > 0)
1807 {
1808 tree val = native_interpret_expr (vr->type,
1809 buffer
1810 + ((offset - offset2)
1811 / BITS_PER_UNIT),
1812 ref->size / BITS_PER_UNIT);
1813 if (val)
a4f94d42 1814 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1815 (vuse, vr->set, vr->type, vr->operands, val);
87b53397 1816 }
1817 }
1818 }
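      /* An illustrative sketch (invented names, not from this file):

	   u.i = 0x01020304;
	   c_1 = u.c[2];

	 native_encode_expr produces the byte representation of the
	 stored constant and native_interpret_expr re-reads the accessed
	 bytes in the type of the load.  */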
1819
a3bb56f0 1820 /* 4) Assignment from an SSA name whose definition we may be able
1821 to access pieces from. */
1822 else if (ref->size == maxsize
1823 && is_gimple_reg_type (vr->type)
292237f3 1824 && !contains_storage_order_barrier_p (vr->operands)
a3bb56f0 1825 && gimple_assign_single_p (def_stmt)
1826 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1827 {
1828 tree rhs1 = gimple_assign_rhs1 (def_stmt);
42acab1c 1829 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
a3bb56f0 1830 if (is_gimple_assign (def_stmt2)
1831 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1832 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1833 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1834 {
1835 tree base2;
1836 HOST_WIDE_INT offset2, size2, maxsize2, off;
292237f3 1837 bool reverse;
a3bb56f0 1838 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
292237f3 1839 &offset2, &size2, &maxsize2,
1840 &reverse);
a3bb56f0 1841 off = offset - offset2;
292237f3 1842 if (!reverse
1843 && maxsize2 != -1
a3bb56f0 1844 && maxsize2 == size2
1845 && operand_equal_p (base, base2, 0)
1846 && offset2 <= offset
1847 && offset2 + size2 >= offset + maxsize)
1848 {
1849 tree val = NULL_TREE;
1850 HOST_WIDE_INT elsz
f9ae6f95 1851 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
a3bb56f0 1852 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1853 {
1854 if (off == 0)
1855 val = gimple_assign_rhs1 (def_stmt2);
1856 else if (off == elsz)
1857 val = gimple_assign_rhs2 (def_stmt2);
1858 }
1859 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1860 && off % elsz == 0)
1861 {
1862 tree ctor = gimple_assign_rhs1 (def_stmt2);
1863 unsigned i = off / elsz;
1864 if (i < CONSTRUCTOR_NELTS (ctor))
1865 {
1866 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
0ff8139c 1867 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1868 {
1869 if (TREE_CODE (TREE_TYPE (elt->value))
1870 != VECTOR_TYPE)
1871 val = elt->value;
1872 }
a3bb56f0 1873 }
1874 }
1875 if (val)
a4f94d42 1876 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1877 (vuse, vr->set, vr->type, vr->operands, val);
a3bb56f0 1878 }
1879 }
1880 }
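      /* An illustrative sketch (invented names, not from this file):

	   z_1 = COMPLEX_EXPR <r_2, i_3>;
	   z = z_1;
	   x_4 = REALPART_EXPR <z>;

	 the load x_4 is at offset zero into z_1's definition and is
	 value-numbered to r_2; a load at off == elsz would yield i_3.  */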
1881
1882 /* 5) For aggregate copies translate the reference through them if
d8021dea 1883 the copy kills ref. */
8ecc6b38 1884 else if (vn_walk_kind == VN_WALKREWRITE
1885 && gimple_assign_single_p (def_stmt)
d8021dea 1886 && (DECL_P (gimple_assign_rhs1 (def_stmt))
182cf5a9 1887 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
d8021dea 1888 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1889 {
1890 tree base2;
11beb29c 1891 HOST_WIDE_INT maxsize2;
292237f3 1892 int i, j, k;
c2078b80 1893 auto_vec<vn_reference_op_s> rhs;
d8021dea 1894 vn_reference_op_t vro;
3918bd18 1895 ao_ref r;
d8021dea 1896
66b86a74 1897 if (!lhs_ref_ok)
1898 return (void *)-1;
1899
d8021dea 1900 /* See if the assignment kills REF. */
66b86a74 1901 base2 = ao_ref_base (&lhs_ref);
a7be40cc 1902 maxsize2 = lhs_ref.max_size;
1903 if (maxsize2 == -1
e5d08bfd 1904 || (base != base2
1905 && (TREE_CODE (base) != MEM_REF
1906 || TREE_CODE (base2) != MEM_REF
1907 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1908 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1909 TREE_OPERAND (base2, 1))))
11beb29c 1910 || !stmt_kills_ref_p (def_stmt, ref))
d8021dea 1911 return (void *)-1;
1912
66b86a74 1913 /* Find the common base of ref and the lhs. lhs_ops already
1914 contains valueized operands for the lhs. */
f1f41a6c 1915 i = vr->operands.length () - 1;
1916 j = lhs_ops.length () - 1;
0d5b37dd 1917 while (j >= 0 && i >= 0
f1f41a6c 1918 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
d8021dea 1919 {
1920 i--;
1921 j--;
1922 }
0d5b37dd 1923
b11771e1 1924 /* ??? The innermost op should always be a MEM_REF and we already
1925 checked that the assignment to the lhs kills vr. Thus for
1926 aggregate copies using char[] types the vn_reference_op_eq
1927 may fail when comparing types for compatibility. But we really
1928 don't care here - further lookups with the rewritten operands
1929 will simply fail if we messed up types too badly. */
a5650c86 1930 HOST_WIDE_INT extra_off = 0;
78e606ea 1931 if (j == 0 && i >= 0
f1f41a6c 1932 && lhs_ops[0].opcode == MEM_REF
a5650c86 1933 && lhs_ops[0].off != -1)
1934 {
1935 if (lhs_ops[0].off == vr->operands[i].off)
1936 i--, j--;
1937 else if (vr->operands[i].opcode == MEM_REF
1938 && vr->operands[i].off != -1)
1939 {
1940 extra_off = vr->operands[i].off - lhs_ops[0].off;
1941 i--, j--;
1942 }
1943 }
b11771e1 1944
d8021dea 1945 /* i now points to the first additional op.
1946 ??? LHS may not be completely contained in VR, one or more
1947 VIEW_CONVERT_EXPRs could be in its way. We could at least
1948 try handling outermost VIEW_CONVERT_EXPRs. */
1949 if (j != -1)
1950 return (void *)-1;
d8021dea 1951
292237f3 1952 /* Punt if the additional ops contain a storage order barrier. */
1953 for (k = i; k >= 0; k--)
1954 {
1955 vro = &vr->operands[k];
1956 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1957 return (void *)-1;
1958 }
1959
d8021dea 1960 /* Now re-write REF to be based on the rhs of the assignment. */
1961 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
a5650c86 1962
1963 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1964 if (extra_off != 0)
1965 {
1966 if (rhs.length () < 2
1967 || rhs[0].opcode != MEM_REF
1968 || rhs[0].off == -1)
1969 return (void *)-1;
1970 rhs[0].off += extra_off;
1971 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1972 build_int_cst (TREE_TYPE (rhs[0].op0),
1973 extra_off));
1974 }
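	  /* E.g. (a hedged sketch) looking up MEM[&a + 8] through the
	     copy "MEM[&a + 4] = MEM[&b];" leaves extra_off == 4 and
	     re-bases the reference to MEM[&b + 4].  */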
1975
d8021dea 1976 /* We need to prepend vr->operands[0..i] to rhs. */
2fd3ecff 1977 vec<vn_reference_op_s> old = vr->operands;
f1f41a6c 1978 if (i + 1 + rhs.length () > vr->operands.length ())
d8021dea 1979 {
f1f41a6c 1980 vr->operands.safe_grow (i + 1 + rhs.length ());
2fd3ecff 1981 if (old == shared_lookup_references)
1982 shared_lookup_references = vr->operands;
d8021dea 1983 }
1984 else
f1f41a6c 1985 vr->operands.truncate (i + 1 + rhs.length ());
1986 FOR_EACH_VEC_ELT (rhs, j, vro)
1987 vr->operands[i + 1 + j] = *vro;
01fd46e3 1988 vr->operands = valueize_refs (vr->operands);
2fd3ecff 1989 if (old == shared_lookup_references)
1990 shared_lookup_references = vr->operands;
d8021dea 1991 vr->hashcode = vn_reference_compute_hash (vr);
77c7051b 1992
a5650c86 1993 /* Try folding the new reference to a constant. */
1994 tree val = fully_constant_vn_reference_p (vr);
1995 if (val)
1996 return vn_reference_lookup_or_insert_for_pieces
1997 (vuse, vr->set, vr->type, vr->operands, val);
1998
77c7051b 1999 /* Adjust *ref from the new operands. */
2000 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2001 return (void *)-1;
2002 /* This can happen with bitfields. */
2003 if (ref->size != r.size)
2004 return (void *)-1;
2005 *ref = r;
2006
2007 /* Do not update last seen VUSE after translating. */
2008 last_vuse_ptr = NULL;
2009
2010 /* Keep looking for the adjusted *REF / VR pair. */
2011 return NULL;
2012 }
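  /* An illustrative sketch (invented names, not from this file): for

       a = b;
       x_1 = a.f;

     the reference a.f is rewritten to b.f and the walk continues,
     looking for a dominating definition of b.f.  */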
2013
a3bb56f0 2014 /* 6) For memcpy copies translate the reference through them if
77c7051b 2015 the copy kills ref. */
2016 else if (vn_walk_kind == VN_WALKREWRITE
2017 && is_gimple_reg_type (vr->type)
2018 /* ??? Handle BCOPY as well. */
2019 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2020 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2021 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2022 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2023 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2024 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2025 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
e913b5cd 2026 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
77c7051b 2027 {
2028 tree lhs, rhs;
2029 ao_ref r;
2030 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2031 vn_reference_op_s op;
2032 HOST_WIDE_INT at;
2033
77c7051b 2034 /* Only handle non-variable, addressable refs. */
2035 if (ref->size != maxsize
2036 || offset % BITS_PER_UNIT != 0
2037 || ref->size % BITS_PER_UNIT != 0)
2038 return (void *)-1;
2039
2040 /* Extract a pointer base and an offset for the destination. */
2041 lhs = gimple_call_arg (def_stmt, 0);
2042 lhs_offset = 0;
2043 if (TREE_CODE (lhs) == SSA_NAME)
6325605f 2044 {
2045 lhs = SSA_VAL (lhs);
2046 if (TREE_CODE (lhs) == SSA_NAME)
2047 {
42acab1c 2048 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
6325605f 2049 if (gimple_assign_single_p (def_stmt)
2050 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2051 lhs = gimple_assign_rhs1 (def_stmt);
2052 }
2053 }
77c7051b 2054 if (TREE_CODE (lhs) == ADDR_EXPR)
2055 {
2056 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2057 &lhs_offset);
2058 if (!tem)
2059 return (void *)-1;
2060 if (TREE_CODE (tem) == MEM_REF
e913b5cd 2061 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
77c7051b 2062 {
2063 lhs = TREE_OPERAND (tem, 0);
6325605f 2064 if (TREE_CODE (lhs) == SSA_NAME)
2065 lhs = SSA_VAL (lhs);
e913b5cd 2066 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
77c7051b 2067 }
2068 else if (DECL_P (tem))
2069 lhs = build_fold_addr_expr (tem);
2070 else
2071 return (void *)-1;
2072 }
2073 if (TREE_CODE (lhs) != SSA_NAME
2074 && TREE_CODE (lhs) != ADDR_EXPR)
2075 return (void *)-1;
2076
2077 /* Extract a pointer base and an offset for the source. */
2078 rhs = gimple_call_arg (def_stmt, 1);
2079 rhs_offset = 0;
2080 if (TREE_CODE (rhs) == SSA_NAME)
2081 rhs = SSA_VAL (rhs);
2082 if (TREE_CODE (rhs) == ADDR_EXPR)
2083 {
2084 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2085 &rhs_offset);
2086 if (!tem)
2087 return (void *)-1;
2088 if (TREE_CODE (tem) == MEM_REF
e913b5cd 2089 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
77c7051b 2090 {
2091 rhs = TREE_OPERAND (tem, 0);
e913b5cd 2092 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
77c7051b 2093 }
2094 else if (DECL_P (tem))
2095 rhs = build_fold_addr_expr (tem);
2096 else
2097 return (void *)-1;
2098 }
2099 if (TREE_CODE (rhs) != SSA_NAME
2100 && TREE_CODE (rhs) != ADDR_EXPR)
2101 return (void *)-1;
2102
8c53c46c 2103 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
77c7051b 2104
 2105 /* The bases of the destination and the reference have to agree. */
2106 if ((TREE_CODE (base) != MEM_REF
2107 && !DECL_P (base))
2108 || (TREE_CODE (base) == MEM_REF
2109 && (TREE_OPERAND (base, 0) != lhs
e913b5cd 2110 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
77c7051b 2111 || (DECL_P (base)
2112 && (TREE_CODE (lhs) != ADDR_EXPR
2113 || TREE_OPERAND (lhs, 0) != base)))
2114 return (void *)-1;
2115
77c7051b 2116 at = offset / BITS_PER_UNIT;
2117 if (TREE_CODE (base) == MEM_REF)
8c53c46c 2118 at += tree_to_uhwi (TREE_OPERAND (base, 1));
6325605f 2119 /* If the access is completely outside of the memcpy destination
2120 area there is no aliasing. */
2121 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2122 || lhs_offset + copy_size <= at)
2123 return NULL;
2124 /* And the access has to be contained within the memcpy destination. */
77c7051b 2125 if (lhs_offset > at
2126 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2127 return (void *)-1;
2128
2129 /* Make room for 2 operands in the new reference. */
f1f41a6c 2130 if (vr->operands.length () < 2)
77c7051b 2131 {
f1f41a6c 2132 vec<vn_reference_op_s> old = vr->operands;
2133 vr->operands.safe_grow_cleared (2);
77c7051b 2134 if (old == shared_lookup_references
2135 && vr->operands != old)
2fd3ecff 2136 shared_lookup_references = vr->operands;
77c7051b 2137 }
2138 else
f1f41a6c 2139 vr->operands.truncate (2);
77c7051b 2140
2141 /* The looked-through reference is a simple MEM_REF. */
2142 memset (&op, 0, sizeof (op));
2143 op.type = vr->type;
2144 op.opcode = MEM_REF;
2145 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2146 op.off = at - lhs_offset + rhs_offset;
f1f41a6c 2147 vr->operands[0] = op;
2be90eed 2148 op.type = TREE_TYPE (rhs);
77c7051b 2149 op.opcode = TREE_CODE (rhs);
2150 op.op0 = rhs;
2151 op.off = -1;
f1f41a6c 2152 vr->operands[1] = op;
77c7051b 2153 vr->hashcode = vn_reference_compute_hash (vr);
3918bd18 2154
2155 /* Adjust *ref from the new operands. */
2156 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
d8021dea 2157 return (void *)-1;
8f15ba15 2158 /* This can happen with bitfields. */
2159 if (ref->size != r.size)
2160 return (void *)-1;
3918bd18 2161 *ref = r;
d8021dea 2162
4a83fadb 2163 /* Do not update last seen VUSE after translating. */
2164 last_vuse_ptr = NULL;
2165
d8021dea 2166 /* Keep looking for the adjusted *REF / VR pair. */
2167 return NULL;
2168 }
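  /* An illustrative sketch (invented names, not from this file): for

       memcpy (&d, &s, sizeof (d));
       x_1 = d.f;

     the reference is rewritten to a MEM_REF based on &s at the
     corresponding offset and the walk continues there.  */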
2169
2170 /* Bail out and stop walking. */
2171 return (void *)-1;
2172}
2173
f6c33c78 2174/* Lookup a reference operation by its parts, in the current hash table.
2175 Returns the resulting value number if it exists in the hash table,
2176 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2177 vn_reference_t stored in the hashtable if something is found. */
9e9e6e3e 2178
2179tree
3918bd18 2180vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 2181 vec<vn_reference_op_s> operands,
8ecc6b38 2182 vn_reference_t *vnresult, vn_lookup_kind kind)
f6c33c78 2183{
2184 struct vn_reference_s vr1;
dd277d48 2185 vn_reference_t tmp;
c26ce8a9 2186 tree cst;
dd277d48 2187
2188 if (!vnresult)
2189 vnresult = &tmp;
2190 *vnresult = NULL;
d8021dea 2191
b8a2283e 2192 vr1.vuse = vuse_ssa_val (vuse);
f1f41a6c 2193 shared_lookup_references.truncate (0);
2194 shared_lookup_references.safe_grow (operands.length ());
2195 memcpy (shared_lookup_references.address (),
2196 operands.address (),
d8021dea 2197 sizeof (vn_reference_op_s)
f1f41a6c 2198 * operands.length ());
d8021dea 2199 vr1.operands = operands = shared_lookup_references
2200 = valueize_refs (shared_lookup_references);
3918bd18 2201 vr1.type = type;
2202 vr1.set = set;
f6c33c78 2203 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 2204 if ((cst = fully_constant_vn_reference_p (&vr1)))
2205 return cst;
f6c33c78 2206
c26ce8a9 2207 vn_reference_lookup_1 (&vr1, vnresult);
dd277d48 2208 if (!*vnresult
8ecc6b38 2209 && kind != VN_NOWALK
dd277d48 2210 && vr1.vuse)
02067dc5 2211 {
3918bd18 2212 ao_ref r;
8ecc6b38 2213 vn_walk_kind = kind;
3918bd18 2214 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
d8021dea 2215 *vnresult =
3918bd18 2216 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
d8021dea 2217 vn_reference_lookup_2,
46816709 2218 vn_reference_lookup_3,
f3c5599a 2219 vuse_ssa_val, &vr1);
2fd3ecff 2220 gcc_checking_assert (vr1.operands == shared_lookup_references);
02067dc5 2221 }
2222
dd277d48 2223 if (*vnresult)
2224 return (*vnresult)->result;
2225
2226 return NULL_TREE;
f6c33c78 2227}
2228
2229/* Lookup OP in the current hash table, and return the resulting value
2230 number if it exists in the hash table. Return NULL_TREE if it does
2231 not exist in the hash table or if the result field of the structure
 2232 was NULL. VNRESULT will be filled in with the vn_reference_t
2233 stored in the hashtable if one exists. */
2234
2235tree
8ecc6b38 2236vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
f6c33c78 2237 vn_reference_t *vnresult)
9e9e6e3e 2238{
f1f41a6c 2239 vec<vn_reference_op_s> operands;
9e9e6e3e 2240 struct vn_reference_s vr1;
c26ce8a9 2241 tree cst;
882f8b55 2242 bool valueized_anything;
dd277d48 2243
f6c33c78 2244 if (vnresult)
2245 *vnresult = NULL;
9e9e6e3e 2246
b8a2283e 2247 vr1.vuse = vuse_ssa_val (vuse);
882f8b55 2248 vr1.operands = operands
 2249 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3918bd18 2250 vr1.type = TREE_TYPE (op);
2251 vr1.set = get_alias_set (op);
9e9e6e3e 2252 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 2253 if ((cst = fully_constant_vn_reference_p (&vr1)))
2254 return cst;
404d6be4 2255
8ecc6b38 2256 if (kind != VN_NOWALK
dd277d48 2257 && vr1.vuse)
2258 {
2259 vn_reference_t wvnresult;
3918bd18 2260 ao_ref r;
882f8b55 2261 /* Make sure to use a valueized reference if we valueized anything.
2262 Otherwise preserve the full reference for advanced TBAA. */
 2263 if (!valueized_anything
2264 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2265 vr1.operands))
2be90eed 2266 ao_ref_init (&r, op);
8ecc6b38 2267 vn_walk_kind = kind;
dd277d48 2268 wvnresult =
3918bd18 2269 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
d8021dea 2270 vn_reference_lookup_2,
46816709 2271 vn_reference_lookup_3,
f3c5599a 2272 vuse_ssa_val, &vr1);
2fd3ecff 2273 gcc_checking_assert (vr1.operands == shared_lookup_references);
dd277d48 2274 if (wvnresult)
2275 {
2276 if (vnresult)
2277 *vnresult = wvnresult;
2278 return wvnresult->result;
2279 }
2280
2281 return NULL_TREE;
404d6be4 2282 }
9e9e6e3e 2283
dd277d48 2284 return vn_reference_lookup_1 (&vr1, vnresult);
9e9e6e3e 2285}
2286
2fd3ecff 2287/* Lookup CALL in the current hash table and return the entry in
2288 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2289
2290void
1a91d914 2291vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2fd3ecff 2292 vn_reference_t vr)
2293{
72e693ed 2294 if (vnresult)
2295 *vnresult = NULL;
2296
2fd3ecff 2297 tree vuse = gimple_vuse (call);
2298
2299 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2300 vr->operands = valueize_shared_reference_ops_from_call (call);
2301 vr->type = gimple_expr_type (call);
2302 vr->set = 0;
2303 vr->hashcode = vn_reference_compute_hash (vr);
2304 vn_reference_lookup_1 (vr, vnresult);
2305}
f6c33c78 2306
9e9e6e3e 2307/* Insert OP into the current hash table with a value number of
f6c33c78 2308 RESULT, and return the resulting reference structure we created. */
9e9e6e3e 2309
2fd3ecff 2310static vn_reference_t
39215e09 2311vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
9e9e6e3e 2312{
3e871d4d 2313 vn_reference_s **slot;
9e9e6e3e 2314 vn_reference_t vr1;
75aefb7b 2315 bool tem;
9e9e6e3e 2316
d27a7bc4 2317 vr1 = current_info->references_pool->allocate ();
f6c33c78 2318 if (TREE_CODE (result) == SSA_NAME)
2319 vr1->value_id = VN_INFO (result)->value_id;
2320 else
2321 vr1->value_id = get_or_alloc_constant_value_id (result);
dd277d48 2322 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
75aefb7b 2323 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3918bd18 2324 vr1->type = TREE_TYPE (op);
2325 vr1->set = get_alias_set (op);
9e9e6e3e 2326 vr1->hashcode = vn_reference_compute_hash (vr1);
2327 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
39215e09 2328 vr1->result_vdef = vdef;
9e9e6e3e 2329
c1f445d2 2330 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2331 INSERT);
9e9e6e3e 2332
2333 /* Because we lookup stores using vuses, and value number failures
2334 using the vdefs (see visit_reference_op_store for how and why),
2335 it's possible that on failure we may try to insert an already
2336 inserted store. This is not wrong, there is no ssa name for a
2337 store that we could use as a differentiator anyway. Thus, unlike
2338 the other lookup functions, you cannot gcc_assert (!*slot)
2339 here. */
2340
12661815 2341 /* But free the old slot in case of a collision. */
2342 if (*slot)
2343 free_reference (*slot);
9e9e6e3e 2344
2345 *slot = vr1;
f6c33c78 2346 return vr1;
2347}
2348
 2349/* Insert a reference by its pieces into the current hash table with
2350 a value number of RESULT. Return the resulting reference
2351 structure we created. */
2352
2353vn_reference_t
3918bd18 2354vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 2355 vec<vn_reference_op_s> operands,
f6c33c78 2356 tree result, unsigned int value_id)
2357
2358{
3e871d4d 2359 vn_reference_s **slot;
f6c33c78 2360 vn_reference_t vr1;
2361
d27a7bc4 2362 vr1 = current_info->references_pool->allocate ();
dd277d48 2363 vr1->value_id = value_id;
2364 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
f6c33c78 2365 vr1->operands = valueize_refs (operands);
3918bd18 2366 vr1->type = type;
2367 vr1->set = set;
f6c33c78 2368 vr1->hashcode = vn_reference_compute_hash (vr1);
2369 if (result && TREE_CODE (result) == SSA_NAME)
2370 result = SSA_VAL (result);
2371 vr1->result = result;
2372
c1f445d2 2373 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2374 INSERT);
48e1416a 2375
f6c33c78 2376 /* At this point we should have all the things inserted that we have
dd277d48 2377 seen before, and we should never try inserting something that
2378 already exists. */
f6c33c78 2379 gcc_assert (!*slot);
2380 if (*slot)
2381 free_reference (*slot);
2382
2383 *slot = vr1;
2384 return vr1;
9e9e6e3e 2385}
2386
51a23cfc 2387/* Compute and return the hash value for nary operation VNO1. */
9e9e6e3e 2388
2fd3ecff 2389static hashval_t
51a23cfc 2390vn_nary_op_compute_hash (const vn_nary_op_t vno1)
9e9e6e3e 2391{
f32e91d5 2392 inchash::hash hstate;
51a23cfc 2393 unsigned i;
9e9e6e3e 2394
51a23cfc 2395 for (i = 0; i < vno1->length; ++i)
2396 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2397 vno1->op[i] = SSA_VAL (vno1->op[i]);
9e9e6e3e 2398
42b45e81 2399 if (((vno1->length == 2
2400 && commutative_tree_code (vno1->opcode))
2401 || (vno1->length == 3
2402 && commutative_ternary_tree_code (vno1->opcode)))
51a23cfc 2403 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
a4f59596 2404 std::swap (vno1->op[0], vno1->op[1]);
42b45e81 2405 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2406 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2407 {
2408 std::swap (vno1->op[0], vno1->op[1]);
2409 vno1->opcode = swap_tree_comparison (vno1->opcode);
2410 }
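  /* Thus, e.g., a_1 + b_2 and b_2 + a_1, or a_1 < b_2 and b_2 > a_1,
     are canonicalized to the same operand order and hash identically
     (an illustration with invented names).  */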
9e9e6e3e 2411
f32e91d5 2412 hstate.add_int (vno1->opcode);
51a23cfc 2413 for (i = 0; i < vno1->length; ++i)
f32e91d5 2414 inchash::add_expr (vno1->op[i], hstate);
9e9e6e3e 2415
f32e91d5 2416 return hstate.end ();
9e9e6e3e 2417}
2418
3e871d4d 2419/* Compare nary operations VNO1 and VNO2 and return true if they are
9e9e6e3e 2420 equivalent. */
2421
3e871d4d 2422bool
2423vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
9e9e6e3e 2424{
51a23cfc 2425 unsigned i;
2426
3d2d7de7 2427 if (vno1->hashcode != vno2->hashcode)
2428 return false;
2429
7384c678 2430 if (vno1->length != vno2->length)
2431 return false;
2432
51a23cfc 2433 if (vno1->opcode != vno2->opcode
c477520d 2434 || !types_compatible_p (vno1->type, vno2->type))
51a23cfc 2435 return false;
2436
2437 for (i = 0; i < vno1->length; ++i)
2438 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2439 return false;
2440
2441 return true;
9e9e6e3e 2442}
2443
f8ce304c 2444/* Initialize VNO from the pieces provided. */
9e9e6e3e 2445
f8ce304c 2446static void
2447init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
7384c678 2448 enum tree_code code, tree type, tree *ops)
f8ce304c 2449{
2450 vno->opcode = code;
2451 vno->length = length;
2452 vno->type = type;
7384c678 2453 memcpy (&vno->op[0], ops, sizeof (tree) * length);
f8ce304c 2454}
2455
2456/* Initialize VNO from OP. */
2457
2458static void
2459init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2460{
2461 unsigned i;
2462
2463 vno->opcode = TREE_CODE (op);
2464 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2465 vno->type = TREE_TYPE (op);
2466 for (i = 0; i < vno->length; ++i)
2467 vno->op[i] = TREE_OPERAND (op, i);
2468}
2469
7384c678 2470/* Return the number of operands for a vn_nary ops structure from STMT. */
2471
2472static unsigned int
42acab1c 2473vn_nary_length_from_stmt (gimple *stmt)
7384c678 2474{
2475 switch (gimple_assign_rhs_code (stmt))
2476 {
2477 case REALPART_EXPR:
2478 case IMAGPART_EXPR:
2479 case VIEW_CONVERT_EXPR:
2480 return 1;
2481
70cd63a3 2482 case BIT_FIELD_REF:
2483 return 3;
2484
7384c678 2485 case CONSTRUCTOR:
2486 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2487
2488 default:
2489 return gimple_num_ops (stmt) - 1;
2490 }
2491}
2492
f8ce304c 2493/* Initialize VNO from STMT. */
2494
2495static void
42acab1c 2496init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
f8ce304c 2497{
2498 unsigned i;
2499
2500 vno->opcode = gimple_assign_rhs_code (stmt);
f8ce304c 2501 vno->type = gimple_expr_type (stmt);
7384c678 2502 switch (vno->opcode)
2503 {
2504 case REALPART_EXPR:
2505 case IMAGPART_EXPR:
2506 case VIEW_CONVERT_EXPR:
2507 vno->length = 1;
2508 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2509 break;
2510
70cd63a3 2511 case BIT_FIELD_REF:
2512 vno->length = 3;
2513 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2514 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2515 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2516 break;
2517
7384c678 2518 case CONSTRUCTOR:
2519 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2520 for (i = 0; i < vno->length; ++i)
2521 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2522 break;
2523
2524 default:
70cd63a3 2525 gcc_checking_assert (!gimple_assign_single_p (stmt));
7384c678 2526 vno->length = gimple_num_ops (stmt) - 1;
2527 for (i = 0; i < vno->length; ++i)
2528 vno->op[i] = gimple_op (stmt, i + 1);
2529 }
f8ce304c 2530}
2531
2532/* Compute the hashcode for VNO and look for it in the hash table;
2533 return the resulting value number if it exists in the hash table.
2534 Return NULL_TREE if it does not exist in the hash table or if the
2535 result field of the operation is NULL. VNRESULT will contain the
2536 vn_nary_op_t from the hashtable if it exists. */
2537
2538static tree
2539vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
f6c33c78 2540{
3e871d4d 2541 vn_nary_op_s **slot;
f8ce304c 2542
f6c33c78 2543 if (vnresult)
2544 *vnresult = NULL;
f8ce304c 2545
2546 vno->hashcode = vn_nary_op_compute_hash (vno);
c1f445d2 2547 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2548 NO_INSERT);
f6c33c78 2549 if (!slot && current_info == optimistic_info)
c1f445d2 2550 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2551 NO_INSERT);
f6c33c78 2552 if (!slot)
2553 return NULL_TREE;
2554 if (vnresult)
3e871d4d 2555 *vnresult = *slot;
2556 return (*slot)->result;
f6c33c78 2557}
2558
f8ce304c 2559/* Lookup a n-ary operation by its pieces and return the resulting value
2560 number if it exists in the hash table. Return NULL_TREE if it does
2561 not exist in the hash table or if the result field of the operation
2562 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2563 if it exists. */
2564
2565tree
2566vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
7384c678 2567 tree type, tree *ops, vn_nary_op_t *vnresult)
f8ce304c 2568{
7384c678 2569 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2570 sizeof_vn_nary_op (length));
2571 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2572 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 2573}
2574
f6c33c78 2575/* Lookup OP in the current hash table, and return the resulting value
2576 number if it exists in the hash table. Return NULL_TREE if it does
2577 not exist in the hash table or if the result field of the operation
2578 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2579 if it exists. */
2580
2581tree
2582vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
9e9e6e3e 2583{
7384c678 2584 vn_nary_op_t vno1
2585 = XALLOCAVAR (struct vn_nary_op_s,
2586 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2587 init_vn_nary_op_from_op (vno1, op);
2588 return vn_nary_op_lookup_1 (vno1, vnresult);
9e9e6e3e 2589}
2590
75a70cf9 2591/* Lookup the rhs of STMT in the current hash table, and return the resulting
2592 value number if it exists in the hash table. Return NULL_TREE if
2593 it does not exist in the hash table. VNRESULT will contain the
2594 vn_nary_op_t from the hashtable if it exists. */
2595
2596tree
42acab1c 2597vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
75a70cf9 2598{
7384c678 2599 vn_nary_op_t vno1
2600 = XALLOCAVAR (struct vn_nary_op_s,
2601 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2602 init_vn_nary_op_from_stmt (vno1, stmt);
2603 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 2604}
2605
eb074ef3 2606/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2607
2608static tree
2609vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2610{
2611 if (!rcode.is_tree_code ())
2612 return NULL_TREE;
2613 vn_nary_op_t vnresult = NULL;
2614 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2615 (tree_code) rcode, type, ops, &vnresult);
2616}
2617
f8ce304c 2618/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2619
2620static vn_nary_op_t
2621alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2622{
2623 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2624}
2625
2626/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2627 obstack. */
2628
2629static vn_nary_op_t
2630alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2631{
2632 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2633 &current_info->nary_obstack);
2634
2635 vno1->value_id = value_id;
2636 vno1->length = length;
2637 vno1->result = result;
2638
2639 return vno1;
2640}
2641
2642/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2643 VNO->HASHCODE first. */
2644
2645static vn_nary_op_t
c1f445d2 2646vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3e871d4d 2647 bool compute_hash)
f8ce304c 2648{
3e871d4d 2649 vn_nary_op_s **slot;
f8ce304c 2650
2651 if (compute_hash)
2652 vno->hashcode = vn_nary_op_compute_hash (vno);
2653
c1f445d2 2654 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
f8ce304c 2655 gcc_assert (!*slot);
2656
2657 *slot = vno;
2658 return vno;
75a70cf9 2659}
2660
f6c33c78 2661/* Insert an n-ary operation into the current hash table using its
2662 pieces. Return the vn_nary_op_t structure we created and put in
2663 the hashtable. */
2664
2665vn_nary_op_t
2666vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
7384c678 2667 tree type, tree *ops,
2668 tree result, unsigned int value_id)
f6c33c78 2669{
7384c678 2670 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2671 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
f8ce304c 2672 return vn_nary_op_insert_into (vno1, current_info->nary, true);
f6c33c78 2673}
2674
9e9e6e3e 2675/* Insert OP into the current hash table with a value number of
f6c33c78 2676 RESULT. Return the vn_nary_op_t structure we created and put in
2677 the hashtable. */
9e9e6e3e 2678
f6c33c78 2679vn_nary_op_t
51a23cfc 2680vn_nary_op_insert (tree op, tree result)
9e9e6e3e 2681{
51a23cfc 2682 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
51a23cfc 2683 vn_nary_op_t vno1;
51a23cfc 2684
f8ce304c 2685 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2686 init_vn_nary_op_from_op (vno1, op);
2687 return vn_nary_op_insert_into (vno1, current_info->nary, true);
9e9e6e3e 2688}
2689
75a70cf9 2690/* Insert the rhs of STMT into the current hash table with a value number of
2691 RESULT. */
2692
24500bba 2693static vn_nary_op_t
42acab1c 2694vn_nary_op_insert_stmt (gimple *stmt, tree result)
75a70cf9 2695{
7384c678 2696 vn_nary_op_t vno1
2697 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2698 result, VN_INFO (result)->value_id);
f8ce304c 2699 init_vn_nary_op_from_stmt (vno1, stmt);
2700 return vn_nary_op_insert_into (vno1, current_info->nary, true);
75a70cf9 2701}
2702
9e9e6e3e 2703/* Compute a hashcode for PHI operation VP1 and return it. */
2704
2705static inline hashval_t
2706vn_phi_compute_hash (vn_phi_t vp1)
2707{
df10fba0 2708 inchash::hash hstate (vp1->phiargs.length () > 2
2709 ? vp1->block->index : vp1->phiargs.length ());
9e9e6e3e 2710 tree phi1op;
9a7beb5f 2711 tree type;
85249fd1 2712 edge e;
2713 edge_iterator ei;
9e9e6e3e 2714
9a7beb5f 2715 /* If all PHI arguments are constants we need to distinguish
2716 the PHI node via its type. */
82a7a70c 2717 type = vp1->type;
f32e91d5 2718 hstate.merge_hash (vn_hash_type (type));
9a7beb5f 2719
85249fd1 2720 FOR_EACH_EDGE (e, ei, vp1->block->preds)
9e9e6e3e 2721 {
85249fd1 2722 /* Don't hash backedge values; they need to be handled as VN_TOP
2723 for optimistic value-numbering. */
2724 if (e->flags & EDGE_DFS_BACK)
2725 continue;
2726
2727 phi1op = vp1->phiargs[e->dest_idx];
9e9e6e3e 2728 if (phi1op == VN_TOP)
2729 continue;
f32e91d5 2730 inchash::add_expr (phi1op, hstate);
9e9e6e3e 2731 }
2732
f32e91d5 2733 return hstate.end ();
9e9e6e3e 2734}
2735
df10fba0 2736
2578db52 2737/* Return true if COND1 and COND2 represent the same condition, set
2738 *INVERTED_P if one needs to be inverted to make it the same as
2739 the other. */
2740
2741static bool
2742cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2743{
2744 enum tree_code code1 = gimple_cond_code (cond1);
2745 enum tree_code code2 = gimple_cond_code (cond2);
2746 tree lhs1 = gimple_cond_lhs (cond1);
2747 tree lhs2 = gimple_cond_lhs (cond2);
2748 tree rhs1 = gimple_cond_rhs (cond1);
2749 tree rhs2 = gimple_cond_rhs (cond2);
2750
2751 *inverted_p = false;
2752 if (code1 == code2)
2753 ;
2754 else if (code1 == swap_tree_comparison (code2))
2755 std::swap (lhs2, rhs2);
2756 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2757 *inverted_p = true;
2758 else if (code1 == invert_tree_comparison
2759 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2760 {
2761 std::swap (lhs2, rhs2);
2762 *inverted_p = true;
2763 }
2764 else
2765 return false;
2766
4e916062 2767 lhs1 = vn_valueize (lhs1);
2768 rhs1 = vn_valueize (rhs1);
2769 lhs2 = vn_valueize (lhs2);
2770 rhs2 = vn_valueize (rhs2);
2771 return ((expressions_equal_p (lhs1, lhs2)
2772 && expressions_equal_p (rhs1, rhs2))
2773 || (commutative_tree_code (code1)
2774 && expressions_equal_p (lhs1, rhs2)
2775 && expressions_equal_p (rhs1, lhs2)));
2578db52 2776}
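/* For example (a hedged sketch with invented names): "if (a_1 < b_2)"
   and "if (b_2 > a_1)" represent the same condition with *INVERTED_P
   false, while "if (a_1 < b_2)" and "if (a_1 >= b_2)" match with
   *INVERTED_P set, modulo NaN handling.  */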
2777
9e9e6e3e 2778/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2779
2780static int
3e871d4d 2781vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
9e9e6e3e 2782{
3d2d7de7 2783 if (vp1->hashcode != vp2->hashcode)
2784 return false;
2785
df10fba0 2786 if (vp1->block != vp2->block)
9e9e6e3e 2787 {
df10fba0 2788 if (vp1->phiargs.length () != vp2->phiargs.length ())
9a7beb5f 2789 return false;
2790
df10fba0 2791 switch (vp1->phiargs.length ())
9e9e6e3e 2792 {
df10fba0 2793 case 1:
2794 /* Single-arg PHIs are just copies. */
2795 break;
2796
2797 case 2:
2798 {
2799 /* Rule out backedges into the PHI. */
2800 if (vp1->block->loop_father->header == vp1->block
2801 || vp2->block->loop_father->header == vp2->block)
2802 return false;
2803
2804 /* If the PHI nodes do not have compatible types
2805 they are not the same. */
2806 if (!types_compatible_p (vp1->type, vp2->type))
2807 return false;
2808
2809 basic_block idom1
2810 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2811 basic_block idom2
2812 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
 2813 /* If the immediate dominators end in switch stmts, multiple
2814 values may end up in the same PHI arg via intermediate
2815 CFG merges. */
2816 if (EDGE_COUNT (idom1->succs) != 2
2817 || EDGE_COUNT (idom2->succs) != 2)
2818 return false;
2819
2820 /* Verify the controlling stmt is the same. */
2821 gimple *last1 = last_stmt (idom1);
2822 gimple *last2 = last_stmt (idom2);
2823 if (gimple_code (last1) != GIMPLE_COND
2824 || gimple_code (last2) != GIMPLE_COND)
2825 return false;
2578db52 2826 bool inverted_p;
2827 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2828 as_a <gcond *> (last2), &inverted_p))
df10fba0 2829 return false;
2830
2831 /* Get at true/false controlled edges into the PHI. */
2832 edge te1, te2, fe1, fe2;
2833 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2834 &te1, &fe1)
2835 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2836 &te2, &fe2))
2837 return false;
2838
2578db52 2839 /* Swap edges if the second condition is the inverse of the
2840 first. */
2841 if (inverted_p)
2842 std::swap (te2, fe2);
2843
df10fba0 2844 /* ??? Handle VN_TOP specially. */
2845 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2846 vp2->phiargs[te2->dest_idx])
2847 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2848 vp2->phiargs[fe2->dest_idx]))
2849 return false;
2850
2851 return true;
2852 }
2853
2854 default:
2855 return false;
9e9e6e3e 2856 }
9e9e6e3e 2857 }
df10fba0 2858
2859 /* If the PHI nodes do not have compatible types
2860 they are not the same. */
2861 if (!types_compatible_p (vp1->type, vp2->type))
2862 return false;
2863
 2864 /* Any phi in the same block will have its arguments in the
2865 same edge order, because of how we store phi nodes. */
2866 int i;
2867 tree phi1op;
2868 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2869 {
2870 tree phi2op = vp2->phiargs[i];
2871 if (phi1op == VN_TOP || phi2op == VN_TOP)
2872 continue;
2873 if (!expressions_equal_p (phi1op, phi2op))
2874 return false;
2875 }
2876
2877 return true;
9e9e6e3e 2878}
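/* An illustrative sketch of the cross-block case above (invented
   names, not from this file):

     if (a_1 < b_2) ... x_3 = PHI <c_4(T), d_5(F)>
     if (b_2 > a_1) ... y_6 = PHI <c_4(T), d_5(F)>

   x_3 and y_6 receive the same value number because the controlling
   conditions are equivalent and the true/false incoming values
   match.  */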
2879
f1f41a6c 2880static vec<tree> shared_lookup_phiargs;
9e9e6e3e 2881
2882/* Lookup PHI in the current hash table, and return the resulting
2883 value number if it exists in the hash table. Return NULL_TREE if
2884 it does not exist in the hash table. */
2885
3dc4c394 2886static tree
42acab1c 2887vn_phi_lookup (gimple *phi)
9e9e6e3e 2888{
3e871d4d 2889 vn_phi_s **slot;
9e9e6e3e 2890 struct vn_phi_s vp1;
85249fd1 2891 edge e;
2892 edge_iterator ei;
9e9e6e3e 2893
f1f41a6c 2894 shared_lookup_phiargs.truncate (0);
85249fd1 2895 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
9e9e6e3e 2896
2897 /* Canonicalize the SSA_NAME's to their value number. */
85249fd1 2898 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
9e9e6e3e 2899 {
85249fd1 2900 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
9e9e6e3e 2901 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
85249fd1 2902 shared_lookup_phiargs[e->dest_idx] = def;
9e9e6e3e 2903 }
82a7a70c 2904 vp1.type = TREE_TYPE (gimple_phi_result (phi));
9e9e6e3e 2905 vp1.phiargs = shared_lookup_phiargs;
75a70cf9 2906 vp1.block = gimple_bb (phi);
9e9e6e3e 2907 vp1.hashcode = vn_phi_compute_hash (&vp1);
c1f445d2 2908 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2909 NO_INSERT);
48694fc0 2910 if (!slot && current_info == optimistic_info)
c1f445d2 2911 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2912 NO_INSERT);
9e9e6e3e 2913 if (!slot)
2914 return NULL_TREE;
3e871d4d 2915 return (*slot)->result;
9e9e6e3e 2916}
2917
2918/* Insert PHI into the current hash table with a value number of
2919 RESULT. */
2920
f6c33c78 2921static vn_phi_t
42acab1c 2922vn_phi_insert (gimple *phi, tree result)
9e9e6e3e 2923{
3e871d4d 2924 vn_phi_s **slot;
d27a7bc4 2925 vn_phi_t vp1 = current_info->phis_pool->allocate ();
1e094109 2926 vec<tree> args = vNULL;
85249fd1 2927 edge e;
2928 edge_iterator ei;
2929
2930 args.safe_grow (gimple_phi_num_args (phi));
9e9e6e3e 2931
2932 /* Canonicalize the SSA_NAME's to their value number. */
85249fd1 2933 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
9e9e6e3e 2934 {
85249fd1 2935 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
9e9e6e3e 2936 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
85249fd1 2937 args[e->dest_idx] = def;
9e9e6e3e 2938 }
f6c33c78 2939 vp1->value_id = VN_INFO (result)->value_id;
82a7a70c 2940 vp1->type = TREE_TYPE (gimple_phi_result (phi));
9e9e6e3e 2941 vp1->phiargs = args;
75a70cf9 2942 vp1->block = gimple_bb (phi);
9e9e6e3e 2943 vp1->result = result;
2944 vp1->hashcode = vn_phi_compute_hash (vp1);
2945
c1f445d2 2946 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
9e9e6e3e 2947
2948 /* Because we iterate over phi operations more than once, it's
 2949 possible the slot might already exist here, hence no assert. */
2950 *slot = vp1;
f6c33c78 2951 return vp1;
9e9e6e3e 2952}
2953
2954
2955/* Print set of components in strongly connected component SCC to OUT. */
2956
2957static void
f1f41a6c 2958print_scc (FILE *out, vec<tree> scc)
9e9e6e3e 2959{
2960 tree var;
2961 unsigned int i;
2962
7ef97146 2963 fprintf (out, "SCC consists of:");
f1f41a6c 2964 FOR_EACH_VEC_ELT (scc, i, var)
9e9e6e3e 2965 {
9e9e6e3e 2966 fprintf (out, " ");
7ef97146 2967 print_generic_expr (out, var, 0);
9e9e6e3e 2968 }
2969 fprintf (out, "\n");
2970}
2971
2972/* Set the value number of FROM to TO, return true if it has changed
2973 as a result. */
2974
2975static inline bool
2976set_ssa_val_to (tree from, tree to)
2977{
b81ffaee 2978 tree currval = SSA_VAL (from);
d68e9408 2979 HOST_WIDE_INT toff, coff;
9e9e6e3e 2980
85e9a542 2981 /* The only thing we allow as value numbers are ssa_names
2982 and invariants. So assert that here. We don't allow VN_TOP
2983 as visiting a stmt should produce a value-number other than
2984 that.
2985 ??? Still VN_TOP can happen for unreachable code, so force
2986 it to varying in that case. Not all code is prepared to
2987 get VN_TOP on valueization. */
2988 if (to == VN_TOP)
2989 {
2990 if (dump_file && (dump_flags & TDF_DETAILS))
2991 fprintf (dump_file, "Forcing value number to varying on "
2992 "receiving VN_TOP\n");
2993 to = from;
2994 }
2995
2996 gcc_assert (to != NULL_TREE
9251bb6f 2997 && ((TREE_CODE (to) == SSA_NAME
2998 && (to == from || SSA_VAL (to) == to))
85e9a542 2999 || is_gimple_min_invariant (to)));
3000
b81ffaee 3001 if (from != to)
3002 {
3003 if (currval == from)
3004 {
3005 if (dump_file && (dump_flags & TDF_DETAILS))
3006 {
3007 fprintf (dump_file, "Not changing value number of ");
3008 print_generic_expr (dump_file, from, 0);
3009 fprintf (dump_file, " from VARYING to ");
3010 print_generic_expr (dump_file, to, 0);
3011 fprintf (dump_file, "\n");
3012 }
3013 return false;
3014 }
3015 else if (TREE_CODE (to) == SSA_NAME
3016 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3017 to = from;
3018 }
5dbdbadc 3019
9e9e6e3e 3020 if (dump_file && (dump_flags & TDF_DETAILS))
3021 {
3022 fprintf (dump_file, "Setting value number of ");
3023 print_generic_expr (dump_file, from, 0);
3024 fprintf (dump_file, " to ");
3025 print_generic_expr (dump_file, to, 0);
9e9e6e3e 3026 }
3027
d68e9408 3028 if (currval != to
3029 && !operand_equal_p (currval, to, 0)
3030 /* ??? For addresses involving volatile objects or types operand_equal_p
3031 does not reliably detect ADDR_EXPRs as equal. We know we are only
3032 getting invariant gimple addresses here, so can use
3033 get_addr_base_and_unit_offset to do this comparison. */
3034 && !(TREE_CODE (currval) == ADDR_EXPR
3035 && TREE_CODE (to) == ADDR_EXPR
3036 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3037 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3038 && coff == toff))
9e9e6e3e 3039 {
dd277d48 3040 VN_INFO (from)->valnum = to;
19744bd4 3041 if (dump_file && (dump_flags & TDF_DETAILS))
3042 fprintf (dump_file, " (changed)\n");
9e9e6e3e 3043 return true;
3044 }
19744bd4 3045 if (dump_file && (dump_flags & TDF_DETAILS))
3046 fprintf (dump_file, "\n");
9e9e6e3e 3047 return false;
3048}
3049
b736e424 3050/* Mark as processed all the definitions in the defining stmt of USE, or
3051 the USE itself. */
3052
3053static void
3054mark_use_processed (tree use)
3055{
3056 ssa_op_iter iter;
3057 def_operand_p defp;
42acab1c 3058 gimple *stmt = SSA_NAME_DEF_STMT (use);
b736e424 3059
3060 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3061 {
3062 VN_INFO (use)->use_processed = true;
3063 return;
3064 }
3065
3066 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3067 {
3068 tree def = DEF_FROM_PTR (defp);
3069
3070 VN_INFO (def)->use_processed = true;
3071 }
3072}
3073
9e9e6e3e 3074/* Set all definitions in STMT to value number themselves.
3075 Return true if a value number changed. */
3076
3077static bool
42acab1c 3078defs_to_varying (gimple *stmt)
9e9e6e3e 3079{
3080 bool changed = false;
3081 ssa_op_iter iter;
3082 def_operand_p defp;
3083
3084 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3085 {
3086 tree def = DEF_FROM_PTR (defp);
9e9e6e3e 3087 changed |= set_ssa_val_to (def, def);
3088 }
3089 return changed;
3090}
3091
3092/* Visit a copy between LHS and RHS, return true if the value number
3093 changed. */
3094
3095static bool
3096visit_copy (tree lhs, tree rhs)
3097{
eb074ef3 3098 /* Valueize. */
cc99c1e1 3099 rhs = SSA_VAL (rhs);
9e9e6e3e 3100
3101 return set_ssa_val_to (lhs, rhs);
3102}
3103
0fea623c 3104/* Visit a nary operator RHS, value number it, and return true if the
9e9e6e3e 3105 value number of LHS has changed as a result. */
3106
3107static bool
42acab1c 3108visit_nary_op (tree lhs, gimple *stmt)
9e9e6e3e 3109{
3110 bool changed = false;
75a70cf9 3111 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
9e9e6e3e 3112
3113 if (result)
0fea623c 3114 changed = set_ssa_val_to (lhs, result);
75a70cf9 3115 else
3116 {
3117 changed = set_ssa_val_to (lhs, lhs);
3118 vn_nary_op_insert_stmt (stmt, lhs);
3119 }
3120
3121 return changed;
3122}
3123
3124/* Visit a call STMT storing into LHS. Return true if the value number
3125 of the LHS has changed as a result. */
3126
3127static bool
1a91d914 3128visit_reference_op_call (tree lhs, gcall *stmt)
9e9e6e3e 3129{
3130 bool changed = false;
75a70cf9 3131 struct vn_reference_s vr1;
b736e424 3132 vn_reference_t vnresult = NULL;
b736e424 3133 tree vdef = gimple_vdef (stmt);
9e9e6e3e 3134
7ec657ff 3135 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3136 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3137 lhs = NULL_TREE;
3138
2fd3ecff 3139 vn_reference_lookup_call (stmt, &vnresult, &vr1);
b736e424 3140 if (vnresult)
9e9e6e3e 3141 {
d1a94241 3142 if (vnresult->result_vdef && vdef)
b736e424 3143 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3144
3145 if (!vnresult->result && lhs)
3146 vnresult->result = lhs;
3147
3148 if (vnresult->result && lhs)
eb074ef3 3149 changed |= set_ssa_val_to (lhs, vnresult->result);
9e9e6e3e 3150 }
3151 else
3152 {
75a70cf9 3153 vn_reference_t vr2;
2fd3ecff 3154 vn_reference_s **slot;
b736e424 3155 if (vdef)
3156 changed |= set_ssa_val_to (vdef, vdef);
3157 if (lhs)
3158 changed |= set_ssa_val_to (lhs, lhs);
d27a7bc4 3159 vr2 = current_info->references_pool->allocate ();
dd277d48 3160 vr2->vuse = vr1.vuse;
2fd3ecff 3161 /* As we are not walking the virtual operand chain we know the
3162 shared_lookup_references are still original so we can re-use
3163 them here. */
3164 vr2->operands = vr1.operands.copy ();
3918bd18 3165 vr2->type = vr1.type;
3166 vr2->set = vr1.set;
75a70cf9 3167 vr2->hashcode = vr1.hashcode;
3168 vr2->result = lhs;
b736e424 3169 vr2->result_vdef = vdef;
c1f445d2 3170 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3171 INSERT);
2fd3ecff 3172 gcc_assert (!*slot);
75a70cf9 3173 *slot = vr2;
9e9e6e3e 3174 }
3175
3176 return changed;
3177}
3178
3179/* Visit a load from a reference operator RHS, part of STMT, value number it,
3180 and return true if the value number of the LHS has changed as a result. */
3181
3182static bool
42acab1c 3183visit_reference_op_load (tree lhs, tree op, gimple *stmt)
9e9e6e3e 3184{
3185 bool changed = false;
4a83fadb 3186 tree last_vuse;
3187 tree result;
3188
3189 last_vuse = gimple_vuse (stmt);
3190 last_vuse_ptr = &last_vuse;
8f190c8a 3191 result = vn_reference_lookup (op, gimple_vuse (stmt),
3192 default_vn_walk_kind, NULL);
4a83fadb 3193 last_vuse_ptr = NULL;
9e9e6e3e 3194
1d9353f3 3195 /* We handle type-punning through unions by value-numbering based
3196 on offset and size of the access. Be prepared to handle a
3197 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3198 if (result
3199 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3200 {
3201 /* We will be setting the value number of lhs to the value number
3202 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3203 So first simplify and lookup this expression to see if it
3204 is already available. */
eb074ef3 3205 mprts_hook = vn_lookup_simplify_result;
717ceeab 3206 code_helper rcode = VIEW_CONVERT_EXPR;
3207 tree ops[3] = { result };
3208 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3209 vn_valueize);
eb074ef3 3210 mprts_hook = NULL;
717ceeab 3211 gimple *new_stmt = NULL;
3212 if (res
3213 && gimple_simplified_result_is_gimple_val (rcode, ops))
3214 /* The expression is already available. */
3215 result = ops[0];
3216 else
eb074ef3 3217 {
717ceeab 3218 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
eb074ef3 3219 if (!val)
3220 {
717ceeab 3221 gimple_seq stmts = NULL;
3222 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3223 &stmts);
3224 gcc_assert (result && gimple_seq_singleton_p (stmts));
3225 new_stmt = gimple_seq_first_stmt (stmts);
eb074ef3 3226 }
717ceeab 3227 else
3228 /* The expression is already available. */
3229 result = val;
1d9353f3 3230 }
717ceeab 3231 if (new_stmt)
eb074ef3 3232 {
eb074ef3 3233 /* The expression is not yet available, value-number lhs to
3234 the new SSA_NAME we created. */
1d9353f3 3235 /* Initialize value-number information properly. */
3236 VN_INFO_GET (result)->valnum = result;
75a70cf9 3237 VN_INFO (result)->value_id = get_next_value_id ();
717ceeab 3238 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3239 new_stmt);
1d9353f3 3240 VN_INFO (result)->needs_insertion = true;
3241 /* As all "inserted" statements are singleton SCCs, insert
 3242 into the valid table. This is strictly needed to
3243 avoid re-generating new value SSA_NAMEs for the same
3244 expression during SCC iteration over and over (the
3245 optimistic table gets cleared after each iteration).
3246 We do not need to insert into the optimistic table, as
3247 lookups there will fall back to the valid table. */
3248 if (current_info == optimistic_info)
3249 {
3250 current_info = valid_info;
717ceeab 3251 vn_nary_op_insert_stmt (new_stmt, result);
1d9353f3 3252 current_info = optimistic_info;
3253 }
3254 else
717ceeab 3255 vn_nary_op_insert_stmt (new_stmt, result);
1d9353f3 3256 if (dump_file && (dump_flags & TDF_DETAILS))
3257 {
3258 fprintf (dump_file, "Inserting name ");
3259 print_generic_expr (dump_file, result, 0);
3260 fprintf (dump_file, " for expression ");
717ceeab 3261 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1d9353f3 3262 fprintf (dump_file, "\n");
3263 }
3264 }
3265 }
3266
9e9e6e3e 3267 if (result)
eb074ef3 3268 changed = set_ssa_val_to (lhs, result);
9e9e6e3e 3269 else
3270 {
3271 changed = set_ssa_val_to (lhs, lhs);
39215e09 3272 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
9e9e6e3e 3273 }
3274
3275 return changed;
3276}
3277
3278
3279/* Visit a store to a reference operator LHS, part of STMT, value number it,
3280 and return true if the value number of the LHS has changed as a result. */
3281
3282static bool
42acab1c 3283visit_reference_op_store (tree lhs, tree op, gimple *stmt)
9e9e6e3e 3284{
3285 bool changed = false;
39215e09 3286 vn_reference_t vnresult = NULL;
3287 tree result, assign;
9e9e6e3e 3288 bool resultsame = false;
39215e09 3289 tree vuse = gimple_vuse (stmt);
3290 tree vdef = gimple_vdef (stmt);
9e9e6e3e 3291
9251bb6f 3292 if (TREE_CODE (op) == SSA_NAME)
3293 op = SSA_VAL (op);
3294
9e9e6e3e 3295 /* First we want to lookup using the *vuses* from the store and see
 3296 whether the last store to this location with the same address
3297 had the same value.
3298
3299 The vuses represent the memory state before the store. If the
 3300 memory state, address, and value of the store are the same as the
3301 last store to this location, then this store will produce the
3302 same memory state as that store.
3303
3304 In this case the vdef versions for this store are value numbered to those
3305 vuse versions, since they represent the same memory state after
3306 this store.
3307
3308 Otherwise, the vdefs for the store are used when inserting into
3309 the table, since the store generates a new memory state. */
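  /* A hedged sketch (invented names, not from this file):

       # .MEM_2 = VDEF <.MEM_1>
       *p_3 = x_4;
       # .MEM_5 = VDEF <.MEM_2>
       *p_3 = x_4;

     the second store writes the value the location already holds, so
     .MEM_5 is value-numbered to .MEM_2 and the store is redundant.  */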
3310
39215e09 3311 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
9e9e6e3e 3312
3313 if (result)
3314 {
3315 if (TREE_CODE (result) == SSA_NAME)
3316 result = SSA_VAL (result);
3317 resultsame = expressions_equal_p (result, op);
3318 }
3319
2fd3ecff 3320 if ((!result || !resultsame)
3321 /* Only perform the following when being called from PRE
3322 which embeds tail merging. */
3323 && default_vn_walk_kind == VN_WALK)
9e9e6e3e 3324 {
39215e09 3325 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3326 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3327 if (vnresult)
3328 {
3329 VN_INFO (vdef)->use_processed = true;
3330 return set_ssa_val_to (vdef, vnresult->result_vdef);
3331 }
3332 }
9e9e6e3e 3333
39215e09 3334 if (!result || !resultsame)
3335 {
9e9e6e3e 3336 if (dump_file && (dump_flags & TDF_DETAILS))
3337 {
3338 fprintf (dump_file, "No store match\n");
3339 fprintf (dump_file, "Value numbering store ");
3340 print_generic_expr (dump_file, lhs, 0);
3341 fprintf (dump_file, " to ");
3342 print_generic_expr (dump_file, op, 0);
3343 fprintf (dump_file, "\n");
3344 }
3345 /* Have to set value numbers before insert, since insert is
3346 going to valueize the references in-place. */
39215e09 3347 if (vdef)
9e9e6e3e 3348 {
9e9e6e3e 3349 changed |= set_ssa_val_to (vdef, vdef);
3350 }
3351
802d9f2f 3352 /* Do not insert structure copies into the tables. */
3353 if (is_gimple_min_invariant (op)
3354 || is_gimple_reg (op))
39215e09 3355 vn_reference_insert (lhs, op, vdef, NULL);
3356
2fd3ecff 3357 /* Only perform the following when being called from PRE
3358 which embeds tail merging. */
3359 if (default_vn_walk_kind == VN_WALK)
3360 {
3361 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3362 vn_reference_insert (assign, lhs, vuse, vdef);
3363 }
9e9e6e3e 3364 }
3365 else
3366 {
dd277d48 3367 /* We had a match, so value number the vdef to have the value
3368 number of the vuse it came from. */
9e9e6e3e 3369
3370 if (dump_file && (dump_flags & TDF_DETAILS))
 3371 fprintf (dump_file, "Store matched earlier value, "
3372 "value numbering store vdefs to matching vuses.\n");
3373
39215e09 3374 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
9e9e6e3e 3375 }
3376
3377 return changed;
3378}
3379
3380/* Visit and value number PHI, return true if the value number
3381 changed. */
3382
3383static bool
42acab1c 3384visit_phi (gimple *phi)
9e9e6e3e 3385{
3386 bool changed = false;
3387 tree result;
3388 tree sameval = VN_TOP;
3389 bool allsame = true;
4e916062 3390 unsigned n_executable = 0;
9e9e6e3e 3391
5f6261a7 3392   /* TODO: We could check for this in init_scc_vn, and replace this
3393 with a gcc_assert. */
3394 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3395 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3396
9e9e6e3e 3397 /* See if all non-TOP arguments have the same value. TOP is
3398 equivalent to everything, so we can ignore it. */
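
  /* E.g. (illustrative): for x_3 = PHI <x_1(e1), x_2(e2)> where
     SSA_VAL (x_2) is still VN_TOP, the x_2 argument is ignored and
     x_3 is value numbered to x_1's value.  */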
85e9a542 3399 edge_iterator ei;
3400 edge e;
3401 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3402 if (e->flags & EDGE_EXECUTABLE)
3403 {
3404 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
9e9e6e3e 3405
4e916062 3406 ++n_executable;
85e9a542 3407 if (TREE_CODE (def) == SSA_NAME)
3408 def = SSA_VAL (def);
3409 if (def == VN_TOP)
3410 continue;
3411 if (sameval == VN_TOP)
85249fd1 3412 sameval = def;
3413 else if (!expressions_equal_p (def, sameval))
85e9a542 3414 {
85249fd1 3415 allsame = false;
3416 break;
85e9a542 3417 }
3418 }
29aa5498 3419
 3420   /* If none of the edges was executable, or all incoming values are
4e916062 3421      undefined, keep the value-number at VN_TOP.  If only a single edge
 3422      is executable, use its value.  */
3423 if (sameval == VN_TOP
3424 || n_executable == 1)
3425 return set_ssa_val_to (PHI_RESULT (phi), sameval);
9e9e6e3e 3426
85249fd1 3427 /* First see if it is equivalent to a phi node in this block. We prefer
3428 this as it allows IV elimination - see PRs 66502 and 67167. */
9e9e6e3e 3429 result = vn_phi_lookup (phi);
3430 if (result)
b46fda07 3431 changed = set_ssa_val_to (PHI_RESULT (phi), result);
85249fd1 3432   /* Otherwise, if all arguments were value numbered to the same value,
 3433      the phi node has that value.  */
3434 else if (allsame)
3435 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
9e9e6e3e 3436 else
3437 {
3438 vn_phi_insert (phi, PHI_RESULT (phi));
9e9e6e3e 3439 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3440 }
3441
3442 return changed;
3443}
3444
9e9e6e3e 3445/* Try to simplify RHS using equivalences and constant folding. */
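/* A hedged example: if the lattice already records SSA_VAL (y_2) as
   the constant 7, an RHS like y_2 + 1 folds via
   gimple_fold_stmt_to_constant_1 (with vn_valueize) to the constant 8,
   which the caller can then use as the value number of the LHS.  */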
3446
3447static tree
1a91d914 3448try_to_simplify (gassign *stmt)
9e9e6e3e 3449{
ce993cc2 3450 enum tree_code code = gimple_assign_rhs_code (stmt);
e004838d 3451 tree tem;
3452
d4cdfd27 3453   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
 3454      in this case; there is no point in doing extra work.  */
ce993cc2 3455 if (code == SSA_NAME)
75a70cf9 3456 return NULL_TREE;
e004838d 3457
1d0b727d 3458 /* First try constant folding based on our current lattice. */
eb074ef3 3459 mprts_hook = vn_lookup_simplify_result;
ef8cb3d3 3460 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
eb074ef3 3461 mprts_hook = NULL;
ce993cc2 3462 if (tem
3463 && (TREE_CODE (tem) == SSA_NAME
3464 || is_gimple_min_invariant (tem)))
1d0b727d 3465 return tem;
3466
75a70cf9 3467 return NULL_TREE;
9e9e6e3e 3468}
3469
3470/* Visit and value number USE, return true if the value number
3471 changed. */
3472
3473static bool
3474visit_use (tree use)
3475{
3476 bool changed = false;
42acab1c 3477 gimple *stmt = SSA_NAME_DEF_STMT (use);
9e9e6e3e 3478
b736e424 3479 mark_use_processed (use);
9e9e6e3e 3480
3481 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1d9353f3 3482 if (dump_file && (dump_flags & TDF_DETAILS)
75a70cf9 3483 && !SSA_NAME_IS_DEFAULT_DEF (use))
9e9e6e3e 3484 {
3485 fprintf (dump_file, "Value numbering ");
3486 print_generic_expr (dump_file, use, 0);
3487 fprintf (dump_file, " stmt = ");
75a70cf9 3488 print_gimple_stmt (dump_file, stmt, 0, 0);
9e9e6e3e 3489 }
3490
9e9e6e3e 3491 /* Handle uninitialized uses. */
75a70cf9 3492 if (SSA_NAME_IS_DEFAULT_DEF (use))
3493 changed = set_ssa_val_to (use, use);
afb92221 3494 else if (gimple_code (stmt) == GIMPLE_PHI)
3495 changed = visit_phi (stmt);
3496 else if (gimple_has_volatile_ops (stmt))
3497 changed = defs_to_varying (stmt);
3498 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3499 {
3500 enum tree_code code = gimple_assign_rhs_code (ass);
3501 tree lhs = gimple_assign_lhs (ass);
3502 tree rhs1 = gimple_assign_rhs1 (ass);
3503 tree simplified;
3504
3505 /* Shortcut for copies. Simplifying copies is pointless,
3506 since we copy the expression and value they represent. */
3507 if (code == SSA_NAME
3508 && TREE_CODE (lhs) == SSA_NAME)
3509 {
3510 changed = visit_copy (lhs, rhs1);
3511 goto done;
3512 }
3513 simplified = try_to_simplify (ass);
3514 if (simplified)
9e9e6e3e 3515 {
afb92221 3516 if (dump_file && (dump_flags & TDF_DETAILS))
2a922cb6 3517 {
afb92221 3518 fprintf (dump_file, "RHS ");
3519 print_gimple_expr (dump_file, ass, 0, 0);
3520 fprintf (dump_file, " simplified to ");
3521 print_generic_expr (dump_file, simplified, 0);
3522 fprintf (dump_file, "\n");
3523 }
3524 }
3525 /* Setting value numbers to constants will occasionally
3526 screw up phi congruence because constants are not
3527 uniquely associated with a single ssa name that can be
3528 looked up. */
3529 if (simplified
3530 && is_gimple_min_invariant (simplified)
3531 && TREE_CODE (lhs) == SSA_NAME)
3532 {
3533 changed = set_ssa_val_to (lhs, simplified);
3534 goto done;
3535 }
3536 else if (simplified
3537 && TREE_CODE (simplified) == SSA_NAME
3538 && TREE_CODE (lhs) == SSA_NAME)
3539 {
3540 changed = visit_copy (lhs, simplified);
3541 goto done;
3542 }
3543
3544 if ((TREE_CODE (lhs) == SSA_NAME
3545 /* We can substitute SSA_NAMEs that are live over
3546 abnormal edges with their constant value. */
3547 && !(gimple_assign_copy_p (ass)
3548 && is_gimple_min_invariant (rhs1))
3549 && !(simplified
3550 && is_gimple_min_invariant (simplified))
3551 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3552 /* Stores or copies from SSA_NAMEs that are live over
3553 abnormal edges are a problem. */
3554 || (code == SSA_NAME
3555 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3556 changed = defs_to_varying (ass);
3557 else if (REFERENCE_CLASS_P (lhs)
3558 || DECL_P (lhs))
3559 changed = visit_reference_op_store (lhs, rhs1, ass);
3560 else if (TREE_CODE (lhs) == SSA_NAME)
3561 {
3562 if ((gimple_assign_copy_p (ass)
3563 && is_gimple_min_invariant (rhs1))
3564 || (simplified
3565 && is_gimple_min_invariant (simplified)))
3566 {
3567 if (simplified)
3568 changed = set_ssa_val_to (lhs, simplified);
3569 else
3570 changed = set_ssa_val_to (lhs, rhs1);
3571 }
3572 else
3573 {
3574 /* Visit the original statement. */
3575 switch (vn_get_stmt_kind (ass))
3576 {
3577 case VN_NARY:
3578 changed = visit_nary_op (lhs, ass);
3579 break;
3580 case VN_REFERENCE:
3581 changed = visit_reference_op_load (lhs, rhs1, ass);
3582 break;
3583 default:
3584 changed = defs_to_varying (ass);
3585 break;
3586 }
2a922cb6 3587 }
afb92221 3588 }
3589 else
3590 changed = defs_to_varying (ass);
3591 }
3592 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3593 {
3594 tree lhs = gimple_call_lhs (call_stmt);
3595 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3596 {
3597 /* Try constant folding based on our current lattice. */
3598 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3599 vn_valueize);
75a70cf9 3600 if (simplified)
9e9e6e3e 3601 {
3602 if (dump_file && (dump_flags & TDF_DETAILS))
3603 {
afb92221 3604 fprintf (dump_file, "call ");
3605 print_gimple_expr (dump_file, call_stmt, 0, 0);
9e9e6e3e 3606 fprintf (dump_file, " simplified to ");
3607 print_generic_expr (dump_file, simplified, 0);
eb074ef3 3608 fprintf (dump_file, "\n");
9e9e6e3e 3609 }
3610 }
3611 /* Setting value numbers to constants will occasionally
3612 screw up phi congruence because constants are not
3613 uniquely associated with a single ssa name that can be
3614 looked up. */
75a70cf9 3615 if (simplified
afb92221 3616 && is_gimple_min_invariant (simplified))
9e9e6e3e 3617 {
9e9e6e3e 3618 changed = set_ssa_val_to (lhs, simplified);
afb92221 3619 if (gimple_vdef (call_stmt))
3620 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3621 SSA_VAL (gimple_vuse (call_stmt)));
9e9e6e3e 3622 goto done;
3623 }
75a70cf9 3624 else if (simplified
afb92221 3625 && TREE_CODE (simplified) == SSA_NAME)
9e9e6e3e 3626 {
3627 changed = visit_copy (lhs, simplified);
afb92221 3628 if (gimple_vdef (call_stmt))
3629 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3630 SSA_VAL (gimple_vuse (call_stmt)));
9e9e6e3e 3631 goto done;
3632 }
afb92221 3633 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
9e9e6e3e 3634 {
afb92221 3635 changed = defs_to_varying (call_stmt);
3636 goto done;
9e9e6e3e 3637 }
9e9e6e3e 3638 }
75a70cf9 3639
afb92221 3640 if (!gimple_call_internal_p (call_stmt)
3641 && (/* Calls to the same function with the same vuse
3642 and the same operands do not necessarily return the same
3643 value, unless they're pure or const. */
3644 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3645 /* If calls have a vdef, subsequent calls won't have
3646 the same incoming vuse. So, if 2 calls with vdef have the
3647 same vuse, we know they're not subsequent.
 3648 		 We can value number two calls to the same function with the
 3649 		 same vuse and the same operands, which are not subsequent,
 3650 		 the same, because there is no code in the program that can
 3651 		 compare the two values...  */
3652 || (gimple_vdef (call_stmt)
 3653 	       /* ... unless the call returns a pointer which does
 3654 		  not alias with anything else.  In that case the
 3655 		  information that the values are distinct is encoded
 3656 		  in the IL.  */
3657 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3658 /* Only perform the following when being called from PRE
3659 which embeds tail merging. */
3660 && default_vn_walk_kind == VN_WALK)))
3661 changed = visit_reference_op_call (lhs, call_stmt);
b736e424 3662 else
afb92221 3663 changed = defs_to_varying (call_stmt);
9e9e6e3e 3664 }
afb92221 3665 else
3666 changed = defs_to_varying (stmt);
9e9e6e3e 3667 done:
3668 return changed;
3669}
3670
 3671 /* Compare two operands by reverse postorder index.  */
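/* Illustrative summary: names without a defining statement sort first;
   within one basic block PHIs sort before other statements and
   statements sort by gimple uid; across blocks the blocks' RPO numbers
   decide.  */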
3672
3673static int
3674compare_ops (const void *pa, const void *pb)
3675{
3676 const tree opa = *((const tree *)pa);
3677 const tree opb = *((const tree *)pb);
42acab1c 3678 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3679 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
9e9e6e3e 3680 basic_block bba;
3681 basic_block bbb;
3682
75a70cf9 3683 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
f7b092e4 3684 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
75a70cf9 3685 else if (gimple_nop_p (opstmta))
9e9e6e3e 3686 return -1;
75a70cf9 3687 else if (gimple_nop_p (opstmtb))
9e9e6e3e 3688 return 1;
3689
75a70cf9 3690 bba = gimple_bb (opstmta);
3691 bbb = gimple_bb (opstmtb);
9e9e6e3e 3692
3693 if (!bba && !bbb)
f7b092e4 3694 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
9e9e6e3e 3695 else if (!bba)
3696 return -1;
3697 else if (!bbb)
3698 return 1;
3699
3700 if (bba == bbb)
3701 {
75a70cf9 3702 if (gimple_code (opstmta) == GIMPLE_PHI
3703 && gimple_code (opstmtb) == GIMPLE_PHI)
f7b092e4 3704 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
75a70cf9 3705 else if (gimple_code (opstmta) == GIMPLE_PHI)
9e9e6e3e 3706 return -1;
75a70cf9 3707 else if (gimple_code (opstmtb) == GIMPLE_PHI)
9e9e6e3e 3708 return 1;
f7b092e4 3709 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3710 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3711 else
3712 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
9e9e6e3e 3713 }
3714 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3715}
3716
3717/* Sort an array containing members of a strongly connected component
3718 SCC so that the members are ordered by RPO number.
3719 This means that when the sort is complete, iterating through the
3720 array will give you the members in RPO order. */
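/* Visiting the members in RPO order means operands defined earlier in
   the SCC have usually been value numbered already in the current
   iteration, which helps the optimistic iteration converge faster.  */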
3721
3722static void
f1f41a6c 3723sort_scc (vec<tree> scc)
9e9e6e3e 3724{
f1f41a6c 3725 scc.qsort (compare_ops);
9e9e6e3e 3726}
3727
3df47675 3728/* Insert the no-longer-used nary ONARY into the tables INFO.  */
ca4721d3 3729
3df47675 3730static void
3731copy_nary (vn_nary_op_t onary, vn_tables_t info)
ca4721d3 3732{
f8ce304c 3733 size_t size = sizeof_vn_nary_op (onary->length);
3734 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3735 &info->nary_obstack);
ca4721d3 3736 memcpy (nary, onary, size);
f8ce304c 3737 vn_nary_op_insert_into (nary, info->nary, false);
ca4721d3 3738}
3739
3df47675 3740/* Insert the no-longer-used phi OPHI into the tables INFO.  */
ca4721d3 3741
3df47675 3742static void
3743copy_phi (vn_phi_t ophi, vn_tables_t info)
ca4721d3 3744{
d27a7bc4 3745 vn_phi_t phi = info->phis_pool->allocate ();
3e871d4d 3746 vn_phi_s **slot;
ca4721d3 3747 memcpy (phi, ophi, sizeof (*phi));
f1f41a6c 3748 ophi->phiargs.create (0);
c1f445d2 3749 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3df47675 3750 gcc_assert (!*slot);
ca4721d3 3751 *slot = phi;
ca4721d3 3752}
3753
3df47675 3754/* Insert the no-longer-used reference OREF into the tables INFO.  */
ca4721d3 3755
3df47675 3756static void
3757copy_reference (vn_reference_t oref, vn_tables_t info)
ca4721d3 3758{
ca4721d3 3759 vn_reference_t ref;
3e871d4d 3760 vn_reference_s **slot;
d27a7bc4 3761 ref = info->references_pool->allocate ();
ca4721d3 3762 memcpy (ref, oref, sizeof (*ref));
f1f41a6c 3763 oref->operands.create (0);
c1f445d2 3764 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
ca4721d3 3765 if (*slot)
3766 free_reference (*slot);
3767 *slot = ref;
ca4721d3 3768}
3769
9e9e6e3e 3770/* Process a strongly connected component in the SSA graph. */
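/* A hedged illustration: for a simple counting loop

     i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 1;

   the names {i_1, i_2} form an SCC which is re-visited with the
   optimistic table until their value numbers stop changing.  */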
3771
3772static void
f1f41a6c 3773process_scc (vec<tree> scc)
9e9e6e3e 3774{
3df47675 3775 tree var;
3776 unsigned int i;
3777 unsigned int iterations = 0;
3778 bool changed = true;
3e871d4d 3779 vn_nary_op_iterator_type hin;
3780 vn_phi_iterator_type hip;
3781 vn_reference_iterator_type hir;
3df47675 3782 vn_nary_op_t nary;
3783 vn_phi_t phi;
3784 vn_reference_t ref;
9e9e6e3e 3785
3df47675 3786 /* If the SCC has a single member, just visit it. */
f1f41a6c 3787 if (scc.length () == 1)
9e9e6e3e 3788 {
f1f41a6c 3789 tree use = scc[0];
ebca8514 3790 if (VN_INFO (use)->use_processed)
3791 return;
3792 /* We need to make sure it doesn't form a cycle itself, which can
3793 happen for self-referential PHI nodes. In that case we would
3794 end up inserting an expression with VN_TOP operands into the
3795 valid table which makes us derive bogus equivalences later.
3796 The cheapest way to check this is to assume it for all PHI nodes. */
3797 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3798 /* Fallthru to iteration. */ ;
3799 else
3800 {
3801 visit_use (use);
3802 return;
3803 }
9e9e6e3e 3804 }
3df47675 3805
c443af04 3806 if (dump_file && (dump_flags & TDF_DETAILS))
3807 print_scc (dump_file, scc);
3808
3df47675 3809 /* Iterate over the SCC with the optimistic table until it stops
3810 changing. */
3811 current_info = optimistic_info;
3812 while (changed)
9e9e6e3e 3813 {
3df47675 3814 changed = false;
3815 iterations++;
b81ffaee 3816 if (dump_file && (dump_flags & TDF_DETAILS))
3817 fprintf (dump_file, "Starting iteration %d\n", iterations);
3df47675 3818 /* As we are value-numbering optimistically we have to
3819 clear the expression tables and the simplified expressions
3820 in each iteration until we converge. */
c1f445d2 3821 optimistic_info->nary->empty ();
3822 optimistic_info->phis->empty ();
3823 optimistic_info->references->empty ();
3df47675 3824 obstack_free (&optimistic_info->nary_obstack, NULL);
3825 gcc_obstack_init (&optimistic_info->nary_obstack);
d27a7bc4 3826 optimistic_info->phis_pool->release ();
3827 optimistic_info->references_pool->release ();
f1f41a6c 3828 FOR_EACH_VEC_ELT (scc, i, var)
eb074ef3 3829 gcc_assert (!VN_INFO (var)->needs_insertion
3830 && VN_INFO (var)->expr == NULL);
f1f41a6c 3831 FOR_EACH_VEC_ELT (scc, i, var)
3df47675 3832 changed |= visit_use (var);
3833 }
9e9e6e3e 3834
c443af04 3835 if (dump_file && (dump_flags & TDF_DETAILS))
3836 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3df47675 3837 statistics_histogram_event (cfun, "SCC iterations", iterations);
9e9e6e3e 3838
3df47675 3839 /* Finally, copy the contents of the no longer used optimistic
3840 table to the valid table. */
c1f445d2 3841 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3df47675 3842 copy_nary (nary, valid_info);
c1f445d2 3843 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3df47675 3844 copy_phi (phi, valid_info);
c1f445d2 3845 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3e871d4d 3846 ref, vn_reference_t, hir)
3df47675 3847 copy_reference (ref, valid_info);
3848
3849 current_info = valid_info;
9e9e6e3e 3850}
3851
000ef0a0 3852
3853/* Pop the components of the found SCC for NAME off the SCC stack
3854 and process them. Returns true if all went well, false if
3855 we run into resource limits. */
3856
3857static bool
3858extract_and_process_scc_for_name (tree name)
3859{
c2078b80 3860 auto_vec<tree> scc;
000ef0a0 3861 tree x;
3862
3863 /* Found an SCC, pop the components off the SCC stack and
3864 process them. */
3865 do
3866 {
f1f41a6c 3867 x = sccstack.pop ();
000ef0a0 3868
3869 VN_INFO (x)->on_sccstack = false;
f1f41a6c 3870 scc.safe_push (x);
000ef0a0 3871 } while (x != name);
3872
 3873   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
f1f41a6c 3874 if (scc.length ()
000ef0a0 3875 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3876 {
3877 if (dump_file)
3878 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
f1f41a6c 3879 "SCC size %u exceeding %u\n", scc.length (),
000ef0a0 3880 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
83b709f2 3881
000ef0a0 3882 return false;
3883 }
3884
f1f41a6c 3885 if (scc.length () > 1)
000ef0a0 3886 sort_scc (scc);
3887
000ef0a0 3888 process_scc (scc);
3889
000ef0a0 3890 return true;
3891}
3892
9e9e6e3e 3893/* Depth first search on NAME to discover and process SCC's in the SSA
3894 graph.
3895 Execution of this algorithm relies on the fact that the SCC's are
a9b2282e 3896 popped off the stack in topological order.
3897 Returns true if successful, false if we stopped processing SCC's due
f0b5f617 3898 to resource constraints. */
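/* This is Tarjan's SCC algorithm, made iterative with an explicit
   stack of use iterators; an SCC is popped and processed whenever
   VN_INFO (name)->low == VN_INFO (name)->dfsnum still holds after all
   operands of NAME's defining statement have been walked.  */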
9e9e6e3e 3899
a9b2282e 3900static bool
9e9e6e3e 3901DFS (tree name)
3902{
1e094109 3903 vec<ssa_op_iter> itervec = vNULL;
3904 vec<tree> namevec = vNULL;
000ef0a0 3905 use_operand_p usep = NULL;
42acab1c 3906 gimple *defstmt;
75a70cf9 3907 tree use;
9e9e6e3e 3908 ssa_op_iter iter;
9e9e6e3e 3909
000ef0a0 3910start_over:
9e9e6e3e 3911 /* SCC info */
3912 VN_INFO (name)->dfsnum = next_dfs_num++;
3913 VN_INFO (name)->visited = true;
3914 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3915
f1f41a6c 3916 sccstack.safe_push (name);
9e9e6e3e 3917 VN_INFO (name)->on_sccstack = true;
3918 defstmt = SSA_NAME_DEF_STMT (name);
3919
3920 /* Recursively DFS on our operands, looking for SCC's. */
75a70cf9 3921 if (!gimple_nop_p (defstmt))
9e9e6e3e 3922 {
000ef0a0 3923 /* Push a new iterator. */
1a91d914 3924 if (gphi *phi = dyn_cast <gphi *> (defstmt))
3925 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
000ef0a0 3926 else
3927 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3928 }
3929 else
5383fb56 3930 clear_and_done_ssa_iter (&iter);
000ef0a0 3931
3932 while (1)
3933 {
3934 /* If we are done processing uses of a name, go up the stack
3935 of iterators and process SCCs as we found them. */
3936 if (op_iter_done (&iter))
9e9e6e3e 3937 {
000ef0a0 3938 /* See if we found an SCC. */
3939 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3940 if (!extract_and_process_scc_for_name (name))
3941 {
f1f41a6c 3942 namevec.release ();
3943 itervec.release ();
000ef0a0 3944 return false;
3945 }
9e9e6e3e 3946
000ef0a0 3947 /* Check if we are done. */
f1f41a6c 3948 if (namevec.is_empty ())
000ef0a0 3949 {
f1f41a6c 3950 namevec.release ();
3951 itervec.release ();
000ef0a0 3952 return true;
3953 }
3954
3955 /* Restore the last use walker and continue walking there. */
3956 use = name;
f1f41a6c 3957 name = namevec.pop ();
3958 memcpy (&iter, &itervec.last (),
000ef0a0 3959 sizeof (ssa_op_iter));
f1f41a6c 3960 itervec.pop ();
000ef0a0 3961 goto continue_walking;
3962 }
9e9e6e3e 3963
000ef0a0 3964 use = USE_FROM_PTR (usep);
3965
3966 /* Since we handle phi nodes, we will sometimes get
3967 invariants in the use expression. */
3968 if (TREE_CODE (use) == SSA_NAME)
3969 {
9e9e6e3e 3970 if (! (VN_INFO (use)->visited))
3971 {
000ef0a0 3972 /* Recurse by pushing the current use walking state on
3973 the stack and starting over. */
f1f41a6c 3974 itervec.safe_push (iter);
3975 namevec.safe_push (name);
000ef0a0 3976 name = use;
3977 goto start_over;
3978
3979continue_walking:
9e9e6e3e 3980 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3981 VN_INFO (use)->low);
3982 }
3983 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3984 && VN_INFO (use)->on_sccstack)
3985 {
3986 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3987 VN_INFO (name)->low);
3988 }
3989 }
a9b2282e 3990
000ef0a0 3991 usep = op_iter_next_use (&iter);
9e9e6e3e 3992 }
3993}
3994
9e9e6e3e 3995/* Allocate a value number table. */
3996
3997static void
3998allocate_vn_table (vn_tables_t table)
3999{
c1f445d2 4000 table->phis = new vn_phi_table_type (23);
4001 table->nary = new vn_nary_op_table_type (23);
4002 table->references = new vn_reference_table_type (23);
9e9e6e3e 4003
51a23cfc 4004 gcc_obstack_init (&table->nary_obstack);
1dc6c44d 4005 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
e16712b1 4006 table->references_pool = new object_allocator<vn_reference_s>
1dc6c44d 4007 ("VN references");
9e9e6e3e 4008}
4009
4010/* Free a value number table. */
4011
4012static void
4013free_vn_table (vn_tables_t table)
4014{
c1f445d2 4015 delete table->phis;
4016 table->phis = NULL;
4017 delete table->nary;
4018 table->nary = NULL;
4019 delete table->references;
4020 table->references = NULL;
51a23cfc 4021 obstack_free (&table->nary_obstack, NULL);
d27a7bc4 4022 delete table->phis_pool;
4023 delete table->references_pool;
9e9e6e3e 4024}
4025
4026static void
4027init_scc_vn (void)
4028{
4029 size_t i;
4030 int j;
4031 int *rpo_numbers_temp;
9e9e6e3e 4032
4033 calculate_dominance_info (CDI_DOMINATORS);
85249fd1 4034 mark_dfs_back_edges ();
4035
f1f41a6c 4036 sccstack.create (0);
c1f445d2 4037 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
48e1416a 4038
f6c33c78 4039 constant_value_ids = BITMAP_ALLOC (NULL);
48e1416a 4040
9e9e6e3e 4041 next_dfs_num = 1;
f6c33c78 4042 next_value_id = 1;
48e1416a 4043
f1f41a6c 4044 vn_ssa_aux_table.create (num_ssa_names + 1);
9e9e6e3e 4045 /* VEC_alloc doesn't actually grow it to the right size, it just
4046 preallocates the space to do so. */
f1f41a6c 4047 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
b9584939 4048 gcc_obstack_init (&vn_ssa_aux_obstack);
4049
f1f41a6c 4050 shared_lookup_phiargs.create (0);
4051 shared_lookup_references.create (0);
fe672ac0 4052 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
a28770e1 4053 rpo_numbers_temp =
4054 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
9e9e6e3e 4055 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4056
4057 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
4058 the i'th block in RPO order is bb. We want to map bb's to RPO
4059 numbers, so we need to rearrange this array. */
a28770e1 4060 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
9e9e6e3e 4061 rpo_numbers[rpo_numbers_temp[j]] = j;
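  /* E.g. (illustrative): if rpo_numbers_temp is {2, 5, 3}, block 2 gets
     RPO number 0, block 5 gets 1 and block 3 gets 2.  */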
4062
b9584939 4063 XDELETE (rpo_numbers_temp);
9e9e6e3e 4064
4065 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4066
ec415c45 4067 renumber_gimple_stmt_uids ();
9e9e6e3e 4068
4069 /* Create the valid and optimistic value numbering tables. */
4070 valid_info = XCNEW (struct vn_tables_s);
4071 allocate_vn_table (valid_info);
4072 optimistic_info = XCNEW (struct vn_tables_s);
4073 allocate_vn_table (optimistic_info);
34c8056f 4074 current_info = valid_info;
4075
4076 /* Create the VN_INFO structures, and initialize value numbers to
4077 TOP or VARYING for parameters. */
4078 for (i = 1; i < num_ssa_names; i++)
4079 {
4080 tree name = ssa_name (i);
4081 if (!name)
4082 continue;
4083
4084 VN_INFO_GET (name)->valnum = VN_TOP;
eb074ef3 4085 VN_INFO (name)->needs_insertion = false;
4086 VN_INFO (name)->expr = NULL;
34c8056f 4087 VN_INFO (name)->value_id = 0;
4088
4089 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4090 continue;
4091
4092 switch (TREE_CODE (SSA_NAME_VAR (name)))
4093 {
4094 case VAR_DECL:
4095 /* Undefined vars keep TOP. */
4096 break;
4097
4098 case PARM_DECL:
4099 /* Parameters are VARYING but we can record a condition
4100 if we know it is a non-NULL pointer. */
4101 VN_INFO (name)->visited = true;
4102 VN_INFO (name)->valnum = name;
4103 if (POINTER_TYPE_P (TREE_TYPE (name))
4104 && nonnull_arg_p (SSA_NAME_VAR (name)))
4105 {
4106 tree ops[2];
4107 ops[0] = name;
4108 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4109 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4110 boolean_true_node, 0);
4111 if (dump_file && (dump_flags & TDF_DETAILS))
4112 {
4113 fprintf (dump_file, "Recording ");
4114 print_generic_expr (dump_file, name, TDF_SLIM);
4115 fprintf (dump_file, " != 0\n");
4116 }
4117 }
4118 break;
4119
4120 case RESULT_DECL:
4121 /* If the result is passed by invisible reference the default
4122 def is initialized, otherwise it's uninitialized. */
4123 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4124 {
4125 VN_INFO (name)->visited = true;
4126 VN_INFO (name)->valnum = name;
4127 }
4128 break;
4129
4130 default:
4131 gcc_unreachable ();
4132 }
4133 }
9e9e6e3e 4134}
4135
4136void
4137free_scc_vn (void)
4138{
4139 size_t i;
4140
c1f445d2 4141 delete constant_to_value_id;
4142 constant_to_value_id = NULL;
f6c33c78 4143 BITMAP_FREE (constant_value_ids);
f1f41a6c 4144 shared_lookup_phiargs.release ();
4145 shared_lookup_references.release ();
9e9e6e3e 4146 XDELETEVEC (rpo_numbers);
b9584939 4147
9e9e6e3e 4148 for (i = 0; i < num_ssa_names; i++)
4149 {
4150 tree name = ssa_name (i);
1d9353f3 4151 if (name
afb92221 4152 && has_VN_INFO (name)
1d9353f3 4153 && VN_INFO (name)->needs_insertion)
4154 release_ssa_name (name);
9e9e6e3e 4155 }
b9584939 4156 obstack_free (&vn_ssa_aux_obstack, NULL);
f1f41a6c 4157 vn_ssa_aux_table.release ();
b9584939 4158
f1f41a6c 4159 sccstack.release ();
9e9e6e3e 4160 free_vn_table (valid_info);
4161 XDELETE (valid_info);
4162 free_vn_table (optimistic_info);
4163 XDELETE (optimistic_info);
dddafd79 4164
4165 BITMAP_FREE (const_parms);
9e9e6e3e 4166}
4167
63628665 4168/* Set *ID to the value id of RESULT: an SSA name's value id, a constant's value id, or a fresh one.  */
f8ce304c 4169
4170static void
4171set_value_id_for_result (tree result, unsigned int *id)
4172{
63628665 4173 if (result && TREE_CODE (result) == SSA_NAME)
4174 *id = VN_INFO (result)->value_id;
4175 else if (result && is_gimple_min_invariant (result))
4176 *id = get_or_alloc_constant_value_id (result);
4177 else
4178 *id = get_next_value_id ();
f8ce304c 4179}
4180
8883e700 4181/* Set the value ids in the valid hash tables. */
f6c33c78 4182
4183static void
4184set_hashtable_value_ids (void)
4185{
3e871d4d 4186 vn_nary_op_iterator_type hin;
4187 vn_phi_iterator_type hip;
4188 vn_reference_iterator_type hir;
f6c33c78 4189 vn_nary_op_t vno;
4190 vn_reference_t vr;
4191 vn_phi_t vp;
8883e700 4192
f6c33c78 4193 /* Now set the value ids of the things we had put in the hash
4194 table. */
4195
c1f445d2 4196 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
f8ce304c 4197 set_value_id_for_result (vno->result, &vno->value_id);
f6c33c78 4198
c1f445d2 4199 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
f8ce304c 4200 set_value_id_for_result (vp->result, &vp->value_id);
f6c33c78 4201
c1f445d2 4202 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4203 hir)
f8ce304c 4204 set_value_id_for_result (vr->result, &vr->value_id);
f6c33c78 4205}
4206
db981500 4207class sccvn_dom_walker : public dom_walker
85e9a542 4208{
4209public:
42b45e81 4210 sccvn_dom_walker ()
96752458 4211 : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
c45ca67d 4212 ~sccvn_dom_walker ();
85e9a542 4213
96752458 4214 virtual edge before_dom_children (basic_block);
42b45e81 4215 virtual void after_dom_children (basic_block);
4216
4217 void record_cond (basic_block,
4218 enum tree_code code, tree lhs, tree rhs, bool value);
4219 void record_conds (basic_block,
4220 enum tree_code code, tree lhs, tree rhs, bool value);
85e9a542 4221
4222 bool fail;
42b45e81 4223 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4224 cond_stack;
85e9a542 4225};
4226
c45ca67d 4227sccvn_dom_walker::~sccvn_dom_walker ()
4228{
4229 cond_stack.release ();
4230}
4231
42b45e81 4232/* Record a temporary condition for the BB and its dominated blocks. */
4233
4234void
4235sccvn_dom_walker::record_cond (basic_block bb,
4236 enum tree_code code, tree lhs, tree rhs,
4237 bool value)
4238{
4239 tree ops[2] = { lhs, rhs };
4240 vn_nary_op_t old = NULL;
4241 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4242 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4243 vn_nary_op_t cond
4244 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4245 value
4246 ? boolean_true_node
4247 : boolean_false_node, 0);
4248 if (dump_file && (dump_flags & TDF_DETAILS))
4249 {
4250 fprintf (dump_file, "Recording temporarily ");
4251 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4252 fprintf (dump_file, " %s ", get_tree_code_name (code));
4253 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4254 fprintf (dump_file, " == %s%s\n",
4255 value ? "true" : "false",
4256 old ? " (old entry saved)" : "");
4257 }
4258 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4259}
4260
4261/* Record temporary conditions for the BB and its dominated blocks
4262 according to LHS CODE RHS == VALUE and its dominated conditions. */
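/* E.g. (illustrative): record_conds (bb, LT_EXPR, a_1, b_2, true)
   records a_1 < b_2 == true together with the dominated facts
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false.  */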
4263
4264void
4265sccvn_dom_walker::record_conds (basic_block bb,
4266 enum tree_code code, tree lhs, tree rhs,
4267 bool value)
4268{
4269 /* Record the original condition. */
4270 record_cond (bb, code, lhs, rhs, value);
4271
4272 if (!value)
4273 return;
4274
4275 /* Record dominated conditions if the condition is true. Note that
4276 the inversion is already recorded. */
4277 switch (code)
4278 {
4279 case LT_EXPR:
4280 case GT_EXPR:
4281 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4282 record_cond (bb, NE_EXPR, lhs, rhs, true);
4283 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4284 break;
4285
4286 case EQ_EXPR:
4287 record_cond (bb, LE_EXPR, lhs, rhs, true);
4288 record_cond (bb, GE_EXPR, lhs, rhs, true);
4289 record_cond (bb, LT_EXPR, lhs, rhs, false);
4290 record_cond (bb, GT_EXPR, lhs, rhs, false);
4291 break;
4292
4293 default:
4294 break;
4295 }
4296}
4297
4298/* Restore expressions and values derived from conditionals. */
4299
4300void
4301sccvn_dom_walker::after_dom_children (basic_block bb)
4302{
4303 while (!cond_stack.is_empty ()
4304 && cond_stack.last ().first == bb)
4305 {
4306 vn_nary_op_t cond = cond_stack.last ().second.first;
4307 vn_nary_op_t old = cond_stack.last ().second.second;
4308 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4309 if (old)
4310 vn_nary_op_insert_into (old, current_info->nary, false);
4311 cond_stack.pop ();
4312 }
4313}
4314
db981500 4315/* Value number all statements in BB. */
4316
96752458 4317edge
db981500 4318sccvn_dom_walker::before_dom_children (basic_block bb)
85e9a542 4319{
4320 edge e;
4321 edge_iterator ei;
4322
4323 if (fail)
96752458 4324 return NULL;
85e9a542 4325
089af8be 4326 if (dump_file && (dump_flags & TDF_DETAILS))
4327 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4328
42b45e81 4329   /* If we have a single predecessor, record the equivalence from a
 4330      possible condition on the predecessor edge.  */
d4f86d5e 4331 edge pred_e = NULL;
4332 FOR_EACH_EDGE (e, ei, bb->preds)
4333 {
 4334       /* Ignore simple backedges into this block to allow recording
 4335 	 conditions in loop headers.  */
4336 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4337 continue;
4338 if (! pred_e)
4339 pred_e = e;
4340 else
4341 {
4342 pred_e = NULL;
4343 break;
4344 }
4345 }
4346 if (pred_e)
42b45e81 4347 {
42b45e81 4348 /* Check if there are multiple executable successor edges in
4349 the source block. Otherwise there is no additional info
4350 to be recorded. */
4351 edge e2;
d4f86d5e 4352 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4353 if (e2 != pred_e
42b45e81 4354 && e2->flags & EDGE_EXECUTABLE)
4355 break;
4356 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4357 {
d4f86d5e 4358 gimple *stmt = last_stmt (pred_e->src);
42b45e81 4359 if (stmt
4360 && gimple_code (stmt) == GIMPLE_COND)
4361 {
4362 enum tree_code code = gimple_cond_code (stmt);
4363 tree lhs = gimple_cond_lhs (stmt);
4364 tree rhs = gimple_cond_rhs (stmt);
4365 record_conds (bb, code, lhs, rhs,
d4f86d5e 4366 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
42b45e81 4367 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4368 if (code != ERROR_MARK)
4369 record_conds (bb, code, lhs, rhs,
d4f86d5e 4370 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
42b45e81 4371 }
4372 }
4373 }
4374
db981500 4375 /* Value-number all defs in the basic-block. */
4376 for (gphi_iterator gsi = gsi_start_phis (bb);
4377 !gsi_end_p (gsi); gsi_next (&gsi))
4378 {
4379 gphi *phi = gsi.phi ();
4380 tree res = PHI_RESULT (phi);
4381 if (!VN_INFO (res)->visited
4382 && !DFS (res))
4383 {
4384 fail = true;
96752458 4385 return NULL;
db981500 4386 }
4387 }
4388 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4389 !gsi_end_p (gsi); gsi_next (&gsi))
4390 {
4391 ssa_op_iter i;
4392 tree op;
4393 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4394 if (!VN_INFO (op)->visited
4395 && !DFS (op))
4396 {
4397 fail = true;
96752458 4398 return NULL;
db981500 4399 }
4400 }
4401
4402 /* Finally look at the last stmt. */
42acab1c 4403 gimple *stmt = last_stmt (bb);
85e9a542 4404 if (!stmt)
96752458 4405 return NULL;
85e9a542 4406
c443af04 4407 enum gimple_code code = gimple_code (stmt);
4408 if (code != GIMPLE_COND
4409 && code != GIMPLE_SWITCH
4410 && code != GIMPLE_GOTO)
96752458 4411 return NULL;
c443af04 4412
4413 if (dump_file && (dump_flags & TDF_DETAILS))
4414 {
089af8be 4415 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
c443af04 4416 print_gimple_stmt (dump_file, stmt, 0, 0);
4417 }
4418
85e9a542 4419 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4420 if value-numbering can prove they are not reachable. Handling
4421 computed gotos is also possible. */
4422 tree val;
c443af04 4423 switch (code)
85e9a542 4424 {
4425 case GIMPLE_COND:
4426 {
eb074ef3 4427 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4428 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4429 val = gimple_simplify (gimple_cond_code (stmt),
4430 boolean_type_node, lhs, rhs,
4431 NULL, vn_valueize);
42b45e81 4432 /* If that didn't simplify to a constant see if we have recorded
4433 temporary expressions from taken edges. */
4434 if (!val || TREE_CODE (val) != INTEGER_CST)
4435 {
4436 tree ops[2];
eb074ef3 4437 ops[0] = lhs;
4438 ops[1] = rhs;
42b45e81 4439 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4440 boolean_type_node, ops, NULL);
4441 }
85e9a542 4442 break;
4443 }
4444 case GIMPLE_SWITCH:
1a91d914 4445 val = gimple_switch_index (as_a <gswitch *> (stmt));
85e9a542 4446 break;
4447 case GIMPLE_GOTO:
4448 val = gimple_goto_dest (stmt);
4449 break;
4450 default:
c443af04 4451 gcc_unreachable ();
85e9a542 4452 }
4453 if (!val)
96752458 4454 return NULL;
85e9a542 4455
4456 edge taken = find_taken_edge (bb, vn_valueize (val));
4457 if (!taken)
96752458 4458 return NULL;
85e9a542 4459
4460 if (dump_file && (dump_flags & TDF_DETAILS))
4461 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4462 "not executable\n", bb->index, bb->index, taken->dest->index);
4463
96752458 4464 return taken;
85e9a542 4465}
4466
a9b2282e 4467/* Do SCCVN. Returns true if it finished, false if we bailed out
8f190c8a 4468 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4469 how we use the alias oracle walking during the VN process. */
a9b2282e 4470
4471bool
8f190c8a 4472run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
9e9e6e3e 4473{
4474 size_t i;
48e1416a 4475
8f190c8a 4476 default_vn_walk_kind = default_vn_walk_kind_;
4477
9e9e6e3e 4478 init_scc_vn ();
9e9e6e3e 4479
dddafd79 4480 /* Collect pointers we know point to readonly memory. */
4481 const_parms = BITMAP_ALLOC (NULL);
4482 tree fnspec = lookup_attribute ("fn spec",
4483 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4484 if (fnspec)
4485 {
4486 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4487 i = 1;
4488 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4489 arg; arg = DECL_CHAIN (arg), ++i)
4490 {
4491 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4492 break;
4493 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4494 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4495 {
4496 tree name = ssa_default_def (cfun, arg);
4497 if (name)
4498 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4499 }
4500 }
4501 }
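
  /* E.g. (illustrative): for a "fn spec" string ".rR" the characters
     at positions 1 and 2 describe the first two parameters; both 'r'
     and 'R' mean the pointed-to memory is only read, so those
     parameters' default defs were recorded in const_parms above.  */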
4502
db981500 4503 /* Walk all blocks in dominator order, value-numbering stmts
4504 SSA defs and decide whether outgoing edges are not executable. */
4505 sccvn_dom_walker walker;
85e9a542 4506 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4507 if (walker.fail)
4508 {
4509 free_scc_vn ();
4510 return false;
4511 }
4512
db981500 4513 /* Initialize the value ids and prune out remaining VN_TOPs
4514 from dead code. */
f6c33c78 4515 for (i = 1; i < num_ssa_names; ++i)
4516 {
4517 tree name = ssa_name (i);
4518 vn_ssa_aux_t info;
4519 if (!name)
4520 continue;
4521 info = VN_INFO (name);
db981500 4522 if (!info->visited)
4523 info->valnum = name;
d94bf438 4524 if (info->valnum == name
4525 || info->valnum == VN_TOP)
f6c33c78 4526 info->value_id = get_next_value_id ();
4527 else if (is_gimple_min_invariant (info->valnum))
4528 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4529 }
48e1416a 4530
597ce871 4531 /* Propagate. */
4532 for (i = 1; i < num_ssa_names; ++i)
f6c33c78 4533 {
597ce871 4534 tree name = ssa_name (i);
4535 vn_ssa_aux_t info;
4536 if (!name)
4537 continue;
4538 info = VN_INFO (name);
4539 if (TREE_CODE (info->valnum) == SSA_NAME
4540 && info->valnum != name
4541 && info->value_id != VN_INFO (info->valnum)->value_id)
4542 info->value_id = VN_INFO (info->valnum)->value_id;
f6c33c78 4543 }
48e1416a 4544
f6c33c78 4545 set_hashtable_value_ids ();
48e1416a 4546
9e9e6e3e 4547 if (dump_file && (dump_flags & TDF_DETAILS))
4548 {
4549 fprintf (dump_file, "Value numbers:\n");
4550 for (i = 0; i < num_ssa_names; i++)
4551 {
4552 tree name = ssa_name (i);
8883e700 4553 if (name
4554 && VN_INFO (name)->visited
4555 && SSA_VAL (name) != name)
9e9e6e3e 4556 {
4557 print_generic_expr (dump_file, name, 0);
4558 fprintf (dump_file, " = ");
8883e700 4559 print_generic_expr (dump_file, SSA_VAL (name), 0);
9e9e6e3e 4560 fprintf (dump_file, "\n");
4561 }
4562 }
4563 }
a9b2282e 4564
4565 return true;
9e9e6e3e 4566}
f6c33c78 4567
4568/* Return the maximum value id we have ever seen. */
4569
4570unsigned int
48e1416a 4571get_max_value_id (void)
f6c33c78 4572{
4573 return next_value_id;
4574}
4575
4576/* Return the next unique value id. */
4577
4578unsigned int
4579get_next_value_id (void)
4580{
4581 return next_value_id++;
4582}
4583
4584
127fb64d 4585/* Compare two expressions E1 and E2 and return true if they are equal. */
f6c33c78 4586
4587bool
4588expressions_equal_p (tree e1, tree e2)
4589{
127fb64d 4590 /* The obvious case. */
f6c33c78 4591 if (e1 == e2)
4592 return true;
4593
4e916062 4594 /* If either one is VN_TOP consider them equal. */
4595 if (e1 == VN_TOP || e2 == VN_TOP)
4596 return true;
4597
127fb64d 4598 /* If only one of them is null, they cannot be equal. */
4599 if (!e1 || !e2)
4600 return false;
4601
127fb64d 4602 /* Now perform the actual comparison. */
4603 if (TREE_CODE (e1) == TREE_CODE (e2)
4604 && operand_equal_p (e1, e2, OEP_PURE_SAME))
f6c33c78 4605 return true;
4606
4607 return false;
4608}
4609
2ac47fdf 4610
4611/* Return true if the nary operation NARY may trap. This is a copy
4612 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4613
4614bool
4615vn_nary_may_trap (vn_nary_op_t nary)
4616{
4617 tree type;
888b74b6 4618 tree rhs2 = NULL_TREE;
2ac47fdf 4619 bool honor_nans = false;
4620 bool honor_snans = false;
4621 bool fp_operation = false;
4622 bool honor_trapv = false;
4623 bool handled, ret;
4624 unsigned i;
4625
4626 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4627 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4628 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4629 {
4630 type = nary->type;
4631 fp_operation = FLOAT_TYPE_P (type);
4632 if (fp_operation)
4633 {
4634 honor_nans = flag_trapping_math && !flag_finite_math_only;
4635 honor_snans = flag_signaling_nans != 0;
4636 }
4637 else if (INTEGRAL_TYPE_P (type)
4638 && TYPE_OVERFLOW_TRAPS (type))
4639 honor_trapv = true;
4640 }
888b74b6 4641 if (nary->length >= 2)
4642 rhs2 = nary->op[1];
2ac47fdf 4643 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4644 honor_trapv,
4645 honor_nans, honor_snans, rhs2,
4646 &handled);
4647 if (handled
4648 && ret)
4649 return true;
4650
4651 for (i = 0; i < nary->length; ++i)
4652 if (tree_could_trap_p (nary->op[i]))
4653 return true;
4654
4655 return false;
4656}