/* SCC value numbering for trees
   Copyright (C) 2006-2016 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/

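/* An added illustrative sketch (the SSA names below are invented for
   exposition, they do not come from this file): for the loop

	i_1 = PHI <0 (entry), i_3 (latch)>
	j_2 = PHI <0 (entry), j_4 (latch)>
	i_3 = i_1 + 1;
	j_4 = j_2 + 1;

   {i_1, i_3, j_2, j_4} form one SCC of the SSA graph.  The optimistic
   table starts from the assumption i_1 == j_2 (the PHIs look identical
   modulo that assumption), which gives i_3 == j_4; re-iterating the
   SCC changes nothing, so the assumption is proven and both induction
   variables collapse to a single value number.  A non-iterating RPO
   hash-based numbering cannot show this, because i_3 == j_4 holds only
   under the assumption about the PHIs.  */
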
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
bitmap const_parms;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

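/* Added note (illustration, not from the original sources): during
   elimination a released VDEF can still appear as a value number, e.g.
   SSA_VAL (.MEM_5) == .MEM_4 where .MEM_4 is already in the free list
   and SSA_VAL (.MEM_4) == .MEM_2; the loop above keeps following the
   chain until it reaches a live name such as .MEM_2.  */
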
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

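/* Added examples (illustrative only): "x_1 = y_2 + 1" is VN_NARY,
   "x_1 = a.b" is VN_REFERENCE, "x_1 = PHI <...>" is VN_PHI,
   "x_1 = 42" is VN_CONSTANT, and "x_1 = &a[i_2]" is VN_REFERENCE
   because the address is not a gimple min invariant.  */
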
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

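/* Added note (illustration): the off/deref logic above canonicalizes
   constant offsets before hashing.  A run of operands with known
   offsets contributes only its accumulated sum, and an ADDR_EXPR that
   feeds a MEM_REF contributes the underlying decl instead of the
   address expression.  E.g. MEM[&s + 4] and s.f with f at byte offset
   4 (in its canonical MEM[&s] form) end up with the same hash
   contributions, so the equality test below gets a chance to merge
   them.  */
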
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

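/* Added illustration: the two inner loops above independently consume
   maximal runs of constant-offset operands from each vector and only
   compare the accumulated offsets, so operand vectors of different
   shape, e.g. a single MEM_REF with offset 4 versus a COMPONENT_REF at
   offset 4 over a MEM_REF with offset 0, can still compare equal.  */
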
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  {
	    offset_int off = mem_ref_offset (ref);
	    if (wi::fits_shwi_p (off))
	      temp.off = off.to_shwi ();
	  }
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
	    {
	      HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
	      if (off % BITS_PER_UNIT == 0)
		temp.off = off / BITS_PER_UNIT;
	    }
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	/* These are only interesting for their operands, their
	   existence, and their type.  They will never be the last
	   ref in the chain of references (i.e. they require an
	   operand), so we don't have to put anything
	   for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}

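/* Added illustration: for a load from s.a[i_1], where field a starts
   at byte offset 8 and has 4-byte elements, the operand vector is
   roughly (outermost operation first)

     ARRAY_REF      op0 = i_1, op1 = low bound, op2 = elt size, off = -1
     COMPONENT_REF  op0 = FIELD_DECL a,                         off = 8
     MEM_REF        op0 = 0,                                    off = 0
     ADDR_EXPR      op0 = &s

   ending in the canonical MEM[&decl] form produced for bare decls.  */
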
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  offset_int offset = 0;
  offset_int max_size;
  offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = int (GET_MODE_BITSIZE (mode));
    }
  if (size_tree != NULL_TREE
      && TREE_CODE (size_tree) == INTEGER_CST)
    size = wi::to_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
	      max_size = -1;
	    else
	      {
		offset_int woffset = wi::lshift (wi::to_offset (this_offset),
						 LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (TREE_CODE (op->op0) != INTEGER_CST
	      || TREE_CODE (op->op1) != INTEGER_CST
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2);
	      woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!wi::fits_shwi_p (size) || wi::neg_p (size))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  ref->size = size.to_shwi ();

  if (!wi::fits_shwi_p (offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  ref->offset = offset.to_shwi ();

  if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
    ref->max_size = -1;
  else
    ref->max_size = max_size.to_shwi ();

  return true;
}

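/* Added note (illustration): applied to a constant-index variant of
   the earlier example, say s.a[2], this yields an ao_ref with base s,
   bit offset (8 + 2*4) * BITS_PER_UNIT and size == max_size == the
   element size, which is what the oracle needs for disambiguation; a
   variable index instead forces max_size to -1.  */
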
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}

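/* Added illustration: for the operand pair MEM_REF <offset 0> over
   ADDR_EXPR <&s.f>, with f at byte offset 8, the rewrite above turns
   the pair into MEM_REF <offset 8> over ADDR_EXPR <&s>, i.e. *&s.f
   becomes the canonical MEM[&s + 8].  */
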
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && off == 0
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}

1264
12bd5a1e
RG
1265/* Optimize the reference REF to a constant if possible or return
1266 NULL_TREE if not. */
1267
1268tree
1269fully_constant_vn_reference_p (vn_reference_t ref)
1270{
9771b263 1271 vec<vn_reference_op_s> operands = ref->operands;
12bd5a1e
RG
1272 vn_reference_op_t op;
1273
1274 /* Try to simplify the translated expression if it is
1275 a call to a builtin function with at most two arguments. */
9771b263 1276 op = &operands[0];
12bd5a1e
RG
1277 if (op->opcode == CALL_EXPR
1278 && TREE_CODE (op->op0) == ADDR_EXPR
1279 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1280 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
9771b263
DN
1281 && operands.length () >= 2
1282 && operands.length () <= 3)
12bd5a1e
RG
1283 {
1284 vn_reference_op_t arg0, arg1 = NULL;
1285 bool anyconst = false;
9771b263
DN
1286 arg0 = &operands[1];
1287 if (operands.length () > 2)
1288 arg1 = &operands[2];
12bd5a1e
RG
1289 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1290 || (arg0->opcode == ADDR_EXPR
1291 && is_gimple_min_invariant (arg0->op0)))
1292 anyconst = true;
1293 if (arg1
1294 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1295 || (arg1->opcode == ADDR_EXPR
1296 && is_gimple_min_invariant (arg1->op0))))
1297 anyconst = true;
1298 if (anyconst)
1299 {
1300 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1301 arg1 ? 2 : 1,
1302 arg0->op0,
1303 arg1 ? arg1->op0 : NULL);
1304 if (folded
1305 && TREE_CODE (folded) == NOP_EXPR)
1306 folded = TREE_OPERAND (folded, 0);
1307 if (folded
1308 && is_gimple_min_invariant (folded))
1309 return folded;
1310 }
1311 }
1312
8403c2cf
RB
1313 /* Simplify reads from constants or constant initializers. */
1314 else if (BITS_PER_UNIT == 8
1315 && is_gimple_reg_type (ref->type)
1316 && (!INTEGRAL_TYPE_P (ref->type)
1317 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
12bd5a1e 1318 {
8403c2cf 1319 HOST_WIDE_INT off = 0;
552b2afe
JJ
1320 HOST_WIDE_INT size;
1321 if (INTEGRAL_TYPE_P (ref->type))
1322 size = TYPE_PRECISION (ref->type);
1323 else
1324 size = tree_to_shwi (TYPE_SIZE (ref->type));
8403c2cf
RB
1325 if (size % BITS_PER_UNIT != 0
1326 || size > MAX_BITSIZE_MODE_ANY_MODE)
1327 return NULL_TREE;
1328 size /= BITS_PER_UNIT;
1329 unsigned i;
1330 for (i = 0; i < operands.length (); ++i)
1331 {
1332 if (operands[i].off == -1)
1333 return NULL_TREE;
1334 off += operands[i].off;
1335 if (operands[i].opcode == MEM_REF)
1336 {
1337 ++i;
1338 break;
1339 }
1340 }
1341 vn_reference_op_t base = &operands[--i];
1342 tree ctor = error_mark_node;
1343 tree decl = NULL_TREE;
1344 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1345 ctor = base->op0;
1346 else if (base->opcode == MEM_REF
1347 && base[1].opcode == ADDR_EXPR
1348 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1349 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1350 {
1351 decl = TREE_OPERAND (base[1].op0, 0);
1352 ctor = ctor_for_folding (decl);
1353 }
1354 if (ctor == NULL_TREE)
1355 return build_zero_cst (ref->type);
1356 else if (ctor != error_mark_node)
1357 {
1358 if (decl)
1359 {
1360 tree res = fold_ctor_reference (ref->type, ctor,
1361 off * BITS_PER_UNIT,
1362 size * BITS_PER_UNIT, decl);
1363 if (res)
1364 {
1365 STRIP_USELESS_TYPE_CONVERSION (res);
1366 if (is_gimple_min_invariant (res))
1367 return res;
1368 }
1369 }
1370 else
1371 {
1372 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
1373 int len = native_encode_expr (ctor, buf, size, off);
1374 if (len > 0)
1375 return native_interpret_expr (ref->type, buf, len);
8403c2cf
RB
1376 }
1377 }
12bd5a1e
RG
1378 }
1379
1380 return NULL_TREE;
1381}
1382
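/* Added illustration: given "static const int tab[4] = {1, 2, 3, 4};",
   a lookup of tab[2] reaches the second branch above with all operand
   offsets constant; the walk finds the MEM_REF/ADDR_EXPR base for tab
   with accumulated byte offset 8, ctor_for_folding returns the
   initializer, and fold_ctor_reference extracts the constant 3.  */
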
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

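/* Added illustration: if i_2 is value-numbered to 3, valueizing the
   operands of a[i_2] substitutes 3 for op0 of the ARRAY_REF; because
   the lower bound and element size were recorded as constants, the
   last branch in valueize_refs_1 then computes a concrete off for the
   previously variable (off == -1) operand, letting the reference
   match earlier constant-indexed accesses.  */
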
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

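/* Added note on the callback protocol (summarized for exposition, not
   stated in this file): returning NULL continues the walk to the next
   dominating definition, returning a hashtable slot stops the walk
   with a successful lookup, and returning (void *)-1 aborts the walk,
   here because it exceeded PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS.  */
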
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

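/* Added note: this helper caches a value that vn_reference_lookup_3
   derives by looking through a killing store (e.g. "this load reads
   bytes a memset just zeroed"), so that an identical lookup later hits
   the reference hashtable directly instead of repeating the walk.  */
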
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool *disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* If the reference is based on a parameter that was determined as
     pointing to readonly memory it doesn't change.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && bitmap_bit_p (const_parms,
		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
    {
      *disambiguate_only = true;
      return NULL;
    }

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    {
	      *disambiguate_only = true;
	      return NULL;
	    }
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    {
	      *disambiguate_only = true;
	      return NULL;
	    }
	}
    }

  if (*disambiguate_only)
    return (void *)-1;

  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
				       &reverse);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	    (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2, &reverse);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	    (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2, &reverse);
      if (!reverse
	  && maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
9179ed9d 1814 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1815 (vuse, vr->set, vr->type, vr->operands, val);
c867aba0
RG
1816 }
1817 }
1818 }
1819
0147184e
RG
1820 /* 4) Assignment from an SSA name which definition we may be able
1821 to access pieces from. */
1822 else if (ref->size == maxsize
1823 && is_gimple_reg_type (vr->type)
ee45a32d 1824 && !contains_storage_order_barrier_p (vr->operands)
0147184e
RG
1825 && gimple_assign_single_p (def_stmt)
1826 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1827 {
1828 tree rhs1 = gimple_assign_rhs1 (def_stmt);
355fe088 1829 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
0147184e
RG
1830 if (is_gimple_assign (def_stmt2)
1831 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1832 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1833 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1834 {
1835 tree base2;
1836 HOST_WIDE_INT offset2, size2, maxsize2, off;
ee45a32d 1837 bool reverse;
0147184e 1838 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
ee45a32d
EB
1839 &offset2, &size2, &maxsize2,
1840 &reverse);
0147184e 1841 off = offset - offset2;
ee45a32d
EB
1842 if (!reverse
1843 && maxsize2 != -1
0147184e
RG
1844 && maxsize2 == size2
1845 && operand_equal_p (base, base2, 0)
1846 && offset2 <= offset
1847 && offset2 + size2 >= offset + maxsize)
1848 {
1849 tree val = NULL_TREE;
1850 HOST_WIDE_INT elsz
1851 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1852 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1853 {
1854 if (off == 0)
1855 val = gimple_assign_rhs1 (def_stmt2);
1856 else if (off == elsz)
1857 val = gimple_assign_rhs2 (def_stmt2);
1858 }
1859 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1860 && off % elsz == 0)
1861 {
1862 tree ctor = gimple_assign_rhs1 (def_stmt2);
1863 unsigned i = off / elsz;
1864 if (i < CONSTRUCTOR_NELTS (ctor))
1865 {
1866 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
13396b6e
JJ
1867 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1868 {
1869 if (TREE_CODE (TREE_TYPE (elt->value))
1870 != VECTOR_TYPE)
1871 val = elt->value;
1872 }
0147184e
RG
1873 }
1874 }
1875 if (val)
9179ed9d 1876 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1877 (vuse, vr->set, vr->type, vr->operands, val);
0147184e
RG
1878 }
1879 }
1880 }
1881
1882 /* 5) For aggregate copies translate the reference through them if
01df5c8a 1883 the copy kills ref. */
3bc27de7
RG
1884 else if (vn_walk_kind == VN_WALKREWRITE
1885 && gimple_assign_single_p (def_stmt)
01df5c8a 1886 && (DECL_P (gimple_assign_rhs1 (def_stmt))
70f34814 1887 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
01df5c8a
RG
1888 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1889 {
1890 tree base2;
a0f79fcc 1891 HOST_WIDE_INT maxsize2;
ee45a32d 1892 int i, j, k;
ef062b13 1893 auto_vec<vn_reference_op_s> rhs;
01df5c8a 1894 vn_reference_op_t vro;
b45d2719 1895 ao_ref r;
01df5c8a 1896
8ea34dab
RG
1897 if (!lhs_ref_ok)
1898 return (void *)-1;
1899
01df5c8a 1900 /* See if the assignment kills REF. */
8ea34dab 1901 base2 = ao_ref_base (&lhs_ref);
9c8cbc74
EB
1902 maxsize2 = lhs_ref.max_size;
1903 if (maxsize2 == -1
ea3eac3a
RB
1904 || (base != base2
1905 && (TREE_CODE (base) != MEM_REF
1906 || TREE_CODE (base2) != MEM_REF
1907 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1908 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1909 TREE_OPERAND (base2, 1))))
a0f79fcc 1910 || !stmt_kills_ref_p (def_stmt, ref))
01df5c8a
RG
1911 return (void *)-1;
1912
8ea34dab
RG
1913 /* Find the common base of ref and the lhs. lhs_ops already
1914 contains valueized operands for the lhs. */
9771b263
DN
1915 i = vr->operands.length () - 1;
1916 j = lhs_ops.length () - 1;
35ecd408 1917 while (j >= 0 && i >= 0
9771b263 1918 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
01df5c8a
RG
1919 {
1920 i--;
1921 j--;
1922 }
35ecd408 1923
25aa059e
RG
1924 /* ??? The innermost op should always be a MEM_REF and we already
1925 checked that the assignment to the lhs kills vr. Thus for
1926 aggregate copies using char[] types the vn_reference_op_eq
1927 may fail when comparing types for compatibility. But we really
1928 don't care here - further lookups with the rewritten operands
1929 will simply fail if we messed up types too badly. */
8403c2cf 1930 HOST_WIDE_INT extra_off = 0;
4f9dbaaa 1931 if (j == 0 && i >= 0
9771b263 1932 && lhs_ops[0].opcode == MEM_REF
8403c2cf
RB
1933 && lhs_ops[0].off != -1)
1934 {
1935 if (lhs_ops[0].off == vr->operands[i].off)
1936 i--, j--;
1937 else if (vr->operands[i].opcode == MEM_REF
1938 && vr->operands[i].off != -1)
1939 {
1940 extra_off = vr->operands[i].off - lhs_ops[0].off;
1941 i--, j--;
1942 }
1943 }
25aa059e 1944
01df5c8a
RG
1945 /* i now points to the first additional op.
1946 ??? LHS may not be completely contained in VR, one or more
1947 VIEW_CONVERT_EXPRs could be in its way. We could at least
1948 try handling outermost VIEW_CONVERT_EXPRs. */
1949 if (j != -1)
1950 return (void *)-1;
01df5c8a 1951
ee45a32d
EB
1952 /* Punt if the additional ops contain a storage order barrier. */
1953 for (k = i; k >= 0; k--)
1954 {
1955 vro = &vr->operands[k];
1956 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1957 return (void *)-1;
1958 }
1959
01df5c8a
RG
1960 /* Now re-write REF to be based on the rhs of the assignment. */
1961 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
8403c2cf
RB
1962
1963 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1964 if (extra_off != 0)
1965 {
1966 if (rhs.length () < 2
1967 || rhs[0].opcode != MEM_REF
1968 || rhs[0].off == -1)
1969 return (void *)-1;
1970 rhs[0].off += extra_off;
1971 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1972 build_int_cst (TREE_TYPE (rhs[0].op0),
1973 extra_off));
1974 }
1975
01df5c8a 1976 /* We need to pre-pend vr->operands[0..i] to rhs. */
26f3a4e1 1977 vec<vn_reference_op_s> old = vr->operands;
9771b263 1978 if (i + 1 + rhs.length () > vr->operands.length ())
01df5c8a 1979 {
9771b263 1980 vr->operands.safe_grow (i + 1 + rhs.length ());
26f3a4e1
RB
1981 if (old == shared_lookup_references)
1982 shared_lookup_references = vr->operands;
01df5c8a
RG
1983 }
1984 else
9771b263
DN
1985 vr->operands.truncate (i + 1 + rhs.length ());
1986 FOR_EACH_VEC_ELT (rhs, j, vro)
1987 vr->operands[i + 1 + j] = *vro;
b55eb410 1988 vr->operands = valueize_refs (vr->operands);
26f3a4e1
RB
1989 if (old == shared_lookup_references)
1990 shared_lookup_references = vr->operands;
01df5c8a 1991 vr->hashcode = vn_reference_compute_hash (vr);
c7ee7b45 1992
8403c2cf
RB
1993 /* Try folding the new reference to a constant. */
1994 tree val = fully_constant_vn_reference_p (vr);
1995 if (val)
1996 return vn_reference_lookup_or_insert_for_pieces
1997 (vuse, vr->set, vr->type, vr->operands, val);
1998
c7ee7b45
RG
1999 /* Adjust *ref from the new operands. */
2000 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2001 return (void *)-1;
2002 /* This can happen with bitfields. */
2003 if (ref->size != r.size)
2004 return (void *)-1;
2005 *ref = r;
2006
2007 /* Do not update last seen VUSE after translating. */
2008 last_vuse_ptr = NULL;
2009
2010 /* Keep looking for the adjusted *REF / VR pair. */
2011 return NULL;
2012 }
2013
0147184e 2014 /* 6) For memcpy copies translate the reference through them if
c7ee7b45
RG
2015 the copy kills ref. */
2016 else if (vn_walk_kind == VN_WALKREWRITE
2017 && is_gimple_reg_type (vr->type)
2018 /* ??? Handle BCOPY as well. */
2019 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2020 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2021 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2022 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2023 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2024 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2025 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
cc269bb6 2026 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
c7ee7b45
RG
2027 {
2028 tree lhs, rhs;
2029 ao_ref r;
2030 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2031 vn_reference_op_s op;
2032 HOST_WIDE_INT at;
2033
c7ee7b45
RG
2034 /* Only handle non-variable, addressable refs. */
2035 if (ref->size != maxsize
2036 || offset % BITS_PER_UNIT != 0
2037 || ref->size % BITS_PER_UNIT != 0)
2038 return (void *)-1;
2039
2040 /* Extract a pointer base and an offset for the destination. */
2041 lhs = gimple_call_arg (def_stmt, 0);
2042 lhs_offset = 0;
2043 if (TREE_CODE (lhs) == SSA_NAME)
e65757f3
RB
2044 {
2045 lhs = SSA_VAL (lhs);
2046 if (TREE_CODE (lhs) == SSA_NAME)
2047 {
355fe088 2048 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
e65757f3
RB
2049 if (gimple_assign_single_p (def_stmt)
2050 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2051 lhs = gimple_assign_rhs1 (def_stmt);
2052 }
2053 }
c7ee7b45
RG
2054 if (TREE_CODE (lhs) == ADDR_EXPR)
2055 {
2056 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2057 &lhs_offset);
2058 if (!tem)
2059 return (void *)-1;
2060 if (TREE_CODE (tem) == MEM_REF
cc269bb6 2061 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
c7ee7b45
RG
2062 {
2063 lhs = TREE_OPERAND (tem, 0);
e65757f3
RB
2064 if (TREE_CODE (lhs) == SSA_NAME)
2065 lhs = SSA_VAL (lhs);
eb1ce453 2066 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
c7ee7b45
RG
2067 }
2068 else if (DECL_P (tem))
2069 lhs = build_fold_addr_expr (tem);
2070 else
2071 return (void *)-1;
2072 }
2073 if (TREE_CODE (lhs) != SSA_NAME
2074 && TREE_CODE (lhs) != ADDR_EXPR)
2075 return (void *)-1;
2076
2077 /* Extract a pointer base and an offset for the source. */
2078 rhs = gimple_call_arg (def_stmt, 1);
2079 rhs_offset = 0;
2080 if (TREE_CODE (rhs) == SSA_NAME)
2081 rhs = SSA_VAL (rhs);
2082 if (TREE_CODE (rhs) == ADDR_EXPR)
2083 {
2084 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2085 &rhs_offset);
2086 if (!tem)
2087 return (void *)-1;
2088 if (TREE_CODE (tem) == MEM_REF
cc269bb6 2089 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
c7ee7b45
RG
2090 {
2091 rhs = TREE_OPERAND (tem, 0);
eb1ce453 2092 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
c7ee7b45
RG
2093 }
2094 else if (DECL_P (tem))
2095 rhs = build_fold_addr_expr (tem);
2096 else
2097 return (void *)-1;
2098 }
2099 if (TREE_CODE (rhs) != SSA_NAME
2100 && TREE_CODE (rhs) != ADDR_EXPR)
2101 return (void *)-1;
2102
eb1ce453 2103 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
c7ee7b45
RG
2104
2105 /* The bases of the destination and the references have to agree. */
2106 if ((TREE_CODE (base) != MEM_REF
2107 && !DECL_P (base))
2108 || (TREE_CODE (base) == MEM_REF
2109 && (TREE_OPERAND (base, 0) != lhs
cc269bb6 2110 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
c7ee7b45
RG
2111 || (DECL_P (base)
2112 && (TREE_CODE (lhs) != ADDR_EXPR
2113 || TREE_OPERAND (lhs, 0) != base)))
2114 return (void *)-1;
2115
c7ee7b45
RG
2116 at = offset / BITS_PER_UNIT;
2117 if (TREE_CODE (base) == MEM_REF)
eb1ce453 2118 at += tree_to_uhwi (TREE_OPERAND (base, 1));
e65757f3
RB
2119 /* If the access is completely outside of the memcpy destination
2120 area there is no aliasing. */
2121 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2122 || lhs_offset + copy_size <= at)
2123 return NULL;
2124 /* And the access has to be contained within the memcpy destination. */
c7ee7b45
RG
2125 if (lhs_offset > at
2126 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2127 return (void *)-1;
2128
2129 /* Make room for 2 operands in the new reference. */
9771b263 2130 if (vr->operands.length () < 2)
c7ee7b45 2131 {
9771b263
DN
2132 vec<vn_reference_op_s> old = vr->operands;
2133 vr->operands.safe_grow_cleared (2);
c7ee7b45
RG
2134 if (old == shared_lookup_references
2135 && vr->operands != old)
26f3a4e1 2136 shared_lookup_references = vr->operands;
c7ee7b45
RG
2137 }
2138 else
9771b263 2139 vr->operands.truncate (2);
c7ee7b45
RG
2140
2141 /* The looked-through reference is a simple MEM_REF. */
2142 memset (&op, 0, sizeof (op));
2143 op.type = vr->type;
2144 op.opcode = MEM_REF;
2145 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2146 op.off = at - lhs_offset + rhs_offset;
9771b263 2147 vr->operands[0] = op;
6d6c9525 2148 op.type = TREE_TYPE (rhs);
c7ee7b45
RG
2149 op.opcode = TREE_CODE (rhs);
2150 op.op0 = rhs;
2151 op.off = -1;
9771b263 2152 vr->operands[1] = op;
c7ee7b45 2153 vr->hashcode = vn_reference_compute_hash (vr);
b45d2719
RG
2154
2155 /* Adjust *ref from the new operands. */
2156 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
01df5c8a 2157 return (void *)-1;
03472fdd
EB
2158 /* This can happen with bitfields. */
2159 if (ref->size != r.size)
2160 return (void *)-1;
b45d2719 2161 *ref = r;
01df5c8a 2162
d0ca0bcb
RG
2163 /* Do not update last seen VUSE after translating. */
2164 last_vuse_ptr = NULL;
2165
01df5c8a
RG
2166 /* Keep looking for the adjusted *REF / VR pair. */
2167 return NULL;
2168 }
2169
2170 /* Bail out and stop walking. */
2171 return (void *)-1;
2172}
2173
c9145754
DB
2174/* Lookup a reference operation by it's parts, in the current hash table.
2175 Returns the resulting value number if it exists in the hash table,
2176 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2177 vn_reference_t stored in the hashtable if something is found. */
89fb70a3
DB
2178
2179tree
b45d2719 2180vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
9771b263 2181 vec<vn_reference_op_s> operands,
3bc27de7 2182 vn_reference_t *vnresult, vn_lookup_kind kind)
c9145754
DB
2183{
2184 struct vn_reference_s vr1;
5006671f 2185 vn_reference_t tmp;
12bd5a1e 2186 tree cst;
5006671f
RG
2187
2188 if (!vnresult)
2189 vnresult = &tmp;
2190 *vnresult = NULL;
01df5c8a 2191
d1c0308e 2192 vr1.vuse = vuse_ssa_val (vuse);
9771b263
DN
2193 shared_lookup_references.truncate (0);
2194 shared_lookup_references.safe_grow (operands.length ());
2195 memcpy (shared_lookup_references.address (),
2196 operands.address (),
01df5c8a 2197 sizeof (vn_reference_op_s)
9771b263 2198 * operands.length ());
01df5c8a
RG
2199 vr1.operands = operands = shared_lookup_references
2200 = valueize_refs (shared_lookup_references);
b45d2719
RG
2201 vr1.type = type;
2202 vr1.set = set;
c9145754 2203 vr1.hashcode = vn_reference_compute_hash (&vr1);
12bd5a1e
RG
2204 if ((cst = fully_constant_vn_reference_p (&vr1)))
2205 return cst;
c9145754 2206
12bd5a1e 2207 vn_reference_lookup_1 (&vr1, vnresult);
5006671f 2208 if (!*vnresult
3bc27de7 2209 && kind != VN_NOWALK
5006671f 2210 && vr1.vuse)
53f3815c 2211 {
b45d2719 2212 ao_ref r;
3bc27de7 2213 vn_walk_kind = kind;
b45d2719 2214 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
01df5c8a 2215 *vnresult =
b45d2719 2216 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
01df5c8a 2217 vn_reference_lookup_2,
92a5094e 2218 vn_reference_lookup_3,
76be46db 2219 vuse_ssa_val, &vr1);
26f3a4e1 2220 gcc_checking_assert (vr1.operands == shared_lookup_references);
53f3815c
RG
2221 }
2222
5006671f
RG
2223 if (*vnresult)
2224 return (*vnresult)->result;
2225
2226 return NULL_TREE;
c9145754
DB
2227}
2228
2229/* Lookup OP in the current hash table, and return the resulting value
2230 number if it exists in the hash table. Return NULL_TREE if it does
2231 not exist in the hash table or if the result field of the structure
2232 was NULL.. VNRESULT will be filled in with the vn_reference_t
2233 stored in the hashtable if one exists. */
2234
2235tree
3bc27de7 2236vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
c9145754 2237 vn_reference_t *vnresult)
89fb70a3 2238{
9771b263 2239 vec<vn_reference_op_s> operands;
89fb70a3 2240 struct vn_reference_s vr1;
12bd5a1e 2241 tree cst;
3ceaf2f5 2242 bool valuezied_anything;
5006671f 2243
c9145754
DB
2244 if (vnresult)
2245 *vnresult = NULL;
89fb70a3 2246
d1c0308e 2247 vr1.vuse = vuse_ssa_val (vuse);
3ceaf2f5
RG
2248 vr1.operands = operands
2249 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
b45d2719
RG
2250 vr1.type = TREE_TYPE (op);
2251 vr1.set = get_alias_set (op);
89fb70a3 2252 vr1.hashcode = vn_reference_compute_hash (&vr1);
12bd5a1e
RG
2253 if ((cst = fully_constant_vn_reference_p (&vr1)))
2254 return cst;
896c8b96 2255
3bc27de7 2256 if (kind != VN_NOWALK
5006671f
RG
2257 && vr1.vuse)
2258 {
2259 vn_reference_t wvnresult;
b45d2719 2260 ao_ref r;
3ceaf2f5
RG
2261 /* Make sure to use a valueized reference if we valueized anything.
2262 Otherwise preserve the full reference for advanced TBAA. */
2263 if (!valuezied_anything
2264 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2265 vr1.operands))
6d6c9525 2266 ao_ref_init (&r, op);
3bc27de7 2267 vn_walk_kind = kind;
5006671f 2268 wvnresult =
b45d2719 2269 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
01df5c8a 2270 vn_reference_lookup_2,
92a5094e 2271 vn_reference_lookup_3,
76be46db 2272 vuse_ssa_val, &vr1);
26f3a4e1 2273 gcc_checking_assert (vr1.operands == shared_lookup_references);
5006671f
RG
2274 if (wvnresult)
2275 {
2276 if (vnresult)
2277 *vnresult = wvnresult;
2278 return wvnresult->result;
2279 }
2280
2281 return NULL_TREE;
896c8b96 2282 }
89fb70a3 2283
5006671f 2284 return vn_reference_lookup_1 (&vr1, vnresult);
89fb70a3
DB
2285}
2286
26f3a4e1
RB
2287/* Lookup CALL in the current hash table and return the entry in
2288 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2289
2290void
538dd0b7 2291vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
26f3a4e1
RB
2292 vn_reference_t vr)
2293{
27732ffd
ML
2294 if (vnresult)
2295 *vnresult = NULL;
2296
26f3a4e1
RB
2297 tree vuse = gimple_vuse (call);
2298
2299 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2300 vr->operands = valueize_shared_reference_ops_from_call (call);
2301 vr->type = gimple_expr_type (call);
2302 vr->set = 0;
2303 vr->hashcode = vn_reference_compute_hash (vr);
2304 vn_reference_lookup_1 (vr, vnresult);
2305}
c9145754 2306
89fb70a3 2307/* Insert OP into the current hash table with a value number of
c9145754 2308 RESULT, and return the resulting reference structure we created. */
89fb70a3 2309
26f3a4e1 2310static vn_reference_t
4ec0a198 2311vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
89fb70a3 2312{
bf190e8d 2313 vn_reference_s **slot;
89fb70a3 2314 vn_reference_t vr1;
39e843e8 2315 bool tem;
89fb70a3 2316
af6a6eec 2317 vr1 = current_info->references_pool->allocate ();
c9145754
DB
2318 if (TREE_CODE (result) == SSA_NAME)
2319 vr1->value_id = VN_INFO (result)->value_id;
2320 else
2321 vr1->value_id = get_or_alloc_constant_value_id (result);
5006671f 2322 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
39e843e8 2323 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
b45d2719
RG
2324 vr1->type = TREE_TYPE (op);
2325 vr1->set = get_alias_set (op);
89fb70a3
DB
2326 vr1->hashcode = vn_reference_compute_hash (vr1);
2327 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
4ec0a198 2328 vr1->result_vdef = vdef;
89fb70a3 2329
c203e8a7
TS
2330 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2331 INSERT);
89fb70a3
DB
2332
2333 /* Because we lookup stores using vuses, and value number failures
2334 using the vdefs (see visit_reference_op_store for how and why),
2335 it's possible that on failure we may try to insert an already
2336 inserted store. This is not wrong, there is no ssa name for a
2337 store that we could use as a differentiator anyway. Thus, unlike
2338 the other lookup functions, you cannot gcc_assert (!*slot)
2339 here. */
2340
8d0eca24
RG
2341 /* But free the old slot in case of a collision. */
2342 if (*slot)
2343 free_reference (*slot);
89fb70a3
DB
2344
2345 *slot = vr1;
c9145754
DB
2346 return vr1;
2347}
2348
2349/* Insert a reference by it's pieces into the current hash table with
2350 a value number of RESULT. Return the resulting reference
2351 structure we created. */
2352
2353vn_reference_t
b45d2719 2354vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
9771b263 2355 vec<vn_reference_op_s> operands,
c9145754
DB
2356 tree result, unsigned int value_id)
2357
2358{
bf190e8d 2359 vn_reference_s **slot;
c9145754
DB
2360 vn_reference_t vr1;
2361
af6a6eec 2362 vr1 = current_info->references_pool->allocate ();
5006671f
RG
2363 vr1->value_id = value_id;
2364 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
c9145754 2365 vr1->operands = valueize_refs (operands);
b45d2719
RG
2366 vr1->type = type;
2367 vr1->set = set;
c9145754
DB
2368 vr1->hashcode = vn_reference_compute_hash (vr1);
2369 if (result && TREE_CODE (result) == SSA_NAME)
2370 result = SSA_VAL (result);
2371 vr1->result = result;
2372
c203e8a7
TS
2373 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2374 INSERT);
b8698a0f 2375
c9145754 2376 /* At this point we should have all the things inserted that we have
5006671f
RG
2377 seen before, and we should never try inserting something that
2378 already exists. */
c9145754
DB
2379 gcc_assert (!*slot);
2380 if (*slot)
2381 free_reference (*slot);
2382
2383 *slot = vr1;
2384 return vr1;
89fb70a3
DB
2385}
2386
49a1fb2d 2387/* Compute and return the hash value for nary operation VBO1. */
89fb70a3 2388
26f3a4e1 2389static hashval_t
49a1fb2d 2390vn_nary_op_compute_hash (const vn_nary_op_t vno1)
89fb70a3 2391{
4e44a6e8 2392 inchash::hash hstate;
49a1fb2d 2393 unsigned i;
89fb70a3 2394
49a1fb2d
RG
2395 for (i = 0; i < vno1->length; ++i)
2396 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2397 vno1->op[i] = SSA_VAL (vno1->op[i]);
89fb70a3 2398
7fd9012e
RB
2399 if (((vno1->length == 2
2400 && commutative_tree_code (vno1->opcode))
2401 || (vno1->length == 3
2402 && commutative_ternary_tree_code (vno1->opcode)))
49a1fb2d 2403 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
6b4db501 2404 std::swap (vno1->op[0], vno1->op[1]);
7fd9012e
RB
2405 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2406 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2407 {
2408 std::swap (vno1->op[0], vno1->op[1]);
2409 vno1->opcode = swap_tree_comparison (vno1->opcode);
2410 }
89fb70a3 2411
4e44a6e8 2412 hstate.add_int (vno1->opcode);
49a1fb2d 2413 for (i = 0; i < vno1->length; ++i)
4e44a6e8 2414 inchash::add_expr (vno1->op[i], hstate);
89fb70a3 2415
4e44a6e8 2416 return hstate.end ();
89fb70a3
DB
2417}
2418
bf190e8d 2419/* Compare nary operations VNO1 and VNO2 and return true if they are
89fb70a3
DB
2420 equivalent. */
2421
bf190e8d
LC
2422bool
2423vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
89fb70a3 2424{
49a1fb2d
RG
2425 unsigned i;
2426
85169114
PB
2427 if (vno1->hashcode != vno2->hashcode)
2428 return false;
2429
5a7d7f9c
RG
2430 if (vno1->length != vno2->length)
2431 return false;
2432
49a1fb2d 2433 if (vno1->opcode != vno2->opcode
63a14fa3 2434 || !types_compatible_p (vno1->type, vno2->type))
49a1fb2d
RG
2435 return false;
2436
2437 for (i = 0; i < vno1->length; ++i)
2438 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2439 return false;
2440
2441 return true;
89fb70a3
DB
2442}
2443
9ad6bebe 2444/* Initialize VNO from the pieces provided. */
89fb70a3 2445
9ad6bebe
NF
2446static void
2447init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
5a7d7f9c 2448 enum tree_code code, tree type, tree *ops)
9ad6bebe
NF
2449{
2450 vno->opcode = code;
2451 vno->length = length;
2452 vno->type = type;
5a7d7f9c 2453 memcpy (&vno->op[0], ops, sizeof (tree) * length);
9ad6bebe
NF
2454}
2455
2456/* Initialize VNO from OP. */
2457
2458static void
2459init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2460{
2461 unsigned i;
2462
2463 vno->opcode = TREE_CODE (op);
2464 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2465 vno->type = TREE_TYPE (op);
2466 for (i = 0; i < vno->length; ++i)
2467 vno->op[i] = TREE_OPERAND (op, i);
2468}
2469
5a7d7f9c
RG
2470/* Return the number of operands for a vn_nary ops structure from STMT. */
2471
2472static unsigned int
355fe088 2473vn_nary_length_from_stmt (gimple *stmt)
5a7d7f9c
RG
2474{
2475 switch (gimple_assign_rhs_code (stmt))
2476 {
2477 case REALPART_EXPR:
2478 case IMAGPART_EXPR:
2479 case VIEW_CONVERT_EXPR:
2480 return 1;
2481
91af9dc9
RG
2482 case BIT_FIELD_REF:
2483 return 3;
2484
5a7d7f9c
RG
2485 case CONSTRUCTOR:
2486 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2487
2488 default:
2489 return gimple_num_ops (stmt) - 1;
2490 }
2491}
2492
9ad6bebe
NF
2493/* Initialize VNO from STMT. */
2494
2495static void
355fe088 2496init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
9ad6bebe
NF
2497{
2498 unsigned i;
2499
2500 vno->opcode = gimple_assign_rhs_code (stmt);
9ad6bebe 2501 vno->type = gimple_expr_type (stmt);
5a7d7f9c
RG
2502 switch (vno->opcode)
2503 {
2504 case REALPART_EXPR:
2505 case IMAGPART_EXPR:
2506 case VIEW_CONVERT_EXPR:
2507 vno->length = 1;
2508 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2509 break;
2510
91af9dc9
RG
2511 case BIT_FIELD_REF:
2512 vno->length = 3;
2513 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2514 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2515 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2516 break;
2517
5a7d7f9c
RG
2518 case CONSTRUCTOR:
2519 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2520 for (i = 0; i < vno->length; ++i)
2521 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2522 break;
2523
2524 default:
91af9dc9 2525 gcc_checking_assert (!gimple_assign_single_p (stmt));
5a7d7f9c
RG
2526 vno->length = gimple_num_ops (stmt) - 1;
2527 for (i = 0; i < vno->length; ++i)
2528 vno->op[i] = gimple_op (stmt, i + 1);
2529 }
9ad6bebe
NF
2530}
2531
2532/* Compute the hashcode for VNO and look for it in the hash table;
2533 return the resulting value number if it exists in the hash table.
2534 Return NULL_TREE if it does not exist in the hash table or if the
2535 result field of the operation is NULL. VNRESULT will contain the
2536 vn_nary_op_t from the hashtable if it exists. */
2537
2538static tree
2539vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
c9145754 2540{
bf190e8d 2541 vn_nary_op_s **slot;
9ad6bebe 2542
c9145754
DB
2543 if (vnresult)
2544 *vnresult = NULL;
9ad6bebe
NF
2545
2546 vno->hashcode = vn_nary_op_compute_hash (vno);
c203e8a7
TS
2547 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2548 NO_INSERT);
c9145754 2549 if (!slot && current_info == optimistic_info)
c203e8a7
TS
2550 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2551 NO_INSERT);
c9145754
DB
2552 if (!slot)
2553 return NULL_TREE;
2554 if (vnresult)
bf190e8d
LC
2555 *vnresult = *slot;
2556 return (*slot)->result;
c9145754
DB
2557}
2558
9ad6bebe
NF
2559/* Lookup a n-ary operation by its pieces and return the resulting value
2560 number if it exists in the hash table. Return NULL_TREE if it does
2561 not exist in the hash table or if the result field of the operation
2562 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2563 if it exists. */
2564
2565tree
2566vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
5a7d7f9c 2567 tree type, tree *ops, vn_nary_op_t *vnresult)
9ad6bebe 2568{
5a7d7f9c
RG
2569 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2570 sizeof_vn_nary_op (length));
2571 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2572 return vn_nary_op_lookup_1 (vno1, vnresult);
9ad6bebe
NF
2573}
2574
c9145754
DB
2575/* Lookup OP in the current hash table, and return the resulting value
2576 number if it exists in the hash table. Return NULL_TREE if it does
2577 not exist in the hash table or if the result field of the operation
2578 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2579 if it exists. */
2580
2581tree
2582vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
89fb70a3 2583{
5a7d7f9c
RG
2584 vn_nary_op_t vno1
2585 = XALLOCAVAR (struct vn_nary_op_s,
2586 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2587 init_vn_nary_op_from_op (vno1, op);
2588 return vn_nary_op_lookup_1 (vno1, vnresult);
89fb70a3
DB
2589}
2590
726a989a
RB
2591/* Lookup the rhs of STMT in the current hash table, and return the resulting
2592 value number if it exists in the hash table. Return NULL_TREE if
2593 it does not exist in the hash table. VNRESULT will contain the
2594 vn_nary_op_t from the hashtable if it exists. */
2595
2596tree
355fe088 2597vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
726a989a 2598{
5a7d7f9c
RG
2599 vn_nary_op_t vno1
2600 = XALLOCAVAR (struct vn_nary_op_s,
2601 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2602 init_vn_nary_op_from_stmt (vno1, stmt);
2603 return vn_nary_op_lookup_1 (vno1, vnresult);
9ad6bebe
NF
2604}
2605
34050b6b
RB
2606/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2607
2608static tree
2609vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2610{
2611 if (!rcode.is_tree_code ())
2612 return NULL_TREE;
2613 vn_nary_op_t vnresult = NULL;
2614 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2615 (tree_code) rcode, type, ops, &vnresult);
2616}
2617
9ad6bebe
NF
2618/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2619
2620static vn_nary_op_t
2621alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2622{
2623 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2624}
2625
2626/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2627 obstack. */
2628
2629static vn_nary_op_t
2630alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2631{
2632 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2633 &current_info->nary_obstack);
2634
2635 vno1->value_id = value_id;
2636 vno1->length = length;
2637 vno1->result = result;
2638
2639 return vno1;
2640}
2641
2642/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2643 VNO->HASHCODE first. */
2644
2645static vn_nary_op_t
c203e8a7 2646vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
bf190e8d 2647 bool compute_hash)
9ad6bebe 2648{
bf190e8d 2649 vn_nary_op_s **slot;
9ad6bebe
NF
2650
2651 if (compute_hash)
2652 vno->hashcode = vn_nary_op_compute_hash (vno);
2653
c203e8a7 2654 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
9ad6bebe
NF
2655 gcc_assert (!*slot);
2656
2657 *slot = vno;
2658 return vno;
726a989a
RB
2659}
2660
c9145754
DB
2661/* Insert a n-ary operation into the current hash table using it's
2662 pieces. Return the vn_nary_op_t structure we created and put in
2663 the hashtable. */
2664
2665vn_nary_op_t
2666vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
5a7d7f9c
RG
2667 tree type, tree *ops,
2668 tree result, unsigned int value_id)
c9145754 2669{
5a7d7f9c
RG
2670 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2671 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
9ad6bebe 2672 return vn_nary_op_insert_into (vno1, current_info->nary, true);
c9145754
DB
2673}
2674
89fb70a3 2675/* Insert OP into the current hash table with a value number of
c9145754
DB
2676 RESULT. Return the vn_nary_op_t structure we created and put in
2677 the hashtable. */
89fb70a3 2678
c9145754 2679vn_nary_op_t
49a1fb2d 2680vn_nary_op_insert (tree op, tree result)
89fb70a3 2681{
49a1fb2d 2682 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
49a1fb2d 2683 vn_nary_op_t vno1;
49a1fb2d 2684
9ad6bebe
NF
2685 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2686 init_vn_nary_op_from_op (vno1, op);
2687 return vn_nary_op_insert_into (vno1, current_info->nary, true);
89fb70a3
DB
2688}
2689
726a989a
RB
2690/* Insert the rhs of STMT into the current hash table with a value number of
2691 RESULT. */
2692
60dd79ca 2693static vn_nary_op_t
355fe088 2694vn_nary_op_insert_stmt (gimple *stmt, tree result)
726a989a 2695{
5a7d7f9c
RG
2696 vn_nary_op_t vno1
2697 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2698 result, VN_INFO (result)->value_id);
9ad6bebe
NF
2699 init_vn_nary_op_from_stmt (vno1, stmt);
2700 return vn_nary_op_insert_into (vno1, current_info->nary, true);
726a989a
RB
2701}
2702
89fb70a3
DB
2703/* Compute a hashcode for PHI operation VP1 and return it. */
2704
2705static inline hashval_t
2706vn_phi_compute_hash (vn_phi_t vp1)
2707{
e6503e0a
RB
2708 inchash::hash hstate (vp1->phiargs.length () > 2
2709 ? vp1->block->index : vp1->phiargs.length ());
89fb70a3 2710 tree phi1op;
1d295886 2711 tree type;
9fe4f60a
RB
2712 edge e;
2713 edge_iterator ei;
89fb70a3 2714
1d295886
RG
2715 /* If all PHI arguments are constants we need to distinguish
2716 the PHI node via its type. */
24d63016 2717 type = vp1->type;
4e44a6e8 2718 hstate.merge_hash (vn_hash_type (type));
1d295886 2719
9fe4f60a 2720 FOR_EACH_EDGE (e, ei, vp1->block->preds)
89fb70a3 2721 {
9fe4f60a
RB
2722 /* Don't hash backedge values they need to be handled as VN_TOP
2723 for optimistic value-numbering. */
2724 if (e->flags & EDGE_DFS_BACK)
2725 continue;
2726
2727 phi1op = vp1->phiargs[e->dest_idx];
89fb70a3
DB
2728 if (phi1op == VN_TOP)
2729 continue;
4e44a6e8 2730 inchash::add_expr (phi1op, hstate);
89fb70a3
DB
2731 }
2732
4e44a6e8 2733 return hstate.end ();
89fb70a3
DB
2734}
2735
e6503e0a 2736
87961d1b
RB
2737/* Return true if COND1 and COND2 represent the same condition, set
2738 *INVERTED_P if one needs to be inverted to make it the same as
2739 the other. */
2740
2741static bool
2742cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2743{
2744 enum tree_code code1 = gimple_cond_code (cond1);
2745 enum tree_code code2 = gimple_cond_code (cond2);
2746 tree lhs1 = gimple_cond_lhs (cond1);
2747 tree lhs2 = gimple_cond_lhs (cond2);
2748 tree rhs1 = gimple_cond_rhs (cond1);
2749 tree rhs2 = gimple_cond_rhs (cond2);
2750
2751 *inverted_p = false;
2752 if (code1 == code2)
2753 ;
2754 else if (code1 == swap_tree_comparison (code2))
2755 std::swap (lhs2, rhs2);
2756 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2757 *inverted_p = true;
2758 else if (code1 == invert_tree_comparison
2759 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2760 {
2761 std::swap (lhs2, rhs2);
2762 *inverted_p = true;
2763 }
2764 else
2765 return false;
2766
94852c8e
RB
2767 lhs1 = vn_valueize (lhs1);
2768 rhs1 = vn_valueize (rhs1);
2769 lhs2 = vn_valueize (lhs2);
2770 rhs2 = vn_valueize (rhs2);
2771 return ((expressions_equal_p (lhs1, lhs2)
2772 && expressions_equal_p (rhs1, rhs2))
2773 || (commutative_tree_code (code1)
2774 && expressions_equal_p (lhs1, rhs2)
2775 && expressions_equal_p (rhs1, lhs2)));
87961d1b
RB
2776}
2777
89fb70a3
DB
2778/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2779
2780static int
bf190e8d 2781vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
89fb70a3 2782{
85169114
PB
2783 if (vp1->hashcode != vp2->hashcode)
2784 return false;
2785
e6503e0a 2786 if (vp1->block != vp2->block)
89fb70a3 2787 {
e6503e0a 2788 if (vp1->phiargs.length () != vp2->phiargs.length ())
1d295886
RG
2789 return false;
2790
e6503e0a 2791 switch (vp1->phiargs.length ())
89fb70a3 2792 {
e6503e0a
RB
2793 case 1:
2794 /* Single-arg PHIs are just copies. */
2795 break;
2796
2797 case 2:
2798 {
2799 /* Rule out backedges into the PHI. */
2800 if (vp1->block->loop_father->header == vp1->block
2801 || vp2->block->loop_father->header == vp2->block)
2802 return false;
2803
2804 /* If the PHI nodes do not have compatible types
2805 they are not the same. */
2806 if (!types_compatible_p (vp1->type, vp2->type))
2807 return false;
2808
2809 basic_block idom1
2810 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2811 basic_block idom2
2812 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2813 /* If the immediate dominator end in switch stmts multiple
2814 values may end up in the same PHI arg via intermediate
2815 CFG merges. */
2816 if (EDGE_COUNT (idom1->succs) != 2
2817 || EDGE_COUNT (idom2->succs) != 2)
2818 return false;
2819
2820 /* Verify the controlling stmt is the same. */
2821 gimple *last1 = last_stmt (idom1);
2822 gimple *last2 = last_stmt (idom2);
2823 if (gimple_code (last1) != GIMPLE_COND
2824 || gimple_code (last2) != GIMPLE_COND)
2825 return false;
87961d1b
RB
2826 bool inverted_p;
2827 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2828 as_a <gcond *> (last2), &inverted_p))
e6503e0a
RB
2829 return false;
2830
2831 /* Get at true/false controlled edges into the PHI. */
2832 edge te1, te2, fe1, fe2;
2833 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2834 &te1, &fe1)
2835 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2836 &te2, &fe2))
2837 return false;
2838
87961d1b
RB
2839 /* Swap edges if the second condition is the inverted of the
2840 first. */
2841 if (inverted_p)
2842 std::swap (te2, fe2);
2843
e6503e0a
RB
2844 /* ??? Handle VN_TOP specially. */
2845 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2846 vp2->phiargs[te2->dest_idx])
2847 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2848 vp2->phiargs[fe2->dest_idx]))
2849 return false;
2850
2851 return true;
2852 }
2853
2854 default:
2855 return false;
89fb70a3 2856 }
89fb70a3 2857 }
e6503e0a
RB
2858
2859 /* If the PHI nodes do not have compatible types
2860 they are not the same. */
2861 if (!types_compatible_p (vp1->type, vp2->type))
2862 return false;
2863
2864 /* Any phi in the same block will have it's arguments in the
2865 same edge order, because of how we store phi nodes. */
2866 int i;
2867 tree phi1op;
2868 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2869 {
2870 tree phi2op = vp2->phiargs[i];
2871 if (phi1op == VN_TOP || phi2op == VN_TOP)
2872 continue;
2873 if (!expressions_equal_p (phi1op, phi2op))
2874 return false;
2875 }
2876
2877 return true;
89fb70a3
DB
2878}
2879
9771b263 2880static vec<tree> shared_lookup_phiargs;
89fb70a3
DB
2881
2882/* Lookup PHI in the current hash table, and return the resulting
2883 value number if it exists in the hash table. Return NULL_TREE if
2884 it does not exist in the hash table. */
2885
de081cfd 2886static tree
355fe088 2887vn_phi_lookup (gimple *phi)
89fb70a3 2888{
bf190e8d 2889 vn_phi_s **slot;
89fb70a3 2890 struct vn_phi_s vp1;
9fe4f60a
RB
2891 edge e;
2892 edge_iterator ei;
89fb70a3 2893
9771b263 2894 shared_lookup_phiargs.truncate (0);
9fe4f60a 2895 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
89fb70a3
DB
2896
2897 /* Canonicalize the SSA_NAME's to their value number. */
9fe4f60a 2898 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
89fb70a3 2899 {
9fe4f60a 2900 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
89fb70a3 2901 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
9fe4f60a 2902 shared_lookup_phiargs[e->dest_idx] = def;
89fb70a3 2903 }
24d63016 2904 vp1.type = TREE_TYPE (gimple_phi_result (phi));
89fb70a3 2905 vp1.phiargs = shared_lookup_phiargs;
726a989a 2906 vp1.block = gimple_bb (phi);
89fb70a3 2907 vp1.hashcode = vn_phi_compute_hash (&vp1);
c203e8a7
TS
2908 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2909 NO_INSERT);
27fa4044 2910 if (!slot && current_info == optimistic_info)
c203e8a7
TS
2911 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2912 NO_INSERT);
89fb70a3
DB
2913 if (!slot)
2914 return NULL_TREE;
bf190e8d 2915 return (*slot)->result;
89fb70a3
DB
2916}
2917
2918/* Insert PHI into the current hash table with a value number of
2919 RESULT. */
2920
c9145754 2921static vn_phi_t
355fe088 2922vn_phi_insert (gimple *phi, tree result)
89fb70a3 2923{
bf190e8d 2924 vn_phi_s **slot;
af6a6eec 2925 vn_phi_t vp1 = current_info->phis_pool->allocate ();
6e1aa848 2926 vec<tree> args = vNULL;
9fe4f60a
RB
2927 edge e;
2928 edge_iterator ei;
2929
2930 args.safe_grow (gimple_phi_num_args (phi));
89fb70a3
DB
2931
2932 /* Canonicalize the SSA_NAME's to their value number. */
9fe4f60a 2933 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
89fb70a3 2934 {
9fe4f60a 2935 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
89fb70a3 2936 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
9fe4f60a 2937 args[e->dest_idx] = def;
89fb70a3 2938 }
c9145754 2939 vp1->value_id = VN_INFO (result)->value_id;
24d63016 2940 vp1->type = TREE_TYPE (gimple_phi_result (phi));
89fb70a3 2941 vp1->phiargs = args;
726a989a 2942 vp1->block = gimple_bb (phi);
89fb70a3
DB
2943 vp1->result = result;
2944 vp1->hashcode = vn_phi_compute_hash (vp1);
2945
c203e8a7 2946 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
89fb70a3
DB
2947
2948 /* Because we iterate over phi operations more than once, it's
2949 possible the slot might already exist here, hence no assert.*/
2950 *slot = vp1;
c9145754 2951 return vp1;
89fb70a3
DB
2952}
2953
2954
2955/* Print set of components in strongly connected component SCC to OUT. */
2956
2957static void
9771b263 2958print_scc (FILE *out, vec<tree> scc)
89fb70a3
DB
2959{
2960 tree var;
2961 unsigned int i;
2962
0eb09f31 2963 fprintf (out, "SCC consists of:");
9771b263 2964 FOR_EACH_VEC_ELT (scc, i, var)
89fb70a3 2965 {
89fb70a3 2966 fprintf (out, " ");
0eb09f31 2967 print_generic_expr (out, var, 0);
89fb70a3
DB
2968 }
2969 fprintf (out, "\n");
2970}
2971
fac40b02
RB
2972/* Return true if BB1 is dominated by BB2 taking into account edges
2973 that are not executable. */
2974
2975static bool
2976dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
2977{
2978 edge_iterator ei;
2979 edge e;
2980
2981 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
2982 return true;
2983
2984 /* Before iterating we'd like to know if there exists a
2985 (executable) path from bb2 to bb1 at all, if not we can
2986 directly return false. For now simply iterate once. */
2987
2988 /* Iterate to the single executable bb1 predecessor. */
2989 if (EDGE_COUNT (bb1->preds) > 1)
2990 {
2991 edge prede = NULL;
2992 FOR_EACH_EDGE (e, ei, bb1->preds)
2993 if (e->flags & EDGE_EXECUTABLE)
2994 {
2995 if (prede)
2996 {
2997 prede = NULL;
2998 break;
2999 }
3000 prede = e;
3001 }
3002 if (prede)
3003 {
3004 bb1 = prede->src;
3005
3006 /* Re-do the dominance check with changed bb1. */
3007 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3008 return true;
3009 }
3010 }
3011
3012 /* Iterate to the single executable bb2 successor. */
3013 edge succe = NULL;
3014 FOR_EACH_EDGE (e, ei, bb2->succs)
3015 if (e->flags & EDGE_EXECUTABLE)
3016 {
3017 if (succe)
3018 {
3019 succe = NULL;
3020 break;
3021 }
3022 succe = e;
3023 }
3024 if (succe)
3025 {
3026 /* Verify the reached block is only reached through succe.
3027 If there is only one edge we can spare us the dominator
3028 check and iterate directly. */
3029 if (EDGE_COUNT (succe->dest->preds) > 1)
3030 {
3031 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3032 if (e != succe
3033 && (e->flags & EDGE_EXECUTABLE))
3034 {
3035 succe = NULL;
3036 break;
3037 }
3038 }
3039 if (succe)
3040 {
3041 bb2 = succe->dest;
3042
3043 /* Re-do the dominance check with changed bb2. */
3044 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3045 return true;
3046 }
3047 }
3048
3049 /* We could now iterate updating bb1 / bb2. */
3050 return false;
3051}
3052
89fb70a3
DB
3053/* Set the value number of FROM to TO, return true if it has changed
3054 as a result. */
3055
3056static inline bool
3057set_ssa_val_to (tree from, tree to)
3058{
90bc4623 3059 tree currval = SSA_VAL (from);
d1de852b 3060 HOST_WIDE_INT toff, coff;
89fb70a3 3061
a764d660
RB
3062 /* The only thing we allow as value numbers are ssa_names
3063 and invariants. So assert that here. We don't allow VN_TOP
3064 as visiting a stmt should produce a value-number other than
3065 that.
3066 ??? Still VN_TOP can happen for unreachable code, so force
3067 it to varying in that case. Not all code is prepared to
3068 get VN_TOP on valueization. */
3069 if (to == VN_TOP)
3070 {
3071 if (dump_file && (dump_flags & TDF_DETAILS))
3072 fprintf (dump_file, "Forcing value number to varying on "
3073 "receiving VN_TOP\n");
3074 to = from;
3075 }
3076
3077 gcc_assert (to != NULL_TREE
703c9ccd
RB
3078 && ((TREE_CODE (to) == SSA_NAME
3079 && (to == from || SSA_VAL (to) == to))
a764d660
RB
3080 || is_gimple_min_invariant (to)));
3081
90bc4623
RG
3082 if (from != to)
3083 {
3084 if (currval == from)
3085 {
3086 if (dump_file && (dump_flags & TDF_DETAILS))
3087 {
3088 fprintf (dump_file, "Not changing value number of ");
3089 print_generic_expr (dump_file, from, 0);
3090 fprintf (dump_file, " from VARYING to ");
3091 print_generic_expr (dump_file, to, 0);
3092 fprintf (dump_file, "\n");
3093 }
3094 return false;
3095 }
3096 else if (TREE_CODE (to) == SSA_NAME
3097 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3098 to = from;
3099 }
fe4fefa0 3100
89fb70a3
DB
3101 if (dump_file && (dump_flags & TDF_DETAILS))
3102 {
3103 fprintf (dump_file, "Setting value number of ");
3104 print_generic_expr (dump_file, from, 0);
3105 fprintf (dump_file, " to ");
3106 print_generic_expr (dump_file, to, 0);
89fb70a3
DB
3107 }
3108
d1de852b
RB
3109 if (currval != to
3110 && !operand_equal_p (currval, to, 0)
3111 /* ??? For addresses involving volatile objects or types operand_equal_p
3112 does not reliably detect ADDR_EXPRs as equal. We know we are only
3113 getting invariant gimple addresses here, so can use
3114 get_addr_base_and_unit_offset to do this comparison. */
3115 && !(TREE_CODE (currval) == ADDR_EXPR
3116 && TREE_CODE (to) == ADDR_EXPR
3117 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3118 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3119 && coff == toff))
89fb70a3 3120 {
e93c66bc
RB
3121 /* If we equate two SSA names we have to make the side-band info
3122 of the leader conservative (and remember whatever original value
3123 was present). */
3124 if (TREE_CODE (to) == SSA_NAME)
3125 {
3126 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3127 && SSA_NAME_RANGE_INFO (to))
3128 {
3129 if (SSA_NAME_IS_DEFAULT_DEF (to)
fac40b02
RB
3130 || dominated_by_p_w_unex
3131 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3132 gimple_bb (SSA_NAME_DEF_STMT (to))))
e93c66bc
RB
3133 /* Keep the info from the dominator. */
3134 ;
3135 else if (SSA_NAME_IS_DEFAULT_DEF (from)
fac40b02
RB
3136 || dominated_by_p_w_unex
3137 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3138 gimple_bb (SSA_NAME_DEF_STMT (from))))
e93c66bc
RB
3139 {
3140 /* Save old info. */
3141 if (! VN_INFO (to)->info.range_info)
fa4511c2
RB
3142 {
3143 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3144 VN_INFO (to)->range_info_anti_range_p
3145 = SSA_NAME_ANTI_RANGE_P (to);
3146 }
e93c66bc
RB
3147 /* Use that from the dominator. */
3148 SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
fa4511c2 3149 SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
e93c66bc
RB
3150 }
3151 else
3152 {
3153 /* Save old info. */
3154 if (! VN_INFO (to)->info.range_info)
fa4511c2
RB
3155 {
3156 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3157 VN_INFO (to)->range_info_anti_range_p
3158 = SSA_NAME_ANTI_RANGE_P (to);
3159 }
e93c66bc
RB
3160 /* Rather than allocating memory and unioning the info
3161 just clear it. */
3162 SSA_NAME_RANGE_INFO (to) = NULL;
3163 }
3164 }
3165 else if (POINTER_TYPE_P (TREE_TYPE (to))
3166 && SSA_NAME_PTR_INFO (to))
3167 {
3168 if (SSA_NAME_IS_DEFAULT_DEF (to)
fac40b02
RB
3169 || dominated_by_p_w_unex
3170 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3171 gimple_bb (SSA_NAME_DEF_STMT (to))))
e93c66bc
RB
3172 /* Keep the info from the dominator. */
3173 ;
3174 else if (SSA_NAME_IS_DEFAULT_DEF (from)
fac40b02
RB
3175 || dominated_by_p_w_unex
3176 (gimple_bb (SSA_NAME_DEF_STMT (to)),
3177 gimple_bb (SSA_NAME_DEF_STMT (from))))
e93c66bc
RB
3178 {
3179 /* Save old info. */
3180 if (! VN_INFO (to)->info.ptr_info)
3181 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3182 /* Use that from the dominator. */
3183 SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3184 }
dd6f2cf9
RB
3185 else if (! SSA_NAME_PTR_INFO (from)
3186 /* Handle the case of trivially equivalent info. */
3187 || memcmp (SSA_NAME_PTR_INFO (to),
3188 SSA_NAME_PTR_INFO (from),
3189 sizeof (ptr_info_def)) != 0)
e93c66bc
RB
3190 {
3191 /* Save old info. */
3192 if (! VN_INFO (to)->info.ptr_info)
3193 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3194 /* Rather than allocating memory and unioning the info
3195 just clear it. */
3196 SSA_NAME_PTR_INFO (to) = NULL;
3197 }
3198 }
3199 }
3200
5006671f 3201 VN_INFO (from)->valnum = to;
8495c94f
RG
3202 if (dump_file && (dump_flags & TDF_DETAILS))
3203 fprintf (dump_file, " (changed)\n");
89fb70a3
DB
3204 return true;
3205 }
8495c94f
RG
3206 if (dump_file && (dump_flags & TDF_DETAILS))
3207 fprintf (dump_file, "\n");
89fb70a3
DB
3208 return false;
3209}
3210
00115921
TV
3211/* Mark as processed all the definitions in the defining stmt of USE, or
3212 the USE itself. */
3213
3214static void
3215mark_use_processed (tree use)
3216{
3217 ssa_op_iter iter;
3218 def_operand_p defp;
355fe088 3219 gimple *stmt = SSA_NAME_DEF_STMT (use);
00115921
TV
3220
3221 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3222 {
3223 VN_INFO (use)->use_processed = true;
3224 return;
3225 }
3226
3227 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3228 {
3229 tree def = DEF_FROM_PTR (defp);
3230
3231 VN_INFO (def)->use_processed = true;
3232 }
3233}
3234
89fb70a3
DB
3235/* Set all definitions in STMT to value number to themselves.
3236 Return true if a value number changed. */
3237
3238static bool
355fe088 3239defs_to_varying (gimple *stmt)
89fb70a3
DB
3240{
3241 bool changed = false;
3242 ssa_op_iter iter;
3243 def_operand_p defp;
3244
3245 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3246 {
3247 tree def = DEF_FROM_PTR (defp);
89fb70a3
DB
3248 changed |= set_ssa_val_to (def, def);
3249 }
3250 return changed;
3251}
3252
3253/* Visit a copy between LHS and RHS, return true if the value number
3254 changed. */
3255
3256static bool
3257visit_copy (tree lhs, tree rhs)
3258{
34050b6b 3259 /* Valueize. */
0d5a1b56 3260 rhs = SSA_VAL (rhs);
89fb70a3
DB
3261
3262 return set_ssa_val_to (lhs, rhs);
3263}
3264
2262707f 3265/* Visit a nary operator RHS, value number it, and return true if the
89fb70a3
DB
3266 value number of LHS has changed as a result. */
3267
3268static bool
355fe088 3269visit_nary_op (tree lhs, gimple *stmt)
89fb70a3
DB
3270{
3271 bool changed = false;
726a989a 3272 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
89fb70a3
DB
3273
3274 if (result)
2262707f 3275 changed = set_ssa_val_to (lhs, result);
726a989a
RB
3276 else
3277 {
3278 changed = set_ssa_val_to (lhs, lhs);
3279 vn_nary_op_insert_stmt (stmt, lhs);
3280 }
3281
3282 return changed;
3283}
3284
3285/* Visit a call STMT storing into LHS. Return true if the value number
3286 of the LHS has changed as a result. */
3287
3288static bool
538dd0b7 3289visit_reference_op_call (tree lhs, gcall *stmt)
89fb70a3
DB
3290{
3291 bool changed = false;
726a989a 3292 struct vn_reference_s vr1;
00115921 3293 vn_reference_t vnresult = NULL;
00115921 3294 tree vdef = gimple_vdef (stmt);
89fb70a3 3295
6867d9a9
TV
3296 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3297 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3298 lhs = NULL_TREE;
3299
26f3a4e1 3300 vn_reference_lookup_call (stmt, &vnresult, &vr1);
00115921 3301 if (vnresult)
89fb70a3 3302 {
4583fada 3303 if (vnresult->result_vdef && vdef)
00115921
TV
3304 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3305
3306 if (!vnresult->result && lhs)
3307 vnresult->result = lhs;
3308
3309 if (vnresult->result && lhs)
34050b6b 3310 changed |= set_ssa_val_to (lhs, vnresult->result);
89fb70a3
DB
3311 }
3312 else
3313 {
726a989a 3314 vn_reference_t vr2;
26f3a4e1 3315 vn_reference_s **slot;
00115921
TV
3316 if (vdef)
3317 changed |= set_ssa_val_to (vdef, vdef);
3318 if (lhs)
3319 changed |= set_ssa_val_to (lhs, lhs);
af6a6eec 3320 vr2 = current_info->references_pool->allocate ();
5006671f 3321 vr2->vuse = vr1.vuse;
26f3a4e1
RB
3322 /* As we are not walking the virtual operand chain we know the
3323 shared_lookup_references are still original so we can re-use
3324 them here. */
3325 vr2->operands = vr1.operands.copy ();
b45d2719
RG
3326 vr2->type = vr1.type;
3327 vr2->set = vr1.set;
726a989a
RB
3328 vr2->hashcode = vr1.hashcode;
3329 vr2->result = lhs;
00115921 3330 vr2->result_vdef = vdef;
c203e8a7
TS
3331 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3332 INSERT);
26f3a4e1 3333 gcc_assert (!*slot);
726a989a 3334 *slot = vr2;
89fb70a3
DB
3335 }
3336
3337 return changed;
3338}
3339
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      mprts_hook = vn_lookup_simplify_result;
      code_helper rcode = VIEW_CONVERT_EXPR;
      tree ops[3] = { result };
      bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
				     vn_valueize);
      mprts_hook = NULL;
      gimple *new_stmt = NULL;
      if (res
	  && gimple_simplified_result_is_gimple_val (rcode, ops))
	/* The expression is already available.  */
	result = ops[0];
      else
	{
	  tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
	  if (!val)
	    {
	      gimple_seq stmts = NULL;
	      result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
					      &stmts);
	      if (result)
		{
		  gcc_assert (gimple_seq_singleton_p (stmts));
		  new_stmt = gimple_seq_first_stmt (stmts);
		}
	    }
	  else
	    /* The expression is already available.  */
	    result = val;
	}
      if (new_stmt)
	{
	  /* The expression is not yet available, value-number lhs to
	     the new SSA_NAME we created.  */
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					      new_stmt);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert_stmt (new_stmt, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert_stmt (new_stmt, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}

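/* Example of the type-punning handling above (illustrative):
     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;
   The lookup for the load is based on offset and size and returns i_1,
   whose type does not match the float load, so the value used for f_2
   is VIEW_CONVERT_EXPR <float> (i_1).  */
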
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
    }

  if ((!result || !resultsame)
      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      && default_vn_walk_kind == VN_WALK)
    {
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
      if (vnresult)
	{
	  VN_INFO (vdef)->use_processed = true;
	  return set_ssa_val_to (vdef, vnresult->result_vdef);
	}
    }

  if (!result || !resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	{
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}

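/* Example (with illustrative SSA names): in
     # .MEM_4 = VDEF <.MEM_3>
     *p_1 = x_2;
     ...
     # .MEM_6 = VDEF <.MEM_4>
     *p_1 = x_2;
   the second store stores the value the location already holds, so
   .MEM_6 is value numbered to the value of its vuse .MEM_4 and later
   passes can remove the redundant store.  */
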
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple *phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned n_executable = 0;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	++n_executable;
	if (TREE_CODE (def) == SSA_NAME)
	  def = SSA_VAL (def);
	if (def == VN_TOP)
	  continue;
	if (sameval == VN_TOP)
	  sameval = def;
	else if (!expressions_equal_p (def, sameval))
	  {
	    allsame = false;
	    break;
	  }
      }

  /* If none of the edges was executable or all incoming values are
     undefined keep the value-number at VN_TOP.  If only a single edge
     is executable use its value.  */
  if (sameval == VN_TOP
      || n_executable == 1)
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  result = vn_phi_lookup (phi);
  if (result)
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
  /* Otherwise all value numbered to the same value, the phi node has that
     value.  */
  else if (allsame)
    changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

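/* Example (with illustrative SSA names): for
     x_1 = PHI <a_2(3), a_2(4)>
   all executable incoming values agree, so x_1 is value numbered to
   a_2; arguments on not-executable edges and VN_TOP arguments are
   ignored in that comparison.  */
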
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}

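/* Example (illustrative): if the lattice already records x_1 == 2,
   then for
     y_2 = x_1 + 3;
   folding with vn_valueize as the valueization hook simplifies the
   rhs to 5 and y_2 is value numbered to that constant.  */
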
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified, 0);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      if (!gimple_call_internal_p (call_stmt)
	  && (/* Calls to the same function with the same vuse
		 and the same operands do not necessarily return the same
		 value, unless they're pure or const.  */
	      gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
	      /* If calls have a vdef, subsequent calls won't have
		 the same incoming vuse.  So, if 2 calls with vdef have the
		 same vuse, we know they're not subsequent.
		 We can value number 2 non-subsequent calls to the same
		 function with the same vuse and the same operands the
		 same, because there is no code in the program that can
		 compare the 2 values...  */
	      || (gimple_vdef (call_stmt)
		  /* ... unless the call returns a pointer which does
		     not alias with anything else.  In which case the
		     information that the values are distinct is encoded
		     in the IL.  */
		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
		  /* Only perform the following when being called from PRE
		     which embeds tail merging.  */
		  && default_vn_walk_kind == VN_WALK)))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
 done:
  return changed;
}

/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple *opstmta = SSA_NAME_DEF_STMT (opa);
  gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

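/* The comparator above yields a total order: names without a defining
   statement come first, then names ordered by the RPO number of their
   defining block; within one block PHIs precede other statements,
   which are ordered by gimple uid, and SSA version numbers break the
   remaining ties.  */
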
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}

/* Insert the no longer used nary ONARY into the hash tables of INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no longer used phi OPHI into the hash tables of INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF into the hash tables of INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
	gcc_assert (!VN_INFO (var)->needs_insertion
		    && VN_INFO (var)->expr == NULL);
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}

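/* Example of the optimistic iteration (illustrative): for the SCC
   {i_1, i_2} in
     i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 0;
   the first iteration sees i_2 still at VN_TOP, so the PHI
   optimistically gets value 0 and i_2 then simplifies to 0 as well;
   the second iteration changes nothing, so the SCC converges with
   both names value numbered to 0.  */
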
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", scc.length (),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);

  return true;
}

/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple *defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}

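/* The function above is Tarjan's SCC algorithm in iterative form:
   dfsnum and low play the role of Tarjan's index and lowlink,
   sccstack is the SCC stack, and the usual recursion is replaced by
   the explicit namevec/itervec stacks so that deep SSA use-def chains
   cannot overflow the call stack.  */
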
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>
    ("VN references");
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
}

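/* Initialize the SCC value numbering data structures.  */
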
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  mark_dfs_back_edges ();

  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  current_info = valid_info;

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP or VARYING for parameters.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (!name)
	continue;

      VN_INFO_GET (name)->valnum = VN_TOP;
      VN_INFO (name)->needs_insertion = false;
      VN_INFO (name)->expr = NULL;
      VN_INFO (name)->value_id = 0;

      if (!SSA_NAME_IS_DEFAULT_DEF (name))
	continue;

      switch (TREE_CODE (SSA_NAME_VAR (name)))
	{
	case VAR_DECL:
	  /* Undefined vars keep TOP.  */
	  break;

	case PARM_DECL:
	  /* Parameters are VARYING but we can record a condition
	     if we know it is a non-NULL pointer.  */
	  VN_INFO (name)->visited = true;
	  VN_INFO (name)->valnum = name;
	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && nonnull_arg_p (SSA_NAME_VAR (name)))
	    {
	      tree ops[2];
	      ops[0] = name;
	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
					boolean_true_node, 0);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Recording ");
		  print_generic_expr (dump_file, name, TDF_SLIM);
		  fprintf (dump_file, " != 0\n");
		}
	    }
	  break;

	case RESULT_DECL:
	  /* If the result is passed by invisible reference the default
	     def is initialized, otherwise it's uninitialized.  */
	  if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
	    {
	      VN_INFO (name)->visited = true;
	      VN_INFO (name)->valnum = name;
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}

void
free_scc_vn (void)
{
  size_t i;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && has_VN_INFO (name))
	{
	  if (VN_INFO (name)->needs_insertion)
	    release_ssa_name (name);
	  else if (POINTER_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.ptr_info)
	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
		   && VN_INFO (name)->info.range_info)
	    {
	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
	      SSA_NAME_ANTI_RANGE_P (name)
		= VN_INFO (name)->range_info_anti_range_p;
	    }
	}
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);

  BITMAP_FREE (const_parms);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

class sccvn_dom_walker : public dom_walker
{
public:
  sccvn_dom_walker ()
    : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
  ~sccvn_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  void record_cond (basic_block,
		    enum tree_code code, tree lhs, tree rhs, bool value);
  void record_conds (basic_block,
		     enum tree_code code, tree lhs, tree rhs, bool value);

  bool fail;
  vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
    cond_stack;
};

sccvn_dom_walker::~sccvn_dom_walker ()
{
  cond_stack.release ();
}

/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
			       enum tree_code code, tree lhs, tree rhs,
			       bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
				value
				? boolean_true_node
				: boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
	       value ? "true" : "false",
	       old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}

/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
				enum tree_code code, tree lhs, tree rhs,
				bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;

    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}

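/* Example (with illustrative SSA names): walking into the true arm of
     if (a_1 < b_2)
   records a_1 < b_2 == true together with the derived conditions
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false;
   after_dom_children pops them again once the walk leaves the
   dominated region.  */
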
/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
	 && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
	vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}

/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (fail)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = NULL;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* Ignore simple backedges from this block to allow recording
	 conditions in loop headers.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;
      if (! pred_e)
	pred_e = e;
      else
	{
	  pred_e = NULL;
	  break;
	}
    }
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
	if (e2 != pred_e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple *stmt = last_stmt (pred_e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      record_conds (bb, code, lhs, rhs,
			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited
	  && !DFS (res))
	{
	  fail = true;
	  return NULL;
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited
	    && !DFS (op))
	  {
	    fail = true;
	    return NULL;
	  }
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
	val = gimple_simplify (gimple_cond_code (stmt),
			       boolean_type_node, lhs, rhs,
			       NULL, vn_valueize);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = lhs;
	    ops[1] = rhs;
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
	   arg; arg = DECL_CHAIN (arg), ++i)
	{
	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
	    {
	      tree name = ssa_default_def (cfun, arg);
	      if (name)
		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
	    }
	}
    }

  /* Walk all blocks in dominator order, value-numbering the SSA defs
     of stmts and deciding whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      free_scc_vn ();
      return false;
    }

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (!info->visited)
	info->valnum = name;
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}