/* SCC value numbering for trees
   Copyright (C) 2006-2017 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
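
/* A small illustrative sketch (added for exposition; the GIMPLE below is
   hypothetical and not produced by this file): in the straight-line code

     a_1 = b_2 + 1;
     c_3 = b_2 + 1;

   hash-based value numbering assigns a_1 and c_3 the same value number,
   so uses of c_3 can be canonicalized to a_1.  For a cycle such as

     # i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the optimistic table first assumes i_2 has the same value as i_1 and
   the SCC is re-iterated until the value numbers no longer change.  */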

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
bitmap const_parms;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      tree tem = SSA_VAL (x);
      /* stmt walking can walk over a backedge and reach code we didn't
         value-number yet.  */
      if (tem == VN_TOP)
        return x;
      x = tem;
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
              || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
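
/* For illustration only (hypothetical GIMPLE, not part of this file):
   "x_1 = y_2 + z_3" is classified as VN_NARY, "x_1 = a.b" as
   VN_REFERENCE, "x_1 = 42" as VN_CONSTANT, a call as VN_REFERENCE
   and a PHI node as VN_PHI.  */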

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (maybe_ne (vro->off, -1))
        {
          if (known_eq (off, -1))
            off = 0;
          off += vro->off;
        }
      else
        {
          if (maybe_ne (off, -1)
              && maybe_ne (off, 0))
            hstate.add_poly_int (off);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  hstate.add_int (TREE_CODE (op));
                  inchash::add_expr (op, hstate);
                }
            }
          else
            vn_reference_op_compute_hash (vro, hstate);
        }
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          /* Do not look through a storage order barrier.  */
          else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
            return false;
          if (known_eq (vro1->off, -1))
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          /* Do not look through a storage order barrier.  */
          else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
            return false;
          if (known_eq (vro2->off, -1))
            break;
          off2 += vro2->off;
        }
      if (maybe_ne (off1, off2))
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          {
            offset_int off = mem_ref_offset (ref);
            if (wi::fits_shwi_p (off))
              temp.off = off.to_shwi ();
          }
          temp.clique = MR_DEPENDENCE_CLIQUE (ref);
          temp.base = MR_DEPENDENCE_BASE (ref);
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case BIT_FIELD_REF:
          /* Record bits, position and storage order.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
            {
              HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
              if (off % BITS_PER_UNIT == 0)
                temp.off = off / BITS_PER_UNIT;
            }
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && poly_int_tree_p (this_offset))
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    poly_offset_int off
                      = (wi::to_poly_offset (this_offset)
                         + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
                    /* Prohibit value-numbering zero offset components
                       of addresses the same before the pass folding
                       __builtin_object_size had a chance to run
                       (checking cfun->after_inlining does the
                       trick here).  */
                    if (TREE_CODE (orig) != ADDR_EXPR
                        || maybe_ne (off, 0)
                        || cfun->after_inlining)
                      off.to_shwi (&temp.off);
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          {
            tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
            /* Record index as operand.  */
            temp.op0 = TREE_OPERAND (ref, 1);
            /* Always record lower bounds and element size.  */
            temp.op1 = array_ref_low_bound (ref);
            /* But record element size in units of the type alignment.  */
            temp.op2 = TREE_OPERAND (ref, 3);
            temp.align = eltype->type_common.align;
            if (! temp.op2)
              temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
                                     size_int (TYPE_ALIGN_UNIT (eltype)));
            if (poly_int_tree_p (temp.op0)
                && poly_int_tree_p (temp.op1)
                && TREE_CODE (temp.op2) == INTEGER_CST)
              {
                poly_offset_int off = ((wi::to_poly_offset (temp.op0)
                                        - wi::to_poly_offset (temp.op1))
                                       * wi::to_offset (temp.op2)
                                       * vn_ref_op_align_unit (&temp));
                off.to_shwi (&temp.off);
              }
          }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          break;
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
          temp.off = 0;
          break;
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          temp.reverse = storage_order_barrier_p (ref);
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
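
/* As a sketch of the encoding (hypothetical operands, added for
   exposition): the reference a.f[i_1] is decomposed outermost-first into

     { ARRAY_REF, op0 = i_1, op1 = lower bound, op2 = scaled elt size }
     { COMPONENT_REF, op0 = FIELD_DECL f }
     { MEM_REF, op0 = 0 }  and  { ADDR_EXPR, op0 = &a }

   where the last two records stem from the decl canonicalization to
   MEM[&a] above.  Each record carries a byte offset in OFF, or -1 when
   it is not a compile-time constant (as for the variable index i_1).  */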

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (known_eq (pop->off, -1))
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
          MR_DEPENDENCE_BASE (*op0_p) = op->base;
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += wi::to_offset (op->op1);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */
            tree this_offset = DECL_FIELD_OFFSET (field);

            if (op->op1 || !poly_int_tree_p (this_offset))
              max_size = -1;
            else
              {
                poly_offset_int woffset = (wi::to_poly_offset (this_offset)
                                           << LOG2_BITS_PER_UNIT);
                woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
                offset += woffset;
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!poly_int_tree_p (op->op0)
              || !poly_int_tree_p (op->op1)
              || TREE_CODE (op->op2) != INTEGER_CST)
            max_size = -1;
          else
            {
              poly_offset_int woffset
                = wi::sext (wi::to_poly_offset (op->op0)
                            - wi::to_poly_offset (op->op1),
                            TYPE_PRECISION (TREE_TYPE (op->op0)));
              woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
              woffset <<= LOG2_BITS_PER_UNIT;
              offset += woffset;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
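
/* A hedged example of the result (values assumed, not computed here):
   for operands describing MEM[&a + 4] of type int, the ao_ref gets base
   "a", offset 4 * BITS_PER_UNIT and size == max_size == the bit size of
   int, while a variable array index instead forces max_size to -1
   (unknown extent).  */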

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
        = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
                                  SIGNED)
           + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
        mem_op->off = tree_to_shwi (mem_op->op0);
      else
        mem_op->off = -1;
      return true;
    }
  return false;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
         the reference tree from the address operation in case the current
         dereference isn't offsetted.  */
      if (!addr_base
          && *i_p == ops->length () - 1
          && known_eq (off, 0)
          /* This makes us disable this transform for PRE where the
             reference ops might be also used for code insertion which
             is invalid.  */
          && default_vn_walk_kind == VN_WALKREWRITE)
        {
          auto_vec<vn_reference_op_s, 32> tem;
          copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
          /* Make sure to preserve TBAA info.  The only objects not
             wrapped in MEM_REFs that can have their address taken are
             STRING_CSTs.  */
          if (tem.length () >= 2
              && tem[tem.length () - 2].opcode == MEM_REF)
            {
              vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
              new_mem_op->op0
                = wide_int_to_tree (TREE_TYPE (mem_op->op0),
                                    wi::to_poly_wide (new_mem_op->op0));
            }
          else
            gcc_assert (tem.last ().opcode == STRING_CST);
          ops->pop ();
          ops->pop ();
          ops->safe_splice (tem);
          --*i_p;
          return true;
        }
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
1296
12bd5a1e
RG
1297/* Optimize the reference REF to a constant if possible or return
1298 NULL_TREE if not. */
1299
1300tree
1301fully_constant_vn_reference_p (vn_reference_t ref)
1302{
9771b263 1303 vec<vn_reference_op_s> operands = ref->operands;
12bd5a1e
RG
1304 vn_reference_op_t op;
1305
1306 /* Try to simplify the translated expression if it is
1307 a call to a builtin function with at most two arguments. */
9771b263 1308 op = &operands[0];
12bd5a1e
RG
1309 if (op->opcode == CALL_EXPR
1310 && TREE_CODE (op->op0) == ADDR_EXPR
1311 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1312 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
9771b263
DN
1313 && operands.length () >= 2
1314 && operands.length () <= 3)
12bd5a1e
RG
1315 {
1316 vn_reference_op_t arg0, arg1 = NULL;
1317 bool anyconst = false;
9771b263
DN
1318 arg0 = &operands[1];
1319 if (operands.length () > 2)
1320 arg1 = &operands[2];
12bd5a1e
RG
1321 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1322 || (arg0->opcode == ADDR_EXPR
1323 && is_gimple_min_invariant (arg0->op0)))
1324 anyconst = true;
1325 if (arg1
1326 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1327 || (arg1->opcode == ADDR_EXPR
1328 && is_gimple_min_invariant (arg1->op0))))
1329 anyconst = true;
1330 if (anyconst)
1331 {
1332 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1333 arg1 ? 2 : 1,
1334 arg0->op0,
1335 arg1 ? arg1->op0 : NULL);
1336 if (folded
1337 && TREE_CODE (folded) == NOP_EXPR)
1338 folded = TREE_OPERAND (folded, 0);
1339 if (folded
1340 && is_gimple_min_invariant (folded))
1341 return folded;
1342 }
1343 }
1344
8403c2cf
RB
1345 /* Simplify reads from constants or constant initializers. */
1346 else if (BITS_PER_UNIT == 8
1347 && is_gimple_reg_type (ref->type)
1348 && (!INTEGRAL_TYPE_P (ref->type)
1349 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
12bd5a1e 1350 {
b9c25734 1351 poly_int64 off = 0;
552b2afe
JJ
1352 HOST_WIDE_INT size;
1353 if (INTEGRAL_TYPE_P (ref->type))
1354 size = TYPE_PRECISION (ref->type);
1355 else
1356 size = tree_to_shwi (TYPE_SIZE (ref->type));
8403c2cf
RB
1357 if (size % BITS_PER_UNIT != 0
1358 || size > MAX_BITSIZE_MODE_ANY_MODE)
1359 return NULL_TREE;
1360 size /= BITS_PER_UNIT;
1361 unsigned i;
1362 for (i = 0; i < operands.length (); ++i)
1363 {
13e88953
RB
1364 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1365 {
1366 ++i;
1367 break;
1368 }
b9c25734 1369 if (known_eq (operands[i].off, -1))
8403c2cf
RB
1370 return NULL_TREE;
1371 off += operands[i].off;
1372 if (operands[i].opcode == MEM_REF)
1373 {
1374 ++i;
1375 break;
1376 }
1377 }
1378 vn_reference_op_t base = &operands[--i];
1379 tree ctor = error_mark_node;
1380 tree decl = NULL_TREE;
1381 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1382 ctor = base->op0;
1383 else if (base->opcode == MEM_REF
1384 && base[1].opcode == ADDR_EXPR
1385 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
6a248fce
RB
1386 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1387 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
8403c2cf
RB
1388 {
1389 decl = TREE_OPERAND (base[1].op0, 0);
6a248fce
RB
1390 if (TREE_CODE (decl) == STRING_CST)
1391 ctor = decl;
1392 else
1393 ctor = ctor_for_folding (decl);
8403c2cf
RB
1394 }
1395 if (ctor == NULL_TREE)
1396 return build_zero_cst (ref->type);
1397 else if (ctor != error_mark_node)
1398 {
b9c25734 1399 HOST_WIDE_INT const_off;
8403c2cf
RB
1400 if (decl)
1401 {
1402 tree res = fold_ctor_reference (ref->type, ctor,
1403 off * BITS_PER_UNIT,
1404 size * BITS_PER_UNIT, decl);
1405 if (res)
1406 {
1407 STRIP_USELESS_TYPE_CONVERSION (res);
1408 if (is_gimple_min_invariant (res))
1409 return res;
1410 }
1411 }
b9c25734 1412 else if (off.is_constant (&const_off))
8403c2cf
RB
1413 {
1414 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
b9c25734 1415 int len = native_encode_expr (ctor, buf, size, const_off);
1ff0a84c
JJ
1416 if (len > 0)
1417 return native_interpret_expr (ref->type, buf, len);
8403c2cf
RB
1418 }
1419 }
12bd5a1e
RG
1420 }
1421
1422 return NULL_TREE;
1423}
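
/* A hedged example (assumed declarations, added for exposition): given
   static const int tbl[2] = { 1, 2 };, a value-numbered read of tbl[1]
   reaches the initializer via ctor_for_folding and simplifies to the
   constant 2.  When no decl is available, the constant bytes are
   serialized with native_encode_expr and reinterpreted in the type of
   the reference.  */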
1424
ee45a32d
EB
1425/* Return true if OPS contain a storage order barrier. */
1426
1427static bool
1428contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1429{
1430 vn_reference_op_t op;
1431 unsigned i;
1432
1433 FOR_EACH_VEC_ELT (ops, i, op)
1434 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1435 return true;
1436
1437 return false;
1438}
1439
89fb70a3
DB
1440/* Transform any SSA_NAME's in a vector of vn_reference_op_s
1441 structures into their value numbers. This is done in-place, and
3ceaf2f5
RG
1442 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1443 whether any operands were valueized. */
89fb70a3 1444
9771b263
DN
1445static vec<vn_reference_op_s>
1446valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
89fb70a3
DB
1447{
1448 vn_reference_op_t vro;
aa7069aa 1449 unsigned int i;
89fb70a3 1450
3ceaf2f5
RG
1451 *valueized_anything = false;
1452
9771b263 1453 FOR_EACH_VEC_ELT (orig, i, vro)
89fb70a3
DB
1454 {
1455 if (vro->opcode == SSA_NAME
1456 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
c9145754 1457 {
3ceaf2f5
RG
1458 tree tem = SSA_VAL (vro->op0);
1459 if (tem != vro->op0)
1460 {
1461 *valueized_anything = true;
1462 vro->op0 = tem;
1463 }
c9145754
DB
1464 /* If it transforms from an SSA_NAME to a constant, update
1465 the opcode. */
1466 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1467 vro->opcode = TREE_CODE (vro->op0);
1468 }
aa7069aa 1469 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
3ceaf2f5
RG
1470 {
1471 tree tem = SSA_VAL (vro->op1);
1472 if (tem != vro->op1)
1473 {
1474 *valueized_anything = true;
1475 vro->op1 = tem;
1476 }
1477 }
aa7069aa 1478 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
3ceaf2f5
RG
1479 {
1480 tree tem = SSA_VAL (vro->op2);
1481 if (tem != vro->op2)
1482 {
1483 *valueized_anything = true;
1484 vro->op2 = tem;
1485 }
1486 }
70f34814
RG
1487 /* If it transforms from an SSA_NAME to an address, fold with
1488 a preceding indirect reference. */
1489 if (i > 0
1490 && vro->op0
1491 && TREE_CODE (vro->op0) == ADDR_EXPR
9771b263 1492 && orig[i - 1].opcode == MEM_REF)
a0f79fcc
RB
1493 {
1494 if (vn_reference_fold_indirect (&orig, &i))
1495 *valueized_anything = true;
1496 }
a03a9774
RG
1497 else if (i > 0
1498 && vro->opcode == SSA_NAME
9771b263 1499 && orig[i - 1].opcode == MEM_REF)
a0f79fcc
RB
1500 {
1501 if (vn_reference_maybe_forwprop_address (&orig, &i))
1502 *valueized_anything = true;
1503 }
70f34814
RG
1504 /* If it transforms a non-constant ARRAY_REF into a constant
1505 one, adjust the constant offset. */
1506 else if (vro->opcode == ARRAY_REF
b9c25734
RS
1507 && known_eq (vro->off, -1)
1508 && poly_int_tree_p (vro->op0)
1509 && poly_int_tree_p (vro->op1)
70f34814
RG
1510 && TREE_CODE (vro->op2) == INTEGER_CST)
1511 {
b9c25734
RS
1512 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1513 - wi::to_poly_offset (vro->op1))
1514 * wi::to_offset (vro->op2)
1515 * vn_ref_op_align_unit (vro));
1516 off.to_shwi (&vro->off);
70f34814 1517 }
89fb70a3
DB
1518 }
1519
1520 return orig;
1521}
1522
9771b263
DN
1523static vec<vn_reference_op_s>
1524valueize_refs (vec<vn_reference_op_s> orig)
3ceaf2f5
RG
1525{
1526 bool tem;
1527 return valueize_refs_1 (orig, &tem);
1528}
1529
9771b263 1530static vec<vn_reference_op_s> shared_lookup_references;
aa7069aa
RG
1531
1532/* Create a vector of vn_reference_op_s structures from REF, a
1533 REFERENCE_CLASS_P tree. The vector is shared among all callers of
3ceaf2f5
RG
1534 this function. *VALUEIZED_ANYTHING will specify whether any
1535 operands were valueized. */
aa7069aa 1536
9771b263 1537static vec<vn_reference_op_s>
3ceaf2f5 1538valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
aa7069aa
RG
1539{
1540 if (!ref)
6e1aa848 1541 return vNULL;
9771b263 1542 shared_lookup_references.truncate (0);
aa7069aa 1543 copy_reference_ops_from_ref (ref, &shared_lookup_references);
3ceaf2f5
RG
1544 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1545 valueized_anything);
aa7069aa
RG
1546 return shared_lookup_references;
1547}
1548
1549/* Create a vector of vn_reference_op_s structures from CALL, a
1550 call statement. The vector is shared among all callers of
1551 this function. */
1552
9771b263 1553static vec<vn_reference_op_s>
538dd0b7 1554valueize_shared_reference_ops_from_call (gcall *call)
aa7069aa
RG
1555{
1556 if (!call)
6e1aa848 1557 return vNULL;
9771b263 1558 shared_lookup_references.truncate (0);
aa7069aa
RG
1559 copy_reference_ops_from_call (call, &shared_lookup_references);
1560 shared_lookup_references = valueize_refs (shared_lookup_references);
1561 return shared_lookup_references;
1562}
1563
896c8b96
RG
1564/* Lookup a SCCVN reference operation VR in the current hash table.
1565 Returns the resulting value number if it exists in the hash table,
c9145754
DB
1566 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1567 vn_reference_t stored in the hashtable if something is found. */
896c8b96
RG
1568
1569static tree
c9145754 1570vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
896c8b96 1571{
bf190e8d 1572 vn_reference_s **slot;
896c8b96
RG
1573 hashval_t hash;
1574
1575 hash = vr->hashcode;
c203e8a7 1576 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
896c8b96 1577 if (!slot && current_info == optimistic_info)
c203e8a7 1578 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
896c8b96 1579 if (slot)
c9145754
DB
1580 {
1581 if (vnresult)
1582 *vnresult = (vn_reference_t)*slot;
1583 return ((vn_reference_t)*slot)->result;
1584 }
b8698a0f 1585
896c8b96
RG
1586 return NULL_TREE;
1587}
1588
5006671f
RG
1589/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1590 with the current VUSE and performs the expression lookup. */
1591
1592static void *
9bb06c2a
RG
1593vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1594 unsigned int cnt, void *vr_)
5006671f
RG
1595{
1596 vn_reference_t vr = (vn_reference_t)vr_;
bf190e8d 1597 vn_reference_s **slot;
5006671f
RG
1598 hashval_t hash;
1599
9bb06c2a
RG
1600 /* This bounds the stmt walks we perform on reference lookups
1601 to O(1) instead of O(N) where N is the number of dominating
1602 stores. */
1603 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1604 return (void *)-1;
1605
d0ca0bcb
RG
1606 if (last_vuse_ptr)
1607 *last_vuse_ptr = vuse;
1608
5006671f 1609 /* Fixup vuse and hash. */
9708c51d
RG
1610 if (vr->vuse)
1611 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
d1c0308e 1612 vr->vuse = vuse_ssa_val (vuse);
9708c51d
RG
1613 if (vr->vuse)
1614 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
5006671f
RG
1615
1616 hash = vr->hashcode;
c203e8a7 1617 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
5006671f 1618 if (!slot && current_info == optimistic_info)
c203e8a7 1619 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
5006671f
RG
1620 if (slot)
1621 return *slot;
b8698a0f 1622
5006671f
RG
1623 return NULL;
1624}
c9145754 1625
b55eb410
RG
1626/* Lookup an existing or insert a new vn_reference entry into the
1627 value table for the VUSE, SET, TYPE, OPERANDS reference which
9179ed9d 1628 has the value VALUE which is either a constant or an SSA name. */
b55eb410
RG
1629
1630static vn_reference_t
9179ed9d
RG
1631vn_reference_lookup_or_insert_for_pieces (tree vuse,
1632 alias_set_type set,
1633 tree type,
9771b263
DN
1634 vec<vn_reference_op_s,
1635 va_heap> operands,
9179ed9d 1636 tree value)
b55eb410 1637{
703c9ccd 1638 vn_reference_s vr1;
b55eb410 1639 vn_reference_t result;
9179ed9d 1640 unsigned value_id;
b55eb410
RG
1641 vr1.vuse = vuse;
1642 vr1.operands = operands;
1643 vr1.type = type;
1644 vr1.set = set;
1645 vr1.hashcode = vn_reference_compute_hash (&vr1);
1646 if (vn_reference_lookup_1 (&vr1, &result))
1647 return result;
9179ed9d
RG
1648 if (TREE_CODE (value) == SSA_NAME)
1649 value_id = VN_INFO (value)->value_id;
1650 else
1651 value_id = get_or_alloc_constant_value_id (value);
b55eb410 1652 return vn_reference_insert_pieces (vuse, set, type,
9771b263 1653 operands.copy (), value, value_id);
b55eb410
RG
1654}
1655
64ea4e15 1656static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
343ae898 1657static unsigned mprts_hook_cnt;
64ea4e15
RB
1658
1659/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1660
1661static tree
204b99cd 1662vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
64ea4e15
RB
1663{
1664 if (!rcode.is_tree_code ())
1665 return NULL_TREE;
204b99cd
RB
1666 tree *ops = ops_;
1667 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1668 if (rcode == CONSTRUCTOR
1669 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
1670 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
1671 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1672 {
1673 length = CONSTRUCTOR_NELTS (ops_[0]);
1674 ops = XALLOCAVEC (tree, length);
1675 for (unsigned i = 0; i < length; ++i)
1676 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1677 }
64ea4e15 1678 vn_nary_op_t vnresult = NULL;
204b99cd
RB
1679 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1680 type, ops, &vnresult);
343ae898
RB
1681 /* We can end up endlessly recursing simplifications if the lookup above
1682 presents us with a def-use chain that mirrors the original simplification.
1683 See PR80887 for an example. Limit successful lookup artificially
1684 to 10 times if we are called as mprts_hook. */
1685 if (res
1686 && mprts_hook
1687 && --mprts_hook_cnt == 0)
1688 {
1689 if (dump_file && (dump_flags & TDF_DETAILS))
1690 fprintf (dump_file, "Resetting mprts_hook after too many "
1691 "invocations.\n");
1692 mprts_hook = NULL;
1693 }
1694 return res;
64ea4e15
RB
1695}
1696
1697/* Return a value-number for RCODE OPS... either by looking up an existing
351168fe
RB
1698 value-number for the simplified result or by inserting the operation if
1699 INSERT is true. */
64ea4e15
RB
1700
1701static tree
351168fe
RB
1702vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1703 bool insert)
64ea4e15
RB
1704{
1705 tree result = NULL_TREE;
1706 /* We will be creating a value number for
1ef33ef3 1707 RCODE (OPS...).
64ea4e15
RB
1708 So first simplify and lookup this expression to see if it
1709 is already available. */
1710 mprts_hook = vn_lookup_simplify_result;
343ae898 1711 mprts_hook_cnt = 9;
64ea4e15
RB
1712 bool res = false;
1713 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1714 {
1715 case 1:
1716 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1717 break;
1718 case 2:
1719 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1720 break;
1721 case 3:
1722 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1723 break;
1724 }
1725 mprts_hook = NULL;
1726 gimple *new_stmt = NULL;
1727 if (res
1728 && gimple_simplified_result_is_gimple_val (rcode, ops))
1729 /* The expression is already available. */
1730 result = ops[0];
1731 else
1732 {
1733 tree val = vn_lookup_simplify_result (rcode, type, ops);
351168fe 1734 if (!val && insert)
64ea4e15
RB
1735 {
1736 gimple_seq stmts = NULL;
1737 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1738 if (result)
1739 {
1740 gcc_assert (gimple_seq_singleton_p (stmts));
1741 new_stmt = gimple_seq_first_stmt (stmts);
1742 }
1743 }
1744 else
1745 /* The expression is already available. */
1746 result = val;
1747 }
1748 if (new_stmt)
1749 {
1750 /* The expression is not yet available, value-number lhs to
1751 the new SSA_NAME we created. */
1752 /* Initialize value-number information properly. */
1753 VN_INFO_GET (result)->valnum = result;
1754 VN_INFO (result)->value_id = get_next_value_id ();
1755 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1756 new_stmt);
1757 VN_INFO (result)->needs_insertion = true;
1ef33ef3
RB
1758 /* ??? PRE phi-translation inserts NARYs without corresponding
1759 SSA name result. Re-use those but set their result according
1760 to the stmt we just built. */
1761 vn_nary_op_t nary = NULL;
1762 vn_nary_op_lookup_stmt (new_stmt, &nary);
1763 if (nary)
1764 {
1765 gcc_assert (nary->result == NULL_TREE);
1766 nary->result = gimple_assign_lhs (new_stmt);
1767 }
64ea4e15
RB
1768 /* As all "inserted" statements are singleton SCCs, insert
 1769 into the valid table. This is strictly needed to
1770 avoid re-generating new value SSA_NAMEs for the same
1771 expression during SCC iteration over and over (the
1772 optimistic table gets cleared after each iteration).
1773 We do not need to insert into the optimistic table, as
1774 lookups there will fall back to the valid table. */
1ef33ef3 1775 else if (current_info == optimistic_info)
64ea4e15
RB
1776 {
1777 current_info = valid_info;
1778 vn_nary_op_insert_stmt (new_stmt, result);
1779 current_info = optimistic_info;
1780 }
1781 else
1782 vn_nary_op_insert_stmt (new_stmt, result);
1783 if (dump_file && (dump_flags & TDF_DETAILS))
1784 {
1785 fprintf (dump_file, "Inserting name ");
ef6cb4c7 1786 print_generic_expr (dump_file, result);
64ea4e15
RB
1787 fprintf (dump_file, " for expression ");
1788 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1789 fprintf (dump_file, "\n");
1790 }
1791 }
1792 return result;
1793}
1794
351168fe
RB
1795/* Return a value-number for RCODE OPS... either by looking up an existing
1796 value-number for the simplified result or by inserting the operation. */
1797
1798static tree
1799vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1800{
1801 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1802}
1803
 1804/* Try to simplify the n-ary operation NARY and return its simplified
 1805 value if present. */
1806
1807tree
1808vn_nary_simplify (vn_nary_op_t nary)
1809{
1810 if (nary->length > 3)
1811 return NULL_TREE;
1812 tree ops[3];
1813 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1814 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1815}
1816
1817
01df5c8a
RG
1818/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1819 from the statement defining VUSE and if not successful tries to
073a8998 1820 translate *REF and VR_ through an aggregate copy at the definition
e7cbc096
RB
1821 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1822 of *REF and *VR. If only disambiguation was performed then
1823 *DISAMBIGUATE_ONLY is set to true. */
01df5c8a
RG
1824
1825static void *
50f0aa20 1826vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
e7cbc096 1827 bool *disambiguate_only)
01df5c8a
RG
1828{
1829 vn_reference_t vr = (vn_reference_t)vr_;
355fe088 1830 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
e7cbc096 1831 tree base = ao_ref_base (ref);
b9c25734 1832 HOST_WIDE_INT offseti, maxsizei;
199d1d48 1833 static vec<vn_reference_op_s> lhs_ops;
8ea34dab
RG
1834 ao_ref lhs_ref;
1835 bool lhs_ref_ok = false;
b9c25734 1836 poly_int64 copy_size;
01df5c8a 1837
e7cbc096
RB
1838 /* If the reference is based on a parameter that was determined as
1839 pointing to readonly memory it doesn't change. */
1840 if (TREE_CODE (base) == MEM_REF
1841 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1842 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1843 && bitmap_bit_p (const_parms,
1844 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1845 {
1846 *disambiguate_only = true;
1847 return NULL;
1848 }
1849
4fa4929e
RG
 1850 /* First try to disambiguate after value-replacing in the definition's LHS. */
1851 if (is_gimple_assign (def_stmt))
1852 {
1853 tree lhs = gimple_assign_lhs (def_stmt);
25aa059e 1854 bool valueized_anything = false;
8ea34dab 1855 /* Avoid re-allocation overhead. */
9771b263 1856 lhs_ops.truncate (0);
8ea34dab 1857 copy_reference_ops_from_ref (lhs, &lhs_ops);
25aa059e 1858 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
25aa059e
RG
1859 if (valueized_anything)
1860 {
1861 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1862 get_alias_set (lhs),
1863 TREE_TYPE (lhs), lhs_ops);
1864 if (lhs_ref_ok
1865 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
e7cbc096
RB
1866 {
1867 *disambiguate_only = true;
1868 return NULL;
1869 }
25aa059e
RG
1870 }
1871 else
1872 {
1873 ao_ref_init (&lhs_ref, lhs);
1874 lhs_ref_ok = true;
1875 }
d7a160a4
RB
1876
1877 /* If we reach a clobbering statement try to skip it and see if
1878 we find a VN result with exactly the same value as the
1879 possible clobber. In this case we can ignore the clobber
1880 and return the found value.
 1881 Note that we don't need to worry about partially overlapping
1882 accesses as we then can use TBAA to disambiguate against the
1883 clobbering statement when looking up a load (thus the
1884 VN_WALKREWRITE guard). */
1885 if (vn_walk_kind == VN_WALKREWRITE
1886 && is_gimple_reg_type (TREE_TYPE (lhs))
1887 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1888 {
1889 tree *saved_last_vuse_ptr = last_vuse_ptr;
1890 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1891 last_vuse_ptr = NULL;
1892 tree saved_vuse = vr->vuse;
1893 hashval_t saved_hashcode = vr->hashcode;
1894 void *res = vn_reference_lookup_2 (ref,
1895 gimple_vuse (def_stmt), 0, vr);
1896 /* Need to restore vr->vuse and vr->hashcode. */
1897 vr->vuse = saved_vuse;
1898 vr->hashcode = saved_hashcode;
1899 last_vuse_ptr = saved_last_vuse_ptr;
1900 if (res && res != (void *)-1)
1901 {
1902 vn_reference_t vnresult = (vn_reference_t) res;
1903 if (vnresult->result
1904 && operand_equal_p (vnresult->result,
1905 gimple_assign_rhs1 (def_stmt), 0))
1906 return res;
1907 }
1908 }
4fa4929e 1909 }
50f0aa20
RB
1910 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1911 && gimple_call_num_args (def_stmt) <= 4)
1912 {
 1913 /* For builtin calls, valueize their arguments and call the
1914 alias oracle again. Valueization may improve points-to
1915 info of pointers and constify size and position arguments.
1916 Originally this was motivated by PR61034 which has
1917 conditional calls to free falsely clobbering ref because
1918 of imprecise points-to info of the argument. */
1919 tree oldargs[4];
61dd7fbc 1920 bool valueized_anything = false;
50f0aa20
RB
1921 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1922 {
1923 oldargs[i] = gimple_call_arg (def_stmt, i);
a7976089
RB
1924 tree val = vn_valueize (oldargs[i]);
1925 if (val != oldargs[i])
50f0aa20 1926 {
a7976089 1927 gimple_call_set_arg (def_stmt, i, val);
50f0aa20
RB
1928 valueized_anything = true;
1929 }
1930 }
1931 if (valueized_anything)
1932 {
538dd0b7
DM
1933 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1934 ref);
50f0aa20
RB
1935 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1936 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1937 if (!res)
e7cbc096
RB
1938 {
1939 *disambiguate_only = true;
1940 return NULL;
1941 }
50f0aa20
RB
1942 }
1943 }
1944
e7cbc096 1945 if (*disambiguate_only)
50f0aa20 1946 return (void *)-1;
4fa4929e 1947
01df5c8a
RG
1948 /* If we cannot constrain the size of the reference we cannot
1949 test if anything kills it. */
b9c25734 1950 if (!ref->max_size_known_p ())
01df5c8a
RG
1951 return (void *)-1;
1952
b9c25734
RS
1953 poly_int64 offset = ref->offset;
1954 poly_int64 maxsize = ref->max_size;
1955
47598145
MM
1956 /* We can't deduce anything useful from clobbers. */
1957 if (gimple_clobber_p (def_stmt))
1958 return (void *)-1;
1959
01df5c8a 1960 /* def_stmt may-defs *ref. See if we can derive a value for *ref
47598145 1961 from that definition.
01df5c8a 1962 1) Memset. */
b45d2719 1963 if (is_gimple_reg_type (vr->type)
c7ee7b45 1964 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
01df5c8a 1965 && integer_zerop (gimple_call_arg (def_stmt, 1))
b9c25734 1966 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
01df5c8a
RG
1967 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1968 {
1969 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1970 tree base2;
588db50c 1971 poly_int64 offset2, size2, maxsize2;
ee45a32d
EB
1972 bool reverse;
1973 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1974 &reverse);
b9c25734
RS
1975 tree len = gimple_call_arg (def_stmt, 2);
1976 if (known_size_p (maxsize2)
01df5c8a 1977 && operand_equal_p (base, base2, 0)
b9c25734
RS
1978 && known_subrange_p (offset, maxsize, offset2,
1979 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
b45d2719 1980 {
e8160c9a 1981 tree val = build_zero_cst (vr->type);
9179ed9d 1982 return vn_reference_lookup_or_insert_for_pieces
b55eb410 1983 (vuse, vr->set, vr->type, vr->operands, val);
b45d2719 1984 }
01df5c8a
RG
1985 }
1986
1987 /* 2) Assignment from an empty CONSTRUCTOR. */
b45d2719 1988 else if (is_gimple_reg_type (vr->type)
01df5c8a
RG
1989 && gimple_assign_single_p (def_stmt)
1990 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1991 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1992 {
1993 tree base2;
588db50c 1994 poly_int64 offset2, size2, maxsize2;
ee45a32d 1995 bool reverse;
01df5c8a 1996 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
ee45a32d 1997 &offset2, &size2, &maxsize2, &reverse);
b9c25734 1998 if (known_size_p (maxsize2)
9c8cbc74 1999 && operand_equal_p (base, base2, 0)
b9c25734 2000 && known_subrange_p (offset, maxsize, offset2, size2))
b45d2719 2001 {
e8160c9a 2002 tree val = build_zero_cst (vr->type);
9179ed9d 2003 return vn_reference_lookup_or_insert_for_pieces
b55eb410 2004 (vuse, vr->set, vr->type, vr->operands, val);
b45d2719 2005 }
01df5c8a
RG
2006 }
2007
c867aba0
RG
 2008 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2009 routines to extract the assigned bits. */
b9c25734 2010 else if (known_eq (ref->size, maxsize)
c867aba0 2011 && is_gimple_reg_type (vr->type)
ee45a32d 2012 && !contains_storage_order_barrier_p (vr->operands)
c867aba0 2013 && gimple_assign_single_p (def_stmt)
64ea4e15 2014 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
b9c25734
RS
2015 /* native_encode and native_decode operate on arrays of bytes
2016 and so fundamentally need a compile-time size and offset. */
2017 && maxsize.is_constant (&maxsizei)
2018 && maxsizei % BITS_PER_UNIT == 0
2019 && offset.is_constant (&offseti)
2020 && offseti % BITS_PER_UNIT == 0
64ea4e15
RB
2021 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2022 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2023 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
c867aba0
RG
2024 {
2025 tree base2;
588db50c 2026 HOST_WIDE_INT offset2, size2;
ee45a32d 2027 bool reverse;
588db50c
RS
2028 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2029 &offset2, &size2, &reverse);
2030 if (base2
2031 && !reverse
c867aba0
RG
2032 && size2 % BITS_PER_UNIT == 0
2033 && offset2 % BITS_PER_UNIT == 0
2034 && operand_equal_p (base, base2, 0)
b9c25734 2035 && known_subrange_p (offseti, maxsizei, offset2, size2))
c867aba0
RG
2036 {
2037 /* We support up to 512-bit values (for V8DFmode). */
2038 unsigned char buffer[64];
2039 int len;
2040
64ea4e15
RB
2041 tree rhs = gimple_assign_rhs1 (def_stmt);
2042 if (TREE_CODE (rhs) == SSA_NAME)
2043 rhs = SSA_VAL (rhs);
c867aba0
RG
2044 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2045 buffer, sizeof (buffer));
2046 if (len > 0)
2047 {
f35ea97d
RB
2048 tree type = vr->type;
2049 /* Make sure to interpret in a type that has a range
2050 covering the whole access size. */
2051 if (INTEGRAL_TYPE_P (vr->type)
b9c25734
RS
2052 && maxsizei != TYPE_PRECISION (vr->type))
2053 type = build_nonstandard_integer_type (maxsizei,
f35ea97d
RB
2054 TYPE_UNSIGNED (type));
2055 tree val = native_interpret_expr (type,
c867aba0 2056 buffer
b9c25734 2057 + ((offseti - offset2)
c867aba0 2058 / BITS_PER_UNIT),
b9c25734 2059 maxsizei / BITS_PER_UNIT);
f35ea97d
RB
 2060 /* If we chop off bits because the type's precision doesn't
2061 match the memory access size this is ok when optimizing
2062 reads but not when called from the DSE code during
2063 elimination. */
2064 if (val
2065 && type != vr->type)
2066 {
2067 if (! int_fits_type_p (val, vr->type))
2068 val = NULL_TREE;
2069 else
2070 val = fold_convert (vr->type, val);
2071 }
2072
c867aba0 2073 if (val)
9179ed9d 2074 return vn_reference_lookup_or_insert_for_pieces
f35ea97d 2075 (vuse, vr->set, vr->type, vr->operands, val);
c867aba0
RG
2076 }
2077 }
2078 }
2079
0147184e
RG
 2080 /* 4) Assignment from an SSA name whose definition we may be able
2081 to access pieces from. */
b9c25734 2082 else if (known_eq (ref->size, maxsize)
0147184e 2083 && is_gimple_reg_type (vr->type)
ee45a32d 2084 && !contains_storage_order_barrier_p (vr->operands)
0147184e
RG
2085 && gimple_assign_single_p (def_stmt)
2086 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2087 {
64ea4e15 2088 tree base2;
588db50c 2089 poly_int64 offset2, size2, maxsize2;
64ea4e15
RB
2090 bool reverse;
2091 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2092 &offset2, &size2, &maxsize2,
2093 &reverse);
2094 if (!reverse
588db50c
RS
2095 && known_size_p (maxsize2)
2096 && known_eq (maxsize2, size2)
64ea4e15 2097 && operand_equal_p (base, base2, 0)
b9c25734 2098 && known_subrange_p (offset, maxsize, offset2, size2)
64ea4e15
RB
2099 /* ??? We can't handle bitfield precision extracts without
2100 either using an alternate type for the BIT_FIELD_REF and
2101 then doing a conversion or possibly adjusting the offset
bd2c6270 2102 according to endianness. */
64ea4e15 2103 && (! INTEGRAL_TYPE_P (vr->type)
b9c25734
RS
2104 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2105 && multiple_p (ref->size, BITS_PER_UNIT))
0147184e 2106 {
64ea4e15
RB
2107 code_helper rcode = BIT_FIELD_REF;
2108 tree ops[3];
2109 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2110 ops[1] = bitsize_int (ref->size);
2111 ops[2] = bitsize_int (offset - offset2);
2112 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
24d020bd
RB
2113 if (val
2114 && (TREE_CODE (val) != SSA_NAME
2115 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
0147184e 2116 {
64ea4e15
RB
2117 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2118 (vuse, vr->set, vr->type, vr->operands, val);
2119 return res;
0147184e
RG
2120 }
2121 }
2122 }
2123
2124 /* 5) For aggregate copies translate the reference through them if
01df5c8a 2125 the copy kills ref. */
3bc27de7
RG
2126 else if (vn_walk_kind == VN_WALKREWRITE
2127 && gimple_assign_single_p (def_stmt)
01df5c8a 2128 && (DECL_P (gimple_assign_rhs1 (def_stmt))
70f34814 2129 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
01df5c8a
RG
2130 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2131 {
2132 tree base2;
ee45a32d 2133 int i, j, k;
ef062b13 2134 auto_vec<vn_reference_op_s> rhs;
01df5c8a 2135 vn_reference_op_t vro;
b45d2719 2136 ao_ref r;
01df5c8a 2137
8ea34dab
RG
2138 if (!lhs_ref_ok)
2139 return (void *)-1;
2140
01df5c8a 2141 /* See if the assignment kills REF. */
8ea34dab 2142 base2 = ao_ref_base (&lhs_ref);
b9c25734 2143 if (!lhs_ref.max_size_known_p ()
ea3eac3a
RB
2144 || (base != base2
2145 && (TREE_CODE (base) != MEM_REF
2146 || TREE_CODE (base2) != MEM_REF
2147 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2148 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2149 TREE_OPERAND (base2, 1))))
a0f79fcc 2150 || !stmt_kills_ref_p (def_stmt, ref))
01df5c8a
RG
2151 return (void *)-1;
2152
8ea34dab
RG
2153 /* Find the common base of ref and the lhs. lhs_ops already
2154 contains valueized operands for the lhs. */
9771b263
DN
2155 i = vr->operands.length () - 1;
2156 j = lhs_ops.length () - 1;
35ecd408 2157 while (j >= 0 && i >= 0
9771b263 2158 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
01df5c8a
RG
2159 {
2160 i--;
2161 j--;
2162 }
35ecd408 2163
25aa059e
RG
2164 /* ??? The innermost op should always be a MEM_REF and we already
2165 checked that the assignment to the lhs kills vr. Thus for
2166 aggregate copies using char[] types the vn_reference_op_eq
2167 may fail when comparing types for compatibility. But we really
2168 don't care here - further lookups with the rewritten operands
2169 will simply fail if we messed up types too badly. */
b9c25734 2170 poly_int64 extra_off = 0;
4f9dbaaa 2171 if (j == 0 && i >= 0
9771b263 2172 && lhs_ops[0].opcode == MEM_REF
b9c25734 2173 && maybe_ne (lhs_ops[0].off, -1))
8403c2cf 2174 {
b9c25734 2175 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
8403c2cf
RB
2176 i--, j--;
2177 else if (vr->operands[i].opcode == MEM_REF
b9c25734 2178 && maybe_ne (vr->operands[i].off, -1))
8403c2cf
RB
2179 {
2180 extra_off = vr->operands[i].off - lhs_ops[0].off;
2181 i--, j--;
2182 }
2183 }
25aa059e 2184
01df5c8a
RG
2185 /* i now points to the first additional op.
2186 ??? LHS may not be completely contained in VR, one or more
2187 VIEW_CONVERT_EXPRs could be in its way. We could at least
2188 try handling outermost VIEW_CONVERT_EXPRs. */
2189 if (j != -1)
2190 return (void *)-1;
01df5c8a 2191
ee45a32d
EB
2192 /* Punt if the additional ops contain a storage order barrier. */
2193 for (k = i; k >= 0; k--)
2194 {
2195 vro = &vr->operands[k];
2196 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2197 return (void *)-1;
2198 }
2199
01df5c8a
RG
2200 /* Now re-write REF to be based on the rhs of the assignment. */
2201 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
8403c2cf
RB
2202
2203 /* Apply an extra offset to the inner MEM_REF of the RHS. */
b9c25734 2204 if (maybe_ne (extra_off, 0))
8403c2cf
RB
2205 {
2206 if (rhs.length () < 2
2207 || rhs[0].opcode != MEM_REF
b9c25734 2208 || known_eq (rhs[0].off, -1))
8403c2cf
RB
2209 return (void *)-1;
2210 rhs[0].off += extra_off;
2211 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2212 build_int_cst (TREE_TYPE (rhs[0].op0),
2213 extra_off));
2214 }
2215
01df5c8a 2216 /* We need to prepend vr->operands[0..i] to rhs. */
26f3a4e1 2217 vec<vn_reference_op_s> old = vr->operands;
9771b263 2218 if (i + 1 + rhs.length () > vr->operands.length ())
ec67c62e 2219 vr->operands.safe_grow (i + 1 + rhs.length ());
01df5c8a 2220 else
9771b263
DN
2221 vr->operands.truncate (i + 1 + rhs.length ());
2222 FOR_EACH_VEC_ELT (rhs, j, vro)
2223 vr->operands[i + 1 + j] = *vro;
b55eb410 2224 vr->operands = valueize_refs (vr->operands);
26f3a4e1
RB
2225 if (old == shared_lookup_references)
2226 shared_lookup_references = vr->operands;
01df5c8a 2227 vr->hashcode = vn_reference_compute_hash (vr);
c7ee7b45 2228
8403c2cf
RB
2229 /* Try folding the new reference to a constant. */
2230 tree val = fully_constant_vn_reference_p (vr);
2231 if (val)
2232 return vn_reference_lookup_or_insert_for_pieces
2233 (vuse, vr->set, vr->type, vr->operands, val);
2234
c7ee7b45
RG
2235 /* Adjust *ref from the new operands. */
2236 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2237 return (void *)-1;
2238 /* This can happen with bitfields. */
b9c25734 2239 if (maybe_ne (ref->size, r.size))
c7ee7b45
RG
2240 return (void *)-1;
2241 *ref = r;
2242
2243 /* Do not update last seen VUSE after translating. */
2244 last_vuse_ptr = NULL;
2245
2246 /* Keep looking for the adjusted *REF / VR pair. */
2247 return NULL;
2248 }
2249
0147184e 2250 /* 6) For memcpy copies translate the reference through them if
c7ee7b45
RG
2251 the copy kills ref. */
2252 else if (vn_walk_kind == VN_WALKREWRITE
2253 && is_gimple_reg_type (vr->type)
2254 /* ??? Handle BCOPY as well. */
2255 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2256 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2257 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2258 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2259 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2260 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2261 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
b9c25734 2262 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
c7ee7b45
RG
2263 {
2264 tree lhs, rhs;
2265 ao_ref r;
b9c25734 2266 poly_int64 rhs_offset, lhs_offset;
c7ee7b45 2267 vn_reference_op_s op;
b9c25734
RS
2268 poly_uint64 mem_offset;
2269 poly_int64 at, byte_maxsize;
c7ee7b45 2270
c7ee7b45 2271 /* Only handle non-variable, addressable refs. */
b9c25734
RS
2272 if (maybe_ne (ref->size, maxsize)
2273 || !multiple_p (offset, BITS_PER_UNIT, &at)
2274 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
c7ee7b45
RG
2275 return (void *)-1;
2276
2277 /* Extract a pointer base and an offset for the destination. */
2278 lhs = gimple_call_arg (def_stmt, 0);
2279 lhs_offset = 0;
2280 if (TREE_CODE (lhs) == SSA_NAME)
e65757f3
RB
2281 {
2282 lhs = SSA_VAL (lhs);
2283 if (TREE_CODE (lhs) == SSA_NAME)
2284 {
355fe088 2285 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
e65757f3
RB
2286 if (gimple_assign_single_p (def_stmt)
2287 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2288 lhs = gimple_assign_rhs1 (def_stmt);
2289 }
2290 }
c7ee7b45
RG
2291 if (TREE_CODE (lhs) == ADDR_EXPR)
2292 {
2293 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
a90c8804 2294 &lhs_offset);
c7ee7b45
RG
2295 if (!tem)
2296 return (void *)-1;
2297 if (TREE_CODE (tem) == MEM_REF
b9c25734 2298 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
c7ee7b45
RG
2299 {
2300 lhs = TREE_OPERAND (tem, 0);
e65757f3
RB
2301 if (TREE_CODE (lhs) == SSA_NAME)
2302 lhs = SSA_VAL (lhs);
b9c25734 2303 lhs_offset += mem_offset;
c7ee7b45
RG
2304 }
2305 else if (DECL_P (tem))
2306 lhs = build_fold_addr_expr (tem);
2307 else
2308 return (void *)-1;
2309 }
2310 if (TREE_CODE (lhs) != SSA_NAME
2311 && TREE_CODE (lhs) != ADDR_EXPR)
2312 return (void *)-1;
2313
2314 /* Extract a pointer base and an offset for the source. */
2315 rhs = gimple_call_arg (def_stmt, 1);
2316 rhs_offset = 0;
2317 if (TREE_CODE (rhs) == SSA_NAME)
2318 rhs = SSA_VAL (rhs);
2319 if (TREE_CODE (rhs) == ADDR_EXPR)
2320 {
2321 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
a90c8804 2322 &rhs_offset);
c7ee7b45
RG
2323 if (!tem)
2324 return (void *)-1;
2325 if (TREE_CODE (tem) == MEM_REF
b9c25734 2326 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
c7ee7b45
RG
2327 {
2328 rhs = TREE_OPERAND (tem, 0);
b9c25734 2329 rhs_offset += mem_offset;
c7ee7b45 2330 }
6a248fce
RB
2331 else if (DECL_P (tem)
2332 || TREE_CODE (tem) == STRING_CST)
c7ee7b45
RG
2333 rhs = build_fold_addr_expr (tem);
2334 else
2335 return (void *)-1;
2336 }
2337 if (TREE_CODE (rhs) != SSA_NAME
2338 && TREE_CODE (rhs) != ADDR_EXPR)
2339 return (void *)-1;
2340
c7ee7b45 2341 /* The bases of the destination and the reference have to agree. */
c7ee7b45 2342 if (TREE_CODE (base) == MEM_REF)
7a667e64
RS
2343 {
2344 if (TREE_OPERAND (base, 0) != lhs
b9c25734 2345 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
7a667e64 2346 return (void *) -1;
b9c25734 2347 at += mem_offset;
7a667e64
RS
2348 }
2349 else if (!DECL_P (base)
2350 || TREE_CODE (lhs) != ADDR_EXPR
2351 || TREE_OPERAND (lhs, 0) != base)
2352 return (void *)-1;
2353
e65757f3
RB
2354 /* If the access is completely outside of the memcpy destination
2355 area there is no aliasing. */
b9c25734 2356 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
e65757f3
RB
2357 return NULL;
2358 /* And the access has to be contained within the memcpy destination. */
b9c25734 2359 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
c7ee7b45
RG
2360 return (void *)-1;
2361
2362 /* Make room for 2 operands in the new reference. */
9771b263 2363 if (vr->operands.length () < 2)
c7ee7b45 2364 {
9771b263
DN
2365 vec<vn_reference_op_s> old = vr->operands;
2366 vr->operands.safe_grow_cleared (2);
ec67c62e 2367 if (old == shared_lookup_references)
26f3a4e1 2368 shared_lookup_references = vr->operands;
c7ee7b45
RG
2369 }
2370 else
9771b263 2371 vr->operands.truncate (2);
c7ee7b45
RG
2372
2373 /* The looked-through reference is a simple MEM_REF. */
2374 memset (&op, 0, sizeof (op));
2375 op.type = vr->type;
2376 op.opcode = MEM_REF;
2aa8aa18 2377 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
c7ee7b45 2378 op.off = at - lhs_offset + rhs_offset;
9771b263 2379 vr->operands[0] = op;
6d6c9525 2380 op.type = TREE_TYPE (rhs);
c7ee7b45
RG
2381 op.opcode = TREE_CODE (rhs);
2382 op.op0 = rhs;
2383 op.off = -1;
9771b263 2384 vr->operands[1] = op;
c7ee7b45 2385 vr->hashcode = vn_reference_compute_hash (vr);
b45d2719 2386
edd048e2
RB
2387 /* Try folding the new reference to a constant. */
2388 tree val = fully_constant_vn_reference_p (vr);
2389 if (val)
2390 return vn_reference_lookup_or_insert_for_pieces
2391 (vuse, vr->set, vr->type, vr->operands, val);
2392
b45d2719
RG
2393 /* Adjust *ref from the new operands. */
2394 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
01df5c8a 2395 return (void *)-1;
03472fdd 2396 /* This can happen with bitfields. */
b9c25734 2397 if (maybe_ne (ref->size, r.size))
03472fdd 2398 return (void *)-1;
b45d2719 2399 *ref = r;
01df5c8a 2400
d0ca0bcb
RG
2401 /* Do not update last seen VUSE after translating. */
2402 last_vuse_ptr = NULL;
2403
01df5c8a
RG
2404 /* Keep looking for the adjusted *REF / VR pair. */
2405 return NULL;
2406 }
2407
2408 /* Bail out and stop walking. */
2409 return (void *)-1;
2410}
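
/* Illustrative, standalone sketch (not part of SCCVN) of the
   read-through-store reasoning in vn_reference_lookup_3 above, using
   plain byte offsets instead of poly_int bit ranges and assuming a
   little-endian host.  All demo_* names are hypothetical.  A read is
   serviced from an earlier store when its range is contained in the
   store (cases 1-3), or continues at a translated offset in the
   source of a covering memcpy (case 6).  */

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

/* Mirror of the known_subrange_p containment test: the read
   [off1, off1+size1) must lie within the store [off2, off2+size2).  */
static bool
demo_known_subrange (int64_t off1, int64_t size1,
		     int64_t off2, int64_t size2)
{
  return off2 <= off1 && off1 + size1 <= off2 + size2;
}

/* Analogue of the native_encode/native_interpret round trip: encode
   a stored 64-bit constant into bytes, then interpret a contained
   16-bit read at byte offset READ_OFF - STORE_OFF.  */
static bool
demo_read_through_store (uint64_t stored, int64_t store_off,
			 int64_t read_off, uint16_t *val)
{
  unsigned char buffer[8];
  if (!demo_known_subrange (read_off, (int64_t) sizeof *val,
			    store_off, (int64_t) sizeof buffer))
    return false;
  memcpy (buffer, &stored, sizeof buffer);	   /* "native_encode"    */
  memcpy (val, buffer + (read_off - store_off),
	  sizeof *val);				   /* "native_interpret" */
  return true;
}

/* Analogue of the memcpy translation: a read at byte AT inside the
   destination of memcpy (dst_base + lhs_off, src_base + rhs_off, n)
   continues as a read at AT - LHS_OFF + RHS_OFF in the source.  */
static int64_t
demo_translate_through_memcpy (int64_t at, int64_t lhs_off,
			       int64_t rhs_off)
{
  return at - lhs_off + rhs_off;
}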
2411
3c5b29f5
RB
2412/* Return a reference op vector from OP that can be used for
2413 vn_reference_lookup_pieces. The caller is responsible for releasing
2414 the vector. */
2415
2416vec<vn_reference_op_s>
2417vn_reference_operands_for_lookup (tree op)
2418{
2419 bool valueized;
2420 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2421}
2422
c9145754
DB
 2423/* Lookup a reference operation by its parts in the current hash table.
2424 Returns the resulting value number if it exists in the hash table,
2425 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2426 vn_reference_t stored in the hashtable if something is found. */
89fb70a3
DB
2427
2428tree
b45d2719 2429vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
9771b263 2430 vec<vn_reference_op_s> operands,
3bc27de7 2431 vn_reference_t *vnresult, vn_lookup_kind kind)
c9145754
DB
2432{
2433 struct vn_reference_s vr1;
5006671f 2434 vn_reference_t tmp;
12bd5a1e 2435 tree cst;
5006671f
RG
2436
2437 if (!vnresult)
2438 vnresult = &tmp;
2439 *vnresult = NULL;
01df5c8a 2440
d1c0308e 2441 vr1.vuse = vuse_ssa_val (vuse);
9771b263
DN
2442 shared_lookup_references.truncate (0);
2443 shared_lookup_references.safe_grow (operands.length ());
2444 memcpy (shared_lookup_references.address (),
2445 operands.address (),
01df5c8a 2446 sizeof (vn_reference_op_s)
9771b263 2447 * operands.length ());
01df5c8a
RG
2448 vr1.operands = operands = shared_lookup_references
2449 = valueize_refs (shared_lookup_references);
b45d2719
RG
2450 vr1.type = type;
2451 vr1.set = set;
c9145754 2452 vr1.hashcode = vn_reference_compute_hash (&vr1);
12bd5a1e
RG
2453 if ((cst = fully_constant_vn_reference_p (&vr1)))
2454 return cst;
c9145754 2455
12bd5a1e 2456 vn_reference_lookup_1 (&vr1, vnresult);
5006671f 2457 if (!*vnresult
3bc27de7 2458 && kind != VN_NOWALK
5006671f 2459 && vr1.vuse)
53f3815c 2460 {
b45d2719 2461 ao_ref r;
3bc27de7 2462 vn_walk_kind = kind;
b45d2719 2463 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
01df5c8a 2464 *vnresult =
b45d2719 2465 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
01df5c8a 2466 vn_reference_lookup_2,
92a5094e 2467 vn_reference_lookup_3,
76be46db 2468 vuse_ssa_val, &vr1);
26f3a4e1 2469 gcc_checking_assert (vr1.operands == shared_lookup_references);
53f3815c
RG
2470 }
2471
5006671f
RG
2472 if (*vnresult)
2473 return (*vnresult)->result;
2474
2475 return NULL_TREE;
c9145754
DB
2476}
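
/* Illustrative, standalone sketch (not part of SCCVN) of the shared
   scratch-vector pattern used via shared_lookup_references above: one
   growable buffer is reused across lookups so the common case does no
   allocation.  demo_* names are hypothetical; error handling is
   elided for brevity.  */

#include <stdlib.h>
#include <string.h>

static int *demo_scratch;
static size_t demo_scratch_cap;

static int *
demo_shared_copy (const int *ops, size_t n)
{
  if (n > demo_scratch_cap)
    {
      demo_scratch = (int *) realloc (demo_scratch, n * sizeof *ops);
      demo_scratch_cap = n;
    }
  memcpy (demo_scratch, ops, n * sizeof *ops);
  return demo_scratch;	/* Valid only until the next lookup.  */
}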
2477
2478/* Lookup OP in the current hash table, and return the resulting value
2479 number if it exists in the hash table. Return NULL_TREE if it does
2480 not exist in the hash table or if the result field of the structure
 2481 was NULL. VNRESULT will be filled in with the vn_reference_t
1c48bff1
RB
2482 stored in the hashtable if one exists. When TBAA_P is false assume
2483 we are looking up a store and treat it as having alias-set zero. */
c9145754
DB
2484
2485tree
3bc27de7 2486vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1c48bff1 2487 vn_reference_t *vnresult, bool tbaa_p)
89fb70a3 2488{
9771b263 2489 vec<vn_reference_op_s> operands;
89fb70a3 2490 struct vn_reference_s vr1;
12bd5a1e 2491 tree cst;
3ceaf2f5 2492 bool valueized_anything;
5006671f 2493
c9145754
DB
2494 if (vnresult)
2495 *vnresult = NULL;
89fb70a3 2496
d1c0308e 2497 vr1.vuse = vuse_ssa_val (vuse);
3ceaf2f5
RG
2498 vr1.operands = operands
 2499 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
b45d2719 2500 vr1.type = TREE_TYPE (op);
87440c29 2501 vr1.set = tbaa_p ? get_alias_set (op) : 0;
89fb70a3 2502 vr1.hashcode = vn_reference_compute_hash (&vr1);
12bd5a1e
RG
2503 if ((cst = fully_constant_vn_reference_p (&vr1)))
2504 return cst;
896c8b96 2505
3bc27de7 2506 if (kind != VN_NOWALK
5006671f
RG
2507 && vr1.vuse)
2508 {
2509 vn_reference_t wvnresult;
b45d2719 2510 ao_ref r;
3ceaf2f5
RG
2511 /* Make sure to use a valueized reference if we valueized anything.
2512 Otherwise preserve the full reference for advanced TBAA. */
 2513 if (!valueized_anything
2514 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2515 vr1.operands))
6d6c9525 2516 ao_ref_init (&r, op);
1c48bff1
RB
2517 if (! tbaa_p)
2518 r.ref_alias_set = r.base_alias_set = 0;
3bc27de7 2519 vn_walk_kind = kind;
5006671f 2520 wvnresult =
b45d2719 2521 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
01df5c8a 2522 vn_reference_lookup_2,
92a5094e 2523 vn_reference_lookup_3,
76be46db 2524 vuse_ssa_val, &vr1);
26f3a4e1 2525 gcc_checking_assert (vr1.operands == shared_lookup_references);
5006671f
RG
2526 if (wvnresult)
2527 {
2528 if (vnresult)
2529 *vnresult = wvnresult;
2530 return wvnresult->result;
2531 }
2532
2533 return NULL_TREE;
896c8b96 2534 }
89fb70a3 2535
5006671f 2536 return vn_reference_lookup_1 (&vr1, vnresult);
89fb70a3
DB
2537}
2538
26f3a4e1
RB
2539/* Lookup CALL in the current hash table and return the entry in
2540 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2541
2542void
538dd0b7 2543vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
26f3a4e1
RB
2544 vn_reference_t vr)
2545{
27732ffd
ML
2546 if (vnresult)
2547 *vnresult = NULL;
2548
26f3a4e1
RB
2549 tree vuse = gimple_vuse (call);
2550
2551 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2552 vr->operands = valueize_shared_reference_ops_from_call (call);
2553 vr->type = gimple_expr_type (call);
2554 vr->set = 0;
2555 vr->hashcode = vn_reference_compute_hash (vr);
2556 vn_reference_lookup_1 (vr, vnresult);
2557}
c9145754 2558
89fb70a3 2559/* Insert OP into the current hash table with a value number of
c9145754 2560 RESULT, and return the resulting reference structure we created. */
89fb70a3 2561
26f3a4e1 2562static vn_reference_t
4ec0a198 2563vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
89fb70a3 2564{
bf190e8d 2565 vn_reference_s **slot;
89fb70a3 2566 vn_reference_t vr1;
39e843e8 2567 bool tem;
89fb70a3 2568
af6a6eec 2569 vr1 = current_info->references_pool->allocate ();
c9145754
DB
2570 if (TREE_CODE (result) == SSA_NAME)
2571 vr1->value_id = VN_INFO (result)->value_id;
2572 else
2573 vr1->value_id = get_or_alloc_constant_value_id (result);
5006671f 2574 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
39e843e8 2575 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
b45d2719
RG
2576 vr1->type = TREE_TYPE (op);
2577 vr1->set = get_alias_set (op);
89fb70a3
DB
2578 vr1->hashcode = vn_reference_compute_hash (vr1);
2579 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
4ec0a198 2580 vr1->result_vdef = vdef;
89fb70a3 2581
c203e8a7
TS
2582 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2583 INSERT);
89fb70a3
DB
2584
 2585 /* Because we look up stores using vuses, and value number failures
 2586 using the vdefs (see visit_reference_op_store for how and why),
 2587 it's possible that on failure we may try to insert an already
 2588 inserted store. This is not wrong; there is no ssa name for a
2589 store that we could use as a differentiator anyway. Thus, unlike
2590 the other lookup functions, you cannot gcc_assert (!*slot)
2591 here. */
2592
8d0eca24
RG
2593 /* But free the old slot in case of a collision. */
2594 if (*slot)
2595 free_reference (*slot);
89fb70a3
DB
2596
2597 *slot = vr1;
c9145754
DB
2598 return vr1;
2599}
2600
 2601/* Insert a reference by its pieces into the current hash table with
2602 a value number of RESULT. Return the resulting reference
2603 structure we created. */
2604
2605vn_reference_t
b45d2719 2606vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
9771b263 2607 vec<vn_reference_op_s> operands,
c9145754
DB
2608 tree result, unsigned int value_id)
2609
2610{
bf190e8d 2611 vn_reference_s **slot;
c9145754
DB
2612 vn_reference_t vr1;
2613
af6a6eec 2614 vr1 = current_info->references_pool->allocate ();
5006671f
RG
2615 vr1->value_id = value_id;
2616 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
c9145754 2617 vr1->operands = valueize_refs (operands);
b45d2719
RG
2618 vr1->type = type;
2619 vr1->set = set;
c9145754
DB
2620 vr1->hashcode = vn_reference_compute_hash (vr1);
2621 if (result && TREE_CODE (result) == SSA_NAME)
2622 result = SSA_VAL (result);
2623 vr1->result = result;
2624
c203e8a7
TS
2625 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2626 INSERT);
b8698a0f 2627
c9145754 2628 /* At this point we should have all the things inserted that we have
5006671f
RG
2629 seen before, and we should never try inserting something that
2630 already exists. */
c9145754
DB
2631 gcc_assert (!*slot);
2632 if (*slot)
2633 free_reference (*slot);
2634
2635 *slot = vr1;
2636 return vr1;
89fb70a3
DB
2637}
2638
49a1fb2d 2639/* Compute and return the hash value for nary operation VNO1. */
89fb70a3 2640
26f3a4e1 2641static hashval_t
49a1fb2d 2642vn_nary_op_compute_hash (const vn_nary_op_t vno1)
89fb70a3 2643{
4e44a6e8 2644 inchash::hash hstate;
49a1fb2d 2645 unsigned i;
89fb70a3 2646
49a1fb2d
RG
2647 for (i = 0; i < vno1->length; ++i)
2648 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2649 vno1->op[i] = SSA_VAL (vno1->op[i]);
89fb70a3 2650
7fd9012e
RB
2651 if (((vno1->length == 2
2652 && commutative_tree_code (vno1->opcode))
2653 || (vno1->length == 3
2654 && commutative_ternary_tree_code (vno1->opcode)))
14e72812 2655 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
6b4db501 2656 std::swap (vno1->op[0], vno1->op[1]);
7fd9012e 2657 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
14e72812 2658 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
7fd9012e
RB
2659 {
2660 std::swap (vno1->op[0], vno1->op[1]);
2661 vno1->opcode = swap_tree_comparison (vno1->opcode);
2662 }
89fb70a3 2663
4e44a6e8 2664 hstate.add_int (vno1->opcode);
49a1fb2d 2665 for (i = 0; i < vno1->length; ++i)
4e44a6e8 2666 inchash::add_expr (vno1->op[i], hstate);
89fb70a3 2667
4e44a6e8 2668 return hstate.end ();
89fb70a3
DB
2669}
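
/* Illustrative, standalone sketch (not part of SCCVN) of the operand
   canonicalization above: for a commutative opcode, order the two
   operands consistently before hashing so that a + b and b + a hash
   and compare equal.  demo_* names are hypothetical.  */

#include <stddef.h>

static size_t
demo_commutative_hash (int opcode, size_t op0, size_t op1)
{
  if (op0 > op1)	/* Stand-in for tree_swap_operands_p.  */
    {
      size_t tmp = op0;
      op0 = op1;
      op1 = tmp;
    }
  size_t h = (size_t) opcode;
  h = h * 31 + op0;
  h = h * 31 + op1;
  return h;
}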
2670
bf190e8d 2671/* Compare nary operations VNO1 and VNO2 and return true if they are
89fb70a3
DB
2672 equivalent. */
2673
bf190e8d
LC
2674bool
2675vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
89fb70a3 2676{
49a1fb2d
RG
2677 unsigned i;
2678
85169114
PB
2679 if (vno1->hashcode != vno2->hashcode)
2680 return false;
2681
5a7d7f9c
RG
2682 if (vno1->length != vno2->length)
2683 return false;
2684
49a1fb2d 2685 if (vno1->opcode != vno2->opcode
63a14fa3 2686 || !types_compatible_p (vno1->type, vno2->type))
49a1fb2d
RG
2687 return false;
2688
2689 for (i = 0; i < vno1->length; ++i)
2690 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2691 return false;
2692
27ecd5c2
AP
 2693 /* BIT_INSERT_EXPR has an implicit operand, the type precision
2694 of op1. Need to check to make sure they are the same. */
2695 if (vno1->opcode == BIT_INSERT_EXPR
2696 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2697 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2698 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2699 return false;
2700
49a1fb2d 2701 return true;
89fb70a3
DB
2702}
2703
9ad6bebe 2704/* Initialize VNO from the pieces provided. */
89fb70a3 2705
9ad6bebe
NF
2706static void
2707init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
5a7d7f9c 2708 enum tree_code code, tree type, tree *ops)
9ad6bebe
NF
2709{
2710 vno->opcode = code;
2711 vno->length = length;
2712 vno->type = type;
5a7d7f9c 2713 memcpy (&vno->op[0], ops, sizeof (tree) * length);
9ad6bebe
NF
2714}
2715
2716/* Initialize VNO from OP. */
2717
2718static void
2719init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2720{
2721 unsigned i;
2722
2723 vno->opcode = TREE_CODE (op);
2724 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2725 vno->type = TREE_TYPE (op);
2726 for (i = 0; i < vno->length; ++i)
2727 vno->op[i] = TREE_OPERAND (op, i);
2728}
2729
5a7d7f9c
RG
2730/* Return the number of operands for a vn_nary ops structure from STMT. */
2731
2732static unsigned int
355fe088 2733vn_nary_length_from_stmt (gimple *stmt)
5a7d7f9c
RG
2734{
2735 switch (gimple_assign_rhs_code (stmt))
2736 {
2737 case REALPART_EXPR:
2738 case IMAGPART_EXPR:
2739 case VIEW_CONVERT_EXPR:
2740 return 1;
2741
91af9dc9
RG
2742 case BIT_FIELD_REF:
2743 return 3;
2744
5a7d7f9c
RG
2745 case CONSTRUCTOR:
2746 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2747
2748 default:
2749 return gimple_num_ops (stmt) - 1;
2750 }
2751}
2752
9ad6bebe
NF
2753/* Initialize VNO from STMT. */
2754
2755static void
355fe088 2756init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
9ad6bebe
NF
2757{
2758 unsigned i;
2759
2760 vno->opcode = gimple_assign_rhs_code (stmt);
9ad6bebe 2761 vno->type = gimple_expr_type (stmt);
5a7d7f9c
RG
2762 switch (vno->opcode)
2763 {
2764 case REALPART_EXPR:
2765 case IMAGPART_EXPR:
2766 case VIEW_CONVERT_EXPR:
2767 vno->length = 1;
2768 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2769 break;
2770
91af9dc9
RG
2771 case BIT_FIELD_REF:
2772 vno->length = 3;
2773 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2774 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2775 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2776 break;
2777
5a7d7f9c
RG
2778 case CONSTRUCTOR:
2779 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2780 for (i = 0; i < vno->length; ++i)
2781 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2782 break;
2783
2784 default:
91af9dc9 2785 gcc_checking_assert (!gimple_assign_single_p (stmt));
5a7d7f9c
RG
2786 vno->length = gimple_num_ops (stmt) - 1;
2787 for (i = 0; i < vno->length; ++i)
2788 vno->op[i] = gimple_op (stmt, i + 1);
2789 }
9ad6bebe
NF
2790}
2791
2792/* Compute the hashcode for VNO and look for it in the hash table;
2793 return the resulting value number if it exists in the hash table.
2794 Return NULL_TREE if it does not exist in the hash table or if the
2795 result field of the operation is NULL. VNRESULT will contain the
2796 vn_nary_op_t from the hashtable if it exists. */
2797
2798static tree
2799vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
c9145754 2800{
bf190e8d 2801 vn_nary_op_s **slot;
9ad6bebe 2802
c9145754
DB
2803 if (vnresult)
2804 *vnresult = NULL;
9ad6bebe
NF
2805
2806 vno->hashcode = vn_nary_op_compute_hash (vno);
c203e8a7
TS
2807 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2808 NO_INSERT);
c9145754 2809 if (!slot && current_info == optimistic_info)
c203e8a7
TS
2810 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2811 NO_INSERT);
c9145754
DB
2812 if (!slot)
2813 return NULL_TREE;
2814 if (vnresult)
bf190e8d
LC
2815 *vnresult = *slot;
2816 return (*slot)->result;
c9145754
DB
2817}
2818
9ad6bebe
NF
 2819/* Lookup an n-ary operation by its pieces and return the resulting value
2820 number if it exists in the hash table. Return NULL_TREE if it does
2821 not exist in the hash table or if the result field of the operation
2822 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2823 if it exists. */
2824
2825tree
2826vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
5a7d7f9c 2827 tree type, tree *ops, vn_nary_op_t *vnresult)
9ad6bebe 2828{
5a7d7f9c
RG
2829 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2830 sizeof_vn_nary_op (length));
2831 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2832 return vn_nary_op_lookup_1 (vno1, vnresult);
9ad6bebe
NF
2833}
2834
c9145754
DB
2835/* Lookup OP in the current hash table, and return the resulting value
2836 number if it exists in the hash table. Return NULL_TREE if it does
2837 not exist in the hash table or if the result field of the operation
2838 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2839 if it exists. */
2840
2841tree
2842vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
89fb70a3 2843{
5a7d7f9c
RG
2844 vn_nary_op_t vno1
2845 = XALLOCAVAR (struct vn_nary_op_s,
2846 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2847 init_vn_nary_op_from_op (vno1, op);
2848 return vn_nary_op_lookup_1 (vno1, vnresult);
89fb70a3
DB
2849}
2850
726a989a
RB
2851/* Lookup the rhs of STMT in the current hash table, and return the resulting
2852 value number if it exists in the hash table. Return NULL_TREE if
2853 it does not exist in the hash table. VNRESULT will contain the
2854 vn_nary_op_t from the hashtable if it exists. */
2855
2856tree
355fe088 2857vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
726a989a 2858{
5a7d7f9c
RG
2859 vn_nary_op_t vno1
2860 = XALLOCAVAR (struct vn_nary_op_s,
2861 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2862 init_vn_nary_op_from_stmt (vno1, stmt);
2863 return vn_nary_op_lookup_1 (vno1, vnresult);
9ad6bebe
NF
2864}
2865
2866/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2867
2868static vn_nary_op_t
2869alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2870{
2871 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2872}
2873
2874/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2875 obstack. */
2876
2877static vn_nary_op_t
2878alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2879{
2880 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2881 &current_info->nary_obstack);
2882
2883 vno1->value_id = value_id;
2884 vno1->length = length;
2885 vno1->result = result;
2886
2887 return vno1;
2888}
2889
2890/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2891 VNO->HASHCODE first. */
2892
2893static vn_nary_op_t
c203e8a7 2894vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
bf190e8d 2895 bool compute_hash)
9ad6bebe 2896{
bf190e8d 2897 vn_nary_op_s **slot;
9ad6bebe
NF
2898
2899 if (compute_hash)
2900 vno->hashcode = vn_nary_op_compute_hash (vno);
2901
c203e8a7 2902 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
41aa3a38
RB
 2903 /* While we do not want to insert things twice, it's awkward to
2904 avoid it in the case where visit_nary_op pattern-matches stuff
2905 and ends up simplifying the replacement to itself. We then
2906 get two inserts, one from visit_nary_op and one from
2907 vn_nary_build_or_lookup.
2908 So allow inserts with the same value number. */
2909 if (*slot && (*slot)->result == vno->result)
2910 return *slot;
2911
9ad6bebe
NF
2912 gcc_assert (!*slot);
2913
2914 *slot = vno;
2915 return vno;
726a989a
RB
2916}
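
/* Illustrative, standalone sketch (not part of SCCVN) of the tolerant
   insert above: re-inserting an entry is accepted when it carries the
   same result, and only a genuinely conflicting duplicate asserts.
   demo_* names are hypothetical.  */

#include <assert.h>
#include <stddef.h>

struct demo_slot { int used; int result; };

static struct demo_slot *
demo_insert (struct demo_slot *slot, int result)
{
  if (slot->used)
    {
      assert (slot->result == result);	/* Same value: harmless.  */
      return slot;
    }
  slot->used = 1;
  slot->result = result;
  return slot;
}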
2917
c9145754
DB
 2918/* Insert an n-ary operation into the current hash table using its
2919 pieces. Return the vn_nary_op_t structure we created and put in
2920 the hashtable. */
2921
2922vn_nary_op_t
2923vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
5a7d7f9c
RG
2924 tree type, tree *ops,
2925 tree result, unsigned int value_id)
c9145754 2926{
5a7d7f9c
RG
2927 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2928 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
9ad6bebe 2929 return vn_nary_op_insert_into (vno1, current_info->nary, true);
c9145754
DB
2930}
2931
89fb70a3 2932/* Insert OP into the current hash table with a value number of
c9145754
DB
2933 RESULT. Return the vn_nary_op_t structure we created and put in
2934 the hashtable. */
89fb70a3 2935
c9145754 2936vn_nary_op_t
49a1fb2d 2937vn_nary_op_insert (tree op, tree result)
89fb70a3 2938{
49a1fb2d 2939 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
49a1fb2d 2940 vn_nary_op_t vno1;
49a1fb2d 2941
9ad6bebe
NF
2942 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2943 init_vn_nary_op_from_op (vno1, op);
2944 return vn_nary_op_insert_into (vno1, current_info->nary, true);
89fb70a3
DB
2945}
2946
726a989a
RB
2947/* Insert the rhs of STMT into the current hash table with a value number of
2948 RESULT. */
2949
60dd79ca 2950static vn_nary_op_t
355fe088 2951vn_nary_op_insert_stmt (gimple *stmt, tree result)
726a989a 2952{
5a7d7f9c
RG
2953 vn_nary_op_t vno1
2954 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2955 result, VN_INFO (result)->value_id);
9ad6bebe
NF
2956 init_vn_nary_op_from_stmt (vno1, stmt);
2957 return vn_nary_op_insert_into (vno1, current_info->nary, true);
726a989a
RB
2958}
2959
89fb70a3
DB
2960/* Compute a hashcode for PHI operation VP1 and return it. */
2961
2962static inline hashval_t
2963vn_phi_compute_hash (vn_phi_t vp1)
2964{
e6503e0a
RB
2965 inchash::hash hstate (vp1->phiargs.length () > 2
2966 ? vp1->block->index : vp1->phiargs.length ());
89fb70a3 2967 tree phi1op;
1d295886 2968 tree type;
9fe4f60a
RB
2969 edge e;
2970 edge_iterator ei;
89fb70a3 2971
1d295886
RG
2972 /* If all PHI arguments are constants we need to distinguish
2973 the PHI node via its type. */
24d63016 2974 type = vp1->type;
4e44a6e8 2975 hstate.merge_hash (vn_hash_type (type));
1d295886 2976
9fe4f60a 2977 FOR_EACH_EDGE (e, ei, vp1->block->preds)
89fb70a3 2978 {
9fe4f60a
RB
 2979 /* Don't hash backedge values; they need to be handled as VN_TOP
2980 for optimistic value-numbering. */
2981 if (e->flags & EDGE_DFS_BACK)
2982 continue;
2983
2984 phi1op = vp1->phiargs[e->dest_idx];
89fb70a3
DB
2985 if (phi1op == VN_TOP)
2986 continue;
4e44a6e8 2987 inchash::add_expr (phi1op, hstate);
89fb70a3
DB
2988 }
2989
4e44a6e8 2990 return hstate.end ();
89fb70a3
DB
2991}
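
/* Illustrative, standalone sketch (not part of SCCVN) of the PHI
   hashing above: fold every argument into the hash except "unknown"
   slots (the analogue of VN_TOP and backedge values), so PHIs that
   differ only in not-yet-computed arguments can still land in the
   same bucket and be unified optimistically.  demo_* names are
   hypothetical; -1 stands in for VN_TOP.  */

#include <stddef.h>

static size_t
demo_phi_hash (const int *args, size_t nargs)
{
  size_t h = nargs;
  for (size_t i = 0; i < nargs; ++i)
    {
      if (args[i] == -1)	/* Skip VN_TOP-like arguments.  */
	continue;
      h = h * 31 + (size_t) args[i];
    }
  return h;
}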
2992
e6503e0a 2993
87961d1b
RB
2994/* Return true if COND1 and COND2 represent the same condition, set
2995 *INVERTED_P if one needs to be inverted to make it the same as
2996 the other. */
2997
2998static bool
8ee1b0a0
RB
2999cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3000 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
87961d1b
RB
3001{
3002 enum tree_code code1 = gimple_cond_code (cond1);
3003 enum tree_code code2 = gimple_cond_code (cond2);
87961d1b
RB
3004
3005 *inverted_p = false;
3006 if (code1 == code2)
3007 ;
3008 else if (code1 == swap_tree_comparison (code2))
3009 std::swap (lhs2, rhs2);
3010 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3011 *inverted_p = true;
3012 else if (code1 == invert_tree_comparison
3013 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3014 {
3015 std::swap (lhs2, rhs2);
3016 *inverted_p = true;
3017 }
3018 else
3019 return false;
3020
94852c8e
RB
3021 return ((expressions_equal_p (lhs1, lhs2)
3022 && expressions_equal_p (rhs1, rhs2))
3023 || (commutative_tree_code (code1)
3024 && expressions_equal_p (lhs1, rhs2)
3025 && expressions_equal_p (rhs1, lhs2)));
87961d1b
RB
3026}
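
/* Illustrative, standalone sketch (not part of SCCVN) of the
   condition matching above, restricted to integer comparisons where
   inversion is always valid (no NaNs): two conditions are the same
   directly, after swapping operands (a < b == b > a), or inverted
   (a < b == !(a >= b)).  demo_* names are hypothetical.  */

typedef enum { DEMO_LT, DEMO_LE, DEMO_GT, DEMO_GE } demo_cmp;

/* a OP b rewritten as b OP' a.  */
static demo_cmp
demo_swap (demo_cmp c)
{
  switch (c)
    {
    case DEMO_LT: return DEMO_GT;
    case DEMO_LE: return DEMO_GE;
    case DEMO_GT: return DEMO_LT;
    default:	  return DEMO_LE;	/* DEMO_GE */
    }
}

/* !(a OP b) rewritten as a OP' b.  */
static demo_cmp
demo_invert (demo_cmp c)
{
  switch (c)
    {
    case DEMO_LT: return DEMO_GE;
    case DEMO_LE: return DEMO_GT;
    case DEMO_GT: return DEMO_LE;
    default:	  return DEMO_LT;	/* DEMO_GE */
    }
}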
3027
89fb70a3
DB
3028/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3029
3030static int
bf190e8d 3031vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
89fb70a3 3032{
85169114
PB
3033 if (vp1->hashcode != vp2->hashcode)
3034 return false;
3035
e6503e0a 3036 if (vp1->block != vp2->block)
89fb70a3 3037 {
e6503e0a 3038 if (vp1->phiargs.length () != vp2->phiargs.length ())
1d295886
RG
3039 return false;
3040
e6503e0a 3041 switch (vp1->phiargs.length ())
89fb70a3 3042 {
e6503e0a
RB
3043 case 1:
3044 /* Single-arg PHIs are just copies. */
3045 break;
3046
3047 case 2:
3048 {
3049 /* Rule out backedges into the PHI. */
3050 if (vp1->block->loop_father->header == vp1->block
3051 || vp2->block->loop_father->header == vp2->block)
3052 return false;
3053
3054 /* If the PHI nodes do not have compatible types
3055 they are not the same. */
3056 if (!types_compatible_p (vp1->type, vp2->type))
3057 return false;
3058
3059 basic_block idom1
3060 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3061 basic_block idom2
3062 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3063 /* If the immediate dominator end in switch stmts multiple
3064 values may end up in the same PHI arg via intermediate
3065 CFG merges. */
3066 if (EDGE_COUNT (idom1->succs) != 2
3067 || EDGE_COUNT (idom2->succs) != 2)
3068 return false;
3069
3070 /* Verify the controlling stmt is the same. */
2a900079
RB
3071 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3072 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3073 if (! last1 || ! last2)
e6503e0a 3074 return false;
87961d1b 3075 bool inverted_p;
2a900079
RB
3076 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3077 last2, vp2->cclhs, vp2->ccrhs,
8ee1b0a0 3078 &inverted_p))
e6503e0a
RB
3079 return false;
3080
3081 /* Get at true/false controlled edges into the PHI. */
3082 edge te1, te2, fe1, fe2;
3083 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3084 &te1, &fe1)
3085 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3086 &te2, &fe2))
3087 return false;
3088
87961d1b
RB
3089 /* Swap edges if the second condition is the inverted of the
3090 first. */
3091 if (inverted_p)
3092 std::swap (te2, fe2);
3093
e6503e0a
RB
3094 /* ??? Handle VN_TOP specially. */
3095 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3096 vp2->phiargs[te2->dest_idx])
3097 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3098 vp2->phiargs[fe2->dest_idx]))
3099 return false;
3100
3101 return true;
3102 }
3103
3104 default:
3105 return false;
89fb70a3 3106 }
89fb70a3 3107 }
e6503e0a
RB
3108
3109 /* If the PHI nodes do not have compatible types
3110 they are not the same. */
3111 if (!types_compatible_p (vp1->type, vp2->type))
3112 return false;
3113
 3114 /* Any phi in the same block will have its arguments in the
3115 same edge order, because of how we store phi nodes. */
3116 int i;
3117 tree phi1op;
3118 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3119 {
3120 tree phi2op = vp2->phiargs[i];
3121 if (phi1op == VN_TOP || phi2op == VN_TOP)
3122 continue;
3123 if (!expressions_equal_p (phi1op, phi2op))
3124 return false;
3125 }
3126
3127 return true;
89fb70a3
DB
3128}
3129
9771b263 3130static vec<tree> shared_lookup_phiargs;
89fb70a3
DB
3131
3132/* Lookup PHI in the current hash table, and return the resulting
3133 value number if it exists in the hash table. Return NULL_TREE if
3134 it does not exist in the hash table. */
3135
de081cfd 3136static tree
355fe088 3137vn_phi_lookup (gimple *phi)
89fb70a3 3138{
bf190e8d 3139 vn_phi_s **slot;
89fb70a3 3140 struct vn_phi_s vp1;
9fe4f60a
RB
3141 edge e;
3142 edge_iterator ei;
89fb70a3 3143
9771b263 3144 shared_lookup_phiargs.truncate (0);
9fe4f60a 3145 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
89fb70a3
DB
3146
3147 /* Canonicalize the SSA_NAME's to their value number. */
9fe4f60a 3148 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
89fb70a3 3149 {
9fe4f60a 3150 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
89fb70a3 3151 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
9fe4f60a 3152 shared_lookup_phiargs[e->dest_idx] = def;
89fb70a3 3153 }
24d63016 3154 vp1.type = TREE_TYPE (gimple_phi_result (phi));
89fb70a3 3155 vp1.phiargs = shared_lookup_phiargs;
726a989a 3156 vp1.block = gimple_bb (phi);
8ee1b0a0
RB
3157 /* Extract values of the controlling condition. */
3158 vp1.cclhs = NULL_TREE;
3159 vp1.ccrhs = NULL_TREE;
3160 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3161 if (EDGE_COUNT (idom1->succs) == 2)
2a900079 3162 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
8ee1b0a0
RB
3163 {
3164 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3165 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3166 }
89fb70a3 3167 vp1.hashcode = vn_phi_compute_hash (&vp1);
c203e8a7
TS
3168 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3169 NO_INSERT);
27fa4044 3170 if (!slot && current_info == optimistic_info)
c203e8a7
TS
3171 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3172 NO_INSERT);
89fb70a3
DB
3173 if (!slot)
3174 return NULL_TREE;
bf190e8d 3175 return (*slot)->result;
89fb70a3
DB
3176}
3177
3178/* Insert PHI into the current hash table with a value number of
3179 RESULT. */
3180
c9145754 3181static vn_phi_t
355fe088 3182vn_phi_insert (gimple *phi, tree result)
89fb70a3 3183{
bf190e8d 3184 vn_phi_s **slot;
af6a6eec 3185 vn_phi_t vp1 = current_info->phis_pool->allocate ();
6e1aa848 3186 vec<tree> args = vNULL;
9fe4f60a
RB
3187 edge e;
3188 edge_iterator ei;
3189
3190 args.safe_grow (gimple_phi_num_args (phi));
89fb70a3
DB
3191
3192 /* Canonicalize the SSA_NAME's to their value number. */
9fe4f60a 3193 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
89fb70a3 3194 {
9fe4f60a 3195 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
89fb70a3 3196 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
9fe4f60a 3197 args[e->dest_idx] = def;
89fb70a3 3198 }
c9145754 3199 vp1->value_id = VN_INFO (result)->value_id;
24d63016 3200 vp1->type = TREE_TYPE (gimple_phi_result (phi));
89fb70a3 3201 vp1->phiargs = args;
726a989a 3202 vp1->block = gimple_bb (phi);
8ee1b0a0
RB
3203 /* Extract values of the controlling condition. */
3204 vp1->cclhs = NULL_TREE;
3205 vp1->ccrhs = NULL_TREE;
3206 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3207 if (EDGE_COUNT (idom1->succs) == 2)
2a900079 3208 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
8ee1b0a0
RB
3209 {
3210 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3211 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3212 }
89fb70a3
DB
3213 vp1->result = result;
3214 vp1->hashcode = vn_phi_compute_hash (vp1);
3215
c203e8a7 3216 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
89fb70a3
DB
3217
3218 /* Because we iterate over phi operations more than once, it's
 3219 possible the slot might already exist here, hence no assert. */
3220 *slot = vp1;
c9145754 3221 return vp1;
89fb70a3
DB
3222}
3223
3224
3225/* Print set of components in strongly connected component SCC to OUT. */
3226
3227static void
9771b263 3228print_scc (FILE *out, vec<tree> scc)
89fb70a3
DB
3229{
3230 tree var;
3231 unsigned int i;
3232
24c40f9a 3233 fprintf (out, "SCC consists of %u:", scc.length ());
9771b263 3234 FOR_EACH_VEC_ELT (scc, i, var)
89fb70a3 3235 {
89fb70a3 3236 fprintf (out, " ");
ef6cb4c7 3237 print_generic_expr (out, var);
89fb70a3
DB
3238 }
3239 fprintf (out, "\n");
3240}

/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists an
     (executable) path from bb2 to bb1 at all; if not, we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
        if (e->flags & EDGE_EXECUTABLE)
          {
            if (prede)
              {
                prede = NULL;
                break;
              }
            prede = e;
          }
      if (prede)
        {
          bb1 = prede->src;

          /* Re-do the dominance check with changed bb1.  */
          if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
            return true;
        }
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
        if (succe)
          {
            succe = NULL;
            break;
          }
        succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
         If there is only one edge we can spare us the dominator
         check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
        {
          FOR_EACH_EDGE (e, ei, succe->dest->preds)
            if (e != succe
                && (e->flags & EDGE_EXECUTABLE))
              {
                succe = NULL;
                break;
              }
        }
      if (succe)
        {
          bb2 = succe->dest;

          /* Re-do the dominance check with changed bb2.  */
          if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
            return true;
        }
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
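
/* For illustration (a sketch, not from a particular testcase): if bb1
   has the two predecessors bb4 and bb5 but the edge bb5->bb1 has been
   determined not executable, then bb4 does not dominate bb1 in the
   plain CFG sense even though every executable path to bb1 runs
   through bb4.  The first loop above rewrites bb1 to bb4 and re-asks
   the dominance question, which then succeeds.  */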

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  gcc_assert (to != NULL_TREE
              && ((TREE_CODE (to) == SSA_NAME
                   && (to == from || SSA_VAL (to) == to))
                  || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      else if (currval != VN_TOP
               && ! is_gimple_min_invariant (currval)
               && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-constant) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (constant)\n");
            }
          to = from;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
           && TREE_CODE (to) == SSA_NAME
           && ssa_undefined_value_p (currval, false)
           && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");

      /* If we equate two SSA names we have to make the side-band info
         of the leader conservative (and remember whatever original value
         was present).  */
      if (TREE_CODE (to) == SSA_NAME)
        {
          if (INTEGRAL_TYPE_P (TREE_TYPE (to))
              && SSA_NAME_RANGE_INFO (to))
            {
              if (SSA_NAME_IS_DEFAULT_DEF (to)
                  || dominated_by_p_w_unex
                        (gimple_bb (SSA_NAME_DEF_STMT (from)),
                         gimple_bb (SSA_NAME_DEF_STMT (to))))
                /* Keep the info from the dominator.  */
                ;
              else
                {
                  /* Save old info.  */
                  if (! VN_INFO (to)->info.range_info)
                    {
                      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
                      VN_INFO (to)->range_info_anti_range_p
                        = SSA_NAME_ANTI_RANGE_P (to);
                    }
                  /* Rather than allocating memory and unioning the info
                     just clear it.  */
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "clearing range info of ");
                      print_generic_expr (dump_file, to);
                      fprintf (dump_file, "\n");
                    }
                  SSA_NAME_RANGE_INFO (to) = NULL;
                }
            }
          else if (POINTER_TYPE_P (TREE_TYPE (to))
                   && SSA_NAME_PTR_INFO (to))
            {
              if (SSA_NAME_IS_DEFAULT_DEF (to)
                  || dominated_by_p_w_unex
                        (gimple_bb (SSA_NAME_DEF_STMT (from)),
                         gimple_bb (SSA_NAME_DEF_STMT (to))))
                /* Keep the info from the dominator.  */
                ;
              else if (! SSA_NAME_PTR_INFO (from)
                       /* Handle the case of trivially equivalent info.  */
                       || memcmp (SSA_NAME_PTR_INFO (to),
                                  SSA_NAME_PTR_INFO (from),
                                  sizeof (ptr_info_def)) != 0)
                {
                  /* Save old info.  */
                  if (! VN_INFO (to)->info.ptr_info)
                    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
                  /* Rather than allocating memory and unioning the info
                     just clear it.  */
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "clearing points-to info of ");
                      print_generic_expr (dump_file, to);
                      fprintf (dump_file, "\n");
                    }
                  SSA_NAME_PTR_INFO (to) = NULL;
                }
            }
        }

      VN_INFO (from)->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
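
/* A small worked example of the transition rules above (illustrative
   names): once i_1 carries a non-constant value number such as j_2, a
   later attempt to change it to the constant 0 is rejected and i_1 is
   forced to VARYING (i.e. to itself) instead; allowing
   non-constant -> constant transitions could make the optimistic
   iteration oscillate and never converge.  */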

/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
    }
}

/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Lookup a value for OP in type WIDE_TYPE where the value in the type
   of OP is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
                                       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
        {
          tem = gimple_assign_rhs1 (def);
          if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
            {
              if (TREE_CODE (tem) == SSA_NAME)
                tem = SSA_VAL (tem);
              return tem;
            }
        }
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
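
/* For example (illustrative GIMPLE, names hypothetical): given an
   existing widening

     _1 = (int) s_2;

   valueized_wider_op (int, s_2) returns _1 via the first lookup,
   while for a truncation

     s_3 = (short) _4;

   valueized_wider_op (int, s_3) returns the value of _4 directly,
   and an INTEGER_CST op is simply extended to WIDE_TYPE.  */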

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
         substitute the result from some earlier sign-changed or widened
         operation.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (rhs1) == SSA_NAME
          /* We only handle sign-changes or zero-extension -> & mask.  */
          && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
               && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
              || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
        {
          gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
          if (def
              && (gimple_assign_rhs_code (def) == PLUS_EXPR
                  || gimple_assign_rhs_code (def) == MINUS_EXPR
                  || gimple_assign_rhs_code (def) == MULT_EXPR))
            {
              tree ops[3] = {};
              /* Either we have the op widened available.  */
              ops[0] = valueized_wider_op (type,
                                           gimple_assign_rhs1 (def));
              if (ops[0])
                ops[1] = valueized_wider_op (type,
                                             gimple_assign_rhs2 (def));
              if (ops[0] && ops[1])
                {
                  ops[0] = vn_nary_op_lookup_pieces
                      (2, gimple_assign_rhs_code (def), type, ops, NULL);
                  /* We have the wider operation available.  */
                  if (ops[0])
                    {
                      unsigned lhs_prec = TYPE_PRECISION (type);
                      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
                      if (lhs_prec == rhs_prec)
                        {
                          ops[1] = NULL_TREE;
                          result = vn_nary_build_or_lookup (NOP_EXPR,
                                                            type, ops);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                      else
                        {
                          ops[1] = wide_int_to_tree (type,
                                                     wi::mask (rhs_prec, false,
                                                               lhs_prec));
                          result = vn_nary_build_or_lookup (BIT_AND_EXPR,
                                                            TREE_TYPE (lhs),
                                                            ops);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                    }
                }
            }
        }
    default:;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
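
/* A concrete (illustrative) instance of the CASE_CONVERT handling
   above: for

     _1 = a_2 + b_3;		(done in unsigned char)
     _4 = (unsigned int) _1;

   when the addition is also available valueized in unsigned int, _4
   is value numbered to that wider result masked with 0xff, i.e. the
   wider operation truncated to the precision of the narrow type.  */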

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
        /* If the call was discovered to be pure or const reflect
           that as far as possible.  */
        changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
        {
          /* If we value numbered an indirect call's function to one
             not clobbering memory, value number its VDEF to its
             VUSE.  */
          tree fn = gimple_call_fn (stmt);
          if (fn && TREE_CODE (fn) == SSA_NAME)
            {
              fn = SSA_VAL (fn);
              if (TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
                      & (ECF_CONST | ECF_PURE)))
                vdef_val = vuse_ssa_val (gimple_vuse (stmt));
            }
          changed |= set_ssa_val_to (vdef, vdef_val);
        }
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = current_info->references_pool->allocate ();
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                            INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
                                default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
         of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
         So first simplify and lookup this expression to see if it
         is already available.  */
      code_helper rcode = VIEW_CONVERT_EXPR;
      tree ops[3] = { result };
      result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
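
/* An illustration of the type-punning case above (sketch only): for

     union { int i; float f; } u;
     u.i = 0;
     x_1 = u.f;

   the load of u.f finds the store to u.i based on offset and size,
   and the mismatched types are reconciled by value numbering
   VIEW_CONVERT_EXPR <float> (0) instead of the raw lookup result.  */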

/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     whether the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

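  /* For example (an illustrative sketch): in

       *p_1 = x_2;   <-- VUSE .MEM_3, VDEF .MEM_4
       ...           <-- no intervening clobber of *p_1
       *p_1 = x_2;   <-- VUSE .MEM_4, VDEF .MEM_5

     the second store finds the first via its VUSE and stores the same
     value, so .MEM_5 is value numbered to .MEM_4 and the second store
     can later be removed as redundant.  */
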
  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      if (TREE_CODE (result) == SSA_NAME)
        result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
        {
          /* If the TBAA state isn't compatible for downstream reads
             we cannot value-number the VDEFs the same.  */
          alias_set_type set = get_alias_set (lhs);
          if (vnresult->set != set
              && ! alias_set_subset_of (set, vnresult->set))
            resultsame = false;
        }
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          if (vnresult)
            {
              VN_INFO (vdef)->use_processed = true;
              return set_ssa_val_to (vdef, vnresult->result_vdef);
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple *phi)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  unsigned n_executable = 0;
  bool allsame = true;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        ++n_executable;
        if (TREE_CODE (def) == SSA_NAME)
          def = SSA_VAL (def);
        if (def == VN_TOP)
          ;
        /* Ignore undefined defs for sameval but record one.  */
        else if (TREE_CODE (def) == SSA_NAME
                 && ssa_undefined_value_p (def, false))
          seen_undef = def;
        else if (sameval == VN_TOP)
          sameval = def;
        else if (!expressions_equal_p (def, sameval))
          {
            allsame = false;
            break;
          }
      }

  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi)))
    ;
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (allsame
           && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
         we mess up equivalences otherwise as we are only comparing
         the immediate controlling predicates.  */
      vn_phi_insert (phi, result);
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
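
/* Example for the undefined-value handling above (illustrative
   names): in

     # i_1 = PHI <5(2), j_2(D)(3)>

   with j_2(D) undefined, i_1 is value numbered to the constant 5.
   Had the defined arguments agreed on an SSA name instead of a
   constant, the PHI would keep its own value number so that later
   uninitialized-use warnings are not lost.  */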

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  mprts_hook_cnt = 9;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple *stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
              && !SSA_NAME_IS_DEFAULT_DEF (use));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, rhs1);
          goto done;
        }
      simplified = try_to_simplify (ass);
      if (simplified)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, ass, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified);
              fprintf (dump_file, "\n");
            }
        }
      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
         looked up.  */
      if (simplified
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = set_ssa_val_to (lhs, simplified);
          goto done;
        }
      else if (simplified
               && TREE_CODE (simplified) == SSA_NAME
               && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, simplified);
          goto done;
        }

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
           && !(simplified
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
               || DECL_P (lhs))
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
        {
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
              || (simplified
                  && is_gimple_min_invariant (simplified)))
            {
              if (simplified)
                changed = set_ssa_val_to (lhs, simplified);
              else
                changed = set_ssa_val_to (lhs, rhs1);
            }
          else
            {
              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                {
                case VN_NARY:
                  changed = visit_nary_op (lhs, ass);
                  break;
                case VN_REFERENCE:
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  break;
                default:
                  changed = defs_to_varying (ass);
                  break;
                }
            }
        }
      else
        changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        {
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
                                                            vn_valueize);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "call ");
                  print_gimple_expr (dump_file, call_stmt, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified);
                  fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified))
            {
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            {
              changed = defs_to_varying (call_stmt);
              goto done;
            }
        }

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
        {
          fn = SSA_VAL (fn);
          if (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
            extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
        }
      if (!gimple_call_internal_p (call_stmt)
          && (/* Calls to the same function with the same vuse
                 and the same operands do not necessarily return the same
                 value, unless they're pure or const.  */
              ((gimple_call_flags (call_stmt) | extra_fnflags)
               & (ECF_PURE | ECF_CONST))
              /* If calls have a vdef, subsequent calls won't have
                 the same incoming vuse.  So, if 2 calls with vdef have the
                 same vuse, we know they're not subsequent.
                 We can value number 2 calls to the same function with the
                 same vuse and the same operands which are not subsequent
                 the same, because there is no code in the program that can
                 compare the 2 values...  */
              || (gimple_vdef (call_stmt)
                  /* ... unless the call returns a pointer which does
                     not alias with anything else.  In which case the
                     information that the values are distinct is encoded
                     in the IL.  */
                  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                  /* Only perform the following when being called from PRE
                     which embeds tail merging.  */
                  && default_vn_walk_kind == VN_WALK)))
        changed = visit_reference_op_call (lhs, call_stmt);
      else
        changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}

/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple *opstmta = SSA_NAME_DEF_STMT (opa);
  gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
        return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
        return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}

/* Insert the no longer used nary ONARY into the hash table INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
                                               &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no longer used phi OPHI into the hash table INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = info->phis_pool->allocate ();
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
  ophi->phiargs.create (0);
  slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no longer used reference OREF into the hash table INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = info->references_pool->allocate ();
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
        return;
      /* We need to make sure it doesn't form a cycle itself, which can
         happen for self-referential PHI nodes.  In that case we would
         end up inserting an expression with VN_TOP operands into the
         valid table which makes us derive bogus equivalences later.
         The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
        /* Fallthru to iteration.  */ ;
      else
        {
          visit_use (use);
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
         clear the expression tables and the simplified expressions
         in each iteration until we converge.  */
      optimistic_info->nary->empty ();
      optimistic_info->phis->empty ();
      optimistic_info->references->empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      optimistic_info->phis_pool->release ();
      optimistic_info->references_pool->release ();
      FOR_EACH_VEC_ELT (scc, i, var)
        gcc_assert (!VN_INFO (var)->needs_insertion
                    && VN_INFO (var)->expr == NULL);
      FOR_EACH_VEC_ELT (scc, i, var)
        changed |= visit_use (var);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
                               ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}
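
/* A sketch of the optimistic iteration (illustrative GIMPLE): for the
   cycle

     # i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 0;

   the first iteration optimistically assumes i_1 == 0 and derives
   i_2 == 0; the second iteration confirms the assumption, so both
   names converge to the value 0.  A purely pessimistic walk would
   miss this equivalence.  */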


/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  */

static void
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);

  /* Drop all defs in the SCC to varying in case an SCC turns out to be
     incredibly large.
     ??? Just switch to a non-optimistic mode that avoids any iteration.  */
  if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        {
          print_scc (dump_file, scc);
          fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
                   "size %u exceeding %u\n", scc.length (),
                   (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
        }
      tree var;
      unsigned i;
      FOR_EACH_VEC_ELT (scc, i, var)
        {
          gimple *def = SSA_NAME_DEF_STMT (var);
          mark_use_processed (var);
          if (SSA_NAME_IS_DEFAULT_DEF (var)
              || gimple_code (def) == GIMPLE_PHI)
            set_ssa_val_to (var, var);
          else
            defs_to_varying (def);
        }
      return;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  process_scc (scc);
}

/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.  */

static void
DFS (tree name)
{
  auto_vec<ssa_op_iter> itervec;
  auto_vec<tree> namevec;
  use_operand_p usep = NULL;
  gimple *defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gphi *phi = dyn_cast <gphi *> (defstmt))
        usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            extract_and_process_scc_for_name (name);

          /* Check if we are done.  */
          if (namevec.is_empty ())
            return;

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = namevec.pop ();
          memcpy (&iter, &itervec.last (),
                  sizeof (ssa_op_iter));
          itervec.pop ();
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              itervec.safe_push (iter);
              namevec.safe_push (name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }
          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = new vn_phi_table_type (23);
  table->nary = new vn_nary_op_table_type (23);
  table->references = new vn_reference_table_type (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
  table->references_pool = new object_allocator<vn_reference_s>
    ("VN references");
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
  obstack_free (&table->nary_obstack, NULL);
  delete table->phis_pool;
  delete table->references_pool;
}

static void
init_scc_vn (void)
{
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  mark_dfs_back_edges ();

  sccstack.create (0);
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* create doesn't actually grow the vector to the requested size,
     it just preallocates the space to do so, so grow it explicitly.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp is an array of the RPO ordering, rpo[i] = bb means
     that the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                       get_identifier ("VN_TOP"), error_mark_node);

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
  current_info = valid_info;

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP or VARYING for parameters.  */
  size_t i;
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      VN_INFO_GET (name)->valnum = VN_TOP;
      VN_INFO (name)->needs_insertion = false;
      VN_INFO (name)->expr = NULL;
      VN_INFO (name)->value_id = 0;

      if (!SSA_NAME_IS_DEFAULT_DEF (name))
        continue;

      switch (TREE_CODE (SSA_NAME_VAR (name)))
        {
        case VAR_DECL:
          /* All undefined vars are VARYING.  */
          VN_INFO (name)->valnum = name;
          VN_INFO (name)->visited = true;
          break;

        case PARM_DECL:
          /* Parameters are VARYING but we can record a condition
             if we know it is a non-NULL pointer.  */
          VN_INFO (name)->visited = true;
          VN_INFO (name)->valnum = name;
          if (POINTER_TYPE_P (TREE_TYPE (name))
              && nonnull_arg_p (SSA_NAME_VAR (name)))
            {
              tree ops[2];
              ops[0] = name;
              ops[1] = build_int_cst (TREE_TYPE (name), 0);
              vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
                                        boolean_true_node, 0);
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Recording ");
                  print_generic_expr (dump_file, name, TDF_SLIM);
                  fprintf (dump_file, " != 0\n");
                }
            }
          break;

        case RESULT_DECL:
          /* If the result is passed by invisible reference the default
             def is initialized, otherwise it's uninitialized.  Still
             undefined is varying.  */
          VN_INFO (name)->visited = true;
          VN_INFO (name)->valnum = name;
          break;

        default:
          gcc_unreachable ();
        }
    }
}
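
/* For instance (an illustrative sketch): given

     void f (int *p) __attribute__((nonnull));

   the PARM_DECL handling above records the nary expression
   p_1(D) != 0 with result true, so a later lookup of that comparison
   during value numbering folds it to constant true.  */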

/* Restore SSA info that has been reset on value leaders.  */

void
scc_vn_restore_ssa_info (void)
{
  unsigned i;
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      if (has_VN_INFO (name))
        {
          if (VN_INFO (name)->needs_insertion)
            ;
          else if (POINTER_TYPE_P (TREE_TYPE (name))
                   && VN_INFO (name)->info.ptr_info)
            SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
          else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
                   && VN_INFO (name)->info.range_info)
            {
              SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
              SSA_NAME_ANTI_RANGE_P (name)
                = VN_INFO (name)->range_info_anti_range_p;
            }
        }
    }
}

void
free_scc_vn (void)
{
  size_t i;
  tree name;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      if (has_VN_INFO (name)
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);

  BITMAP_FREE (const_parms);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

class sccvn_dom_walker : public dom_walker
{
public:
  sccvn_dom_walker ()
    : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  void record_cond (basic_block,
                    enum tree_code code, tree lhs, tree rhs, bool value);
  void record_conds (basic_block,
                     enum tree_code code, tree lhs, tree rhs, bool value);

  auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
    cond_stack;
};

/* Record a temporary condition for the BB and its dominated blocks.  */

void
sccvn_dom_walker::record_cond (basic_block bb,
                               enum tree_code code, tree lhs, tree rhs,
                               bool value)
{
  tree ops[2] = { lhs, rhs };
  vn_nary_op_t old = NULL;
  if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
    current_info->nary->remove_elt_with_hash (old, old->hashcode);
  vn_nary_op_t cond
    = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
                                value
                                ? boolean_true_node
                                : boolean_false_node, 0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording temporarily ");
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s%s\n",
               value ? "true" : "false",
               old ? " (old entry saved)" : "");
    }
  cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
}

/* Record temporary conditions for the BB and its dominated blocks
   according to LHS CODE RHS == VALUE and its dominated conditions.  */

void
sccvn_dom_walker::record_conds (basic_block bb,
                                enum tree_code code, tree lhs, tree rhs,
                                bool value)
{
  /* Record the original condition.  */
  record_cond (bb, code, lhs, rhs, value);

  if (!value)
    return;

  /* Record dominated conditions if the condition is true.  Note that
     the inversion is already recorded.  */
  switch (code)
    {
    case LT_EXPR:
    case GT_EXPR:
      record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
      record_cond (bb, NE_EXPR, lhs, rhs, true);
      record_cond (bb, EQ_EXPR, lhs, rhs, false);
      break;

    case EQ_EXPR:
      record_cond (bb, LE_EXPR, lhs, rhs, true);
      record_cond (bb, GE_EXPR, lhs, rhs, true);
      record_cond (bb, LT_EXPR, lhs, rhs, false);
      record_cond (bb, GT_EXPR, lhs, rhs, false);
      break;

    default:
      break;
    }
}

/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
         && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
        vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}

/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge_iterator ei;
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
	if (e2 != pred_e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple *stmt = last_stmt (pred_e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      record_conds (bb, code, lhs, rhs,
			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited)
	DFS (res);
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited)
	  DFS (op);
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
	val = gimple_simplify (gimple_cond_code (stmt),
			       boolean_type_node, lhs, rhs,
			       NULL, vn_valueize);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = lhs;
	    ops[1] = rhs;
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}
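
/* For example (a sketch, not from the sources): if value numbering has
   shown that i_1 always has the value 0, then for a block ending in

     if (i_1 != 0)
       goto <bb 4>;
     else
       goto <bb 5>;

   the condition simplifies to false, find_taken_edge returns the edge
   to <bb 5>, and returning that edge makes the domwalk mark the edge
   to <bb 4> as not executable.  Block and name numbers are invented
   for the example.  */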

/* Do SCCVN.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
   oracle walking during the VN process.  */

void
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
	   arg; arg = DECL_CHAIN (arg), ++i)
	{
	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
	    {
	      tree name = ssa_default_def (cfun, arg);
	      if (name)
		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
	    }
	}
    }

  /* Walk all blocks in dominator order, value-numbering the SSA defs
     of stmts and deciding whether outgoing edges are executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, "\n");
	    }
	}
    }
}
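
/* A note on the value-id scheme above (illustrative, names invented):
   names whose canonical value is a constant share that constant's
   value id, so with a_1 = 5 and b_2 = 5 both names receive the value
   id allocated for 5; a name whose valnum is another SSA name instead
   inherits that name's value id in the propagation loop.  */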

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
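
/* For instance (illustrative): an integral TRUNC_DIV_EXPR nary is
   reported as potentially trapping unless its divisor op[1] is a
   constant known to be nonzero, while a wrapping unsigned PLUS_EXPR
   does not trap by itself and only its operands are checked with
   tree_could_trap_p.  */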

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  tree eliminate_avail (tree op);
  void eliminate_push_avail (tree op);
  tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
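
/* Illustration of the avail/avail_stack pairing (a sketch, names
   invented): assume x_5 and x_9 both have valnum x_5 and x_5 is the
   current leader.  Pushing x_9 saves the shadowed leader x_5 on
   avail_stack and makes x_9 the leader; when after_dom_children below
   unwinds the block it pops x_5, sees it differs from the recorded
   leader and restores avail[SSA_NAME_VERSION (x_5)] to x_5.  */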

/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to something not defined in stmts.
	 But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }
  else
    {
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      VN_INFO_GET (res)->valnum = val;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
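
/* Example of the above (a sketch, names invented): if VAL was recorded
   with the single-assignment expression

     _7 = (unsigned int) i_3;

   and i_3 has an available leader, eliminate_insert materializes a
   fresh conversion of that leader before *GSI, sets the new name's
   valnum to VAL and returns it so the caller can use it as the
   replacement.  */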

/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip blocks marked unreachable during the SCCVN domwalk.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->preds)
    if (e->flags & EDGE_EXECUTABLE)
      break;
  if (! e)
    return NULL;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourselves, it's not an
	     elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree sprime = NULL_TREE;
      gimple *stmt = gsi_stmt (gsi);
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
	  && !gimple_has_volatile_ops (stmt)
	  /* See PR43491.  Do not replace a global register variable when
	     it is the RHS of an assignment.  Do replace local register
	     variables since gcc does not guarantee a local variable will
	     be allocated in a register.
	     ??? The fix isn't effective here.  This should instead
	     be ensured by not value-numbering them the same but treating
	     them like volatiles?  */
	  && !(gimple_assign_single_p (stmt)
	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
		   && is_global_var (gimple_assign_rhs1 (stmt)))))
	{
	  sprime = eliminate_avail (lhs);
	  if (!sprime)
	    {
	      /* If there is no existing usable leader but SCCVN thinks
		 it has an expression it wants to use as replacement,
		 insert that.  */
	      tree val = VN_INFO (lhs)->valnum;
	      if (val != VN_TOP
		  && TREE_CODE (val) == SSA_NAME
		  && VN_INFO (val)->needs_insertion
		  && VN_INFO (val)->expr != NULL
		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
		eliminate_push_avail (sprime);
	    }

	  /* If this now constitutes a copy, duplicate points-to
	     and range info appropriately.  This is especially
	     important for inserted code.  See tree-ssa-copy.c
	     for similar code.  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME)
	    {
	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && VN_INFO_PTR_INFO (lhs)
		  && ! VN_INFO_PTR_INFO (sprime))
		{
		  duplicate_ssa_name_ptr_info (sprime,
					       VN_INFO_PTR_INFO (lhs));
		  if (b != sprime_b)
		    mark_ptr_info_alignment_unknown
			(SSA_NAME_PTR_INFO (sprime));
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		       && VN_INFO_RANGE_INFO (lhs)
		       && ! VN_INFO_RANGE_INFO (sprime)
		       && b == sprime_b)
		duplicate_ssa_name_range_info (sprime,
					       VN_INFO_RANGE_TYPE (lhs),
					       VN_INFO_RANGE_INFO (lhs));
	    }

	  /* Inhibit the use of an inserted PHI on a loop header when
	     the address of the memory reference is a simple induction
	     variable.  In other cases the vectorizer won't do anything
	     anyway (either it's loop invariant or a complicated
	     expression).  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME
	      && do_pre
	      && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	      && loop_outer (b->loop_father)
	      && has_zero_uses (sprime)
	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	      && gimple_assign_load_p (stmt))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	      basic_block def_bb = gimple_bb (def_stmt);
	      if (gimple_code (def_stmt) == GIMPLE_PHI
		  && def_bb->loop_father->header == def_bb)
		{
		  loop_p loop = def_bb->loop_father;
		  ssa_op_iter iter;
		  tree op;
		  bool found = false;
		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		    {
		      affine_iv iv;
		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		      if (def_bb
			  && flow_bb_inside_loop_p (loop, def_bb)
			  && simple_iv (loop, loop, op, &iv, true))
			{
			  found = true;
			  break;
			}
		    }
		  if (found)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Not replacing ");
			  print_gimple_expr (dump_file, stmt, 0);
			  fprintf (dump_file, " with ");
			  print_generic_expr (dump_file, sprime);
			  fprintf (dump_file, " which would add a loop"
				   " carried dependence to loop %d\n",
				   loop->num);
			}
		      /* Don't keep sprime available.  */
		      sprime = NULL_TREE;
		    }
		}
	    }

	  if (sprime)
	    {
	      /* If we can propagate the value computed for LHS into
		 all uses don't bother doing anything with this stmt.  */
	      if (may_propagate_copy (lhs, sprime))
		{
		  /* Mark it for removal.  */
		  to_remove.safe_push (stmt);

		  /* ??? Don't count copy/constant propagations.  */
		  if (gimple_assign_single_p (stmt)
		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
			  || gimple_assign_rhs1 (stmt) == sprime))
		    continue;

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " in all uses of ");
		      print_gimple_stmt (dump_file, stmt, 0);
		    }

		  eliminations++;
		  continue;
		}

	      /* If this is an assignment from our leader (which
		 happens when the value-number is a constant)
		 then there is nothing to do.  */
	      if (gimple_assign_single_p (stmt)
		  && sprime == gimple_assign_rhs1 (stmt))
		continue;

	      /* Else replace its RHS.  */
	      bool can_make_abnormal_goto
		= is_gimple_call (stmt)
		  && stmt_can_make_abnormal_goto (stmt);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      gimple *orig_stmt = stmt;
	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
					      TREE_TYPE (sprime)))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      tree vdef = gimple_vdef (stmt);
	      tree vuse = gimple_vuse (stmt);
	      propagate_tree_value_into_stmt (&gsi, sprime);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	      if (vdef != gimple_vdef (stmt))
		VN_INFO (vdef)->valnum = vuse;

	      /* If we removed EH side-effects from the statement, clean
		 its EH information.  */
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		{
		  bitmap_set_bit (need_eh_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed EH side-effects.\n");
		}

	      /* Likewise for AB side-effects.  */
	      if (can_make_abnormal_goto
		  && !stmt_can_make_abnormal_goto (stmt))
		{
		  bitmap_set_bit (need_ab_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed AB side-effects.\n");
		}

	      continue;
	    }
	}

      /* If the statement is a scalar store, see if the expression
	 has the same value number as its rhs.  If so, the store is
	 dead.  */
      if (gimple_assign_single_p (stmt)
	  && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg (gimple_assign_lhs (stmt))
	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
	{
	  tree val;
	  tree rhs = gimple_assign_rhs1 (stmt);
	  vn_reference_t vnresult;
	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
				     &vnresult, false);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = VN_INFO (rhs)->valnum;
	  if (val
	      && operand_equal_p (val, rhs, 0))
	    {
	      /* We can only remove the later store if the former aliases
		 at least all accesses the later one does or if the store
		 was to readonly memory storing the same value.  */
	      alias_set_type set = get_alias_set (lhs);
	      if (! vnresult
		  || vnresult->set == set
		  || alias_set_subset_of (set, vnresult->set))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleted redundant store ");
		      print_gimple_stmt (dump_file, stmt, 0);
		    }

		  /* Queue stmt for removal.  */
		  to_remove.safe_push (stmt);
		  continue;
		}
	    }
	}

      /* If this is a control statement on which value numbering left
	 some outgoing edges unexecutable, force the condition in a way
	 consistent with that.  */
      if (gcond *cond = dyn_cast <gcond *> (stmt))
	{
	  if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Removing unexecutable edge from ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}
	      if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (cond);
	      else
		gimple_cond_make_false (cond);
	      update_stmt (cond);
	      el_todo |= TODO_cleanup_cfg;
	      continue;
	    }
	}

      bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));
      tree vdef = gimple_vdef (stmt);
      tree vuse = gimple_vuse (stmt);

      /* If we didn't replace the whole stmt (or propagate the result
	 into all uses), replace all uses on this stmt with their
	 leaders.  */
      bool modified = false;
      use_operand_p use_p;
      ssa_op_iter iter;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  /* ??? The call code above leaves stmt operands un-updated.  */
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  tree sprime = eliminate_avail (use);
	  if (sprime && sprime != use
	      && may_propagate_copy (use, sprime)
	      /* We substitute into debug stmts to avoid excessive
		 debug temporaries created by removed stmts, but we need
		 to avoid doing so for inserted sprimes as we never want
		 to create debug temporaries for them.  */
	      && (!inserted_exprs
		  || TREE_CODE (sprime) != SSA_NAME
		  || !is_gimple_debug (stmt)
		  || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
	    {
	      propagate_value (use_p, sprime);
	      modified = true;
	    }
	}

      /* Fold the stmt if modified; this canonicalizes MEM_REFs we
	 propagated into, which is a requirement for the IPA devirt
	 machinery.  */
      gimple *old_stmt = stmt;
      if (modified)
	{
	  /* If a formerly non-invariant ADDR_EXPR is turned into an
	     invariant one it was on a separate stmt.  */
	  if (gimple_assign_single_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
	  gimple_stmt_iterator prev = gsi;
	  gsi_prev (&prev);
	  if (fold_stmt (&gsi))
	    {
	      /* fold_stmt may have created new stmts in between
		 the previous stmt and the folded stmt.  Mark
		 all defs created there as varying to not confuse
		 the SCCVN machinery as we're using that even during
		 elimination.  */
	      if (gsi_end_p (prev))
		prev = gsi_start_bb (b);
	      else
		gsi_next (&prev);
	      if (gsi_stmt (prev) != gsi_stmt (gsi))
		do
		  {
		    tree def;
		    ssa_op_iter dit;
		    FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					       dit, SSA_OP_ALL_DEFS)
		      /* As existing DEFs may move between stmts
			 we have to guard VN_INFO_GET.  */
		      if (! has_VN_INFO (def))
			VN_INFO_GET (def)->valnum = def;
		    if (gsi_stmt (prev) == gsi_stmt (gsi))
		      break;
		    gsi_next (&prev);
		  }
		while (1);
	    }
	  stmt = gsi_stmt (gsi);
	  /* In case we folded the stmt away schedule the NOP for removal.  */
	  if (gimple_nop_p (stmt))
	    to_remove.safe_push (stmt);
	}

      /* Visit indirect calls and turn them into direct calls if
	 possible using the devirtualization machinery.  Do this before
	 checking for required EH/abnormal/noreturn cleanup as devirt
	 may expose more of those.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  tree fn = gimple_call_fn (call_stmt);
	  if (fn
	      && flag_devirtualize
	      && virtual_method_call_p (fn))
	    {
	      tree otr_type = obj_type_ref_class (fn);
	      unsigned HOST_WIDE_INT otr_tok
		= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	      tree instance;
	      ipa_polymorphic_call_context context (current_function_decl,
						    fn, stmt, &instance);
	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
					otr_type, stmt);
	      bool final;
	      vec <cgraph_node *> targets
		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
						     otr_tok, context, &final);
	      if (dump_file)
		dump_possible_polymorphic_call_targets (dump_file,
							obj_type_ref_class (fn),
							otr_tok, context);
	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
		{
		  tree fn;
		  if (targets.length () == 1)
		    fn = targets[0]->decl;
		  else
		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  if (dump_enabled_p ())
		    {
		      location_t loc = gimple_location (stmt);
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				       "converting indirect call to "
				       "function %s\n",
				       lang_hooks.decl_printable_name (fn, 2));
		    }
		  gimple_call_set_fndecl (call_stmt, fn);
		  /* If changing the call to __builtin_unreachable
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (call_stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fn))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
			  == void_type_node))
		    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
		  maybe_remove_unused_call_args (cfun, call_stmt);
		  modified = true;
		}
	    }
	}

      if (modified)
	{
	  /* When changing a call into a noreturn call, cfg cleanup
	     is needed to fix up the noreturn call.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	    to_fixup.safe_push (stmt);
	  /* When changing a condition or switch into one for which we
	     know which edge will be executed, schedule a cfg cleanup.  */
	  if ((gimple_code (stmt) == GIMPLE_COND
	       && (gimple_cond_true_p (as_a <gcond *> (stmt))
		   || gimple_cond_false_p (as_a <gcond *> (stmt))))
	      || (gimple_code (stmt) == GIMPLE_SWITCH
		  && TREE_CODE (gimple_switch_index
				  (as_a <gswitch *> (stmt))) == INTEGER_CST))
	    el_todo |= TODO_cleanup_cfg;
	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }
	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }
	  update_stmt (stmt);
	  if (vdef != gimple_vdef (stmt))
	    VN_INFO (vdef)->valnum = vuse;
	}

      /* Make new values available - for fully redundant LHS we
	 continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	eliminate_push_avail (DEF_FROM_PTR (defp));
    }

  /* Replace destination PHI arguments.  */
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}
  return NULL;
}
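
/* Putting the pieces above together (a sketch, not from the sources):

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   with both additions value-numbered alike, eliminate_avail (y_4)
   returns the dominating leader x_1, y_4's definition is queued in
   to_remove, and later uses of y_4 are rewritten to x_1 as their
   statements are visited.  All SSA names are invented.  */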

/* Unwind the leaders recorded in the just-finished block, restoring
   any shadowed ones.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}

/* Eliminate fully redundant computations.  */

unsigned int
vn_eliminate (bitmap inserted_exprs)
{
  eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
  el.avail.reserve (num_ssa_names);

  el.walk (cfun->cfg->x_entry_block_ptr);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el.to_remove.is_empty ())
    {
      gimple *stmt = el.to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (el.need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (el.need_ab_cleanup, bb->index);
	  release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el.el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el.to_fixup.is_empty ())
    {
      gimple *stmt = el.to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el.el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el.el_todo |= TODO_cleanup_cfg;

  statistics_counter_event (cfun, "Eliminated", el.eliminations);
  statistics_counter_event (cfun, "Insertions", el.insertions);

  return el.el_todo;
}

namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *)
{
  unsigned int todo = 0;

  run_scc_vn (VN_WALKREWRITE);

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (NULL);

  scc_vn_restore_ssa_info ();
  free_scc_vn ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}
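
/* End-to-end illustration of the pass (a sketch, not from the sources):
   for input like

     int f (int *p, int b)
     {
       int x = *p;
       int y = *p;
       return x + y + b;
     }

   the second load of *p receives the same value number as the first,
   so FRE eliminates it and the function returns using a single load.  */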