/* SCC value numbering for trees
   Copyright (C) 2006-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "inchash.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
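
/* An illustrative sketch of the optimistic SCC iteration (hypothetical
   GIMPLE, not part of the original description above): for the cycle

       # i_1 = PHI <0(entry), i_2(latch)>
       i_2 = i_1 + 0;

   the optimistic table first value-numbers i_1 to 0, since the
   not-yet-visited argument i_2 is VN_TOP and VN_TOP arguments are
   ignored for PHI equality, and then i_2 to 0 + 0 == 0.  A second
   iteration over the SCC changes nothing, so the assumption is proven
   and both names share value number 0 without re-walking any code
   outside the cycle.  */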


/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
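
/* A few illustrative classifications (hypothetical GIMPLE, not taken
   from the sources):

     _1 = x_2 + 1;           -> VN_NARY      (GIMPLE_BINARY_RHS)
     _3 = MEM[(int *)p_4];   -> VN_REFERENCE (tcc_reference single rhs)
     _5 = PHI <_1, _3>;      -> VN_PHI
     _6 = 42;                -> VN_CONSTANT  (tcc_constant)
     _7 = &a;                -> VN_CONSTANT if the address is invariant,
                                VN_REFERENCE otherwise.  */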

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
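
/* A hedged illustration of the decomposition above (hypothetical input,
   not exercised here): for a load from a.f, with a a plain VAR_DECL,
   the ops vector receives, outermost first,

     { COMPONENT_REF, op0 = f } { MEM_REF, op0 = 0, off = 0 }
     { ADDR_EXPR, op0 = &a }

   because decls are canonicalized to MEM[&decl] by the VAR_DECL case.
   The off fields hold constant byte offsets where known, -1 otherwise.  */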

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
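
/* A sketch of the forwprop above on hypothetical GIMPLE (not from the
   sources): given

     p_1 = q_2 + 16;
     ... = MEM[p_1 + 4];

   the operand pair { MEM_REF, off 4 } { SSA_NAME p_1 } becomes
   { MEM_REF, off 20 } { SSA_NAME q_2 } via the POINTER_PLUS_EXPR path,
   and the final recursion retries in case q_2's definition allows
   further folding.  */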

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      HOST_WIDE_INT off = 0;
      HOST_WIDE_INT size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (operands[i].off == -1)
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      if (native_encode_expr (ctor, buf, size, off) > 0)
		return native_interpret_expr (ref->type, buf, size);
	    }
	}
    }

  return NULL_TREE;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    return NULL;
	}
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
		   (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
824bbeb8 1753 else if (vn_walk_kind == VN_WALKREWRITE
1754 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
87b53397 1755 && ref->size == maxsize
1756 && maxsize % BITS_PER_UNIT == 0
1757 && offset % BITS_PER_UNIT == 0
1758 && is_gimple_reg_type (vr->type)
1759 && gimple_assign_single_p (def_stmt)
1760 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1761 {
1762 tree base2;
1763 HOST_WIDE_INT offset2, size2, maxsize2;
1764 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1765 &offset2, &size2, &maxsize2);
1766 if (maxsize2 != -1
1767 && maxsize2 == size2
1768 && size2 % BITS_PER_UNIT == 0
1769 && offset2 % BITS_PER_UNIT == 0
1770 && operand_equal_p (base, base2, 0)
1771 && offset2 <= offset
1772 && offset2 + size2 >= offset + maxsize)
1773 {
1774 /* We support up to 512-bit values (for V8DFmode). */
1775 unsigned char buffer[64];
1776 int len;
1777
1778 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1779 buffer, sizeof (buffer));
1780 if (len > 0)
1781 {
1782 tree val = native_interpret_expr (vr->type,
1783 buffer
1784 + ((offset - offset2)
1785 / BITS_PER_UNIT),
1786 ref->size / BITS_PER_UNIT);
1787 if (val)
a4f94d42 1788 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1789 (vuse, vr->set, vr->type, vr->operands, val);
87b53397 1790 }
1791 }
1792 }
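
/* Illustrative example for case 3 (exposition only; assumes a
   little-endian target): given

     int i;
     i = 0x11223344;
     ... = *((unsigned char *)&i + 2);

   the constant store is encoded to its target byte image with
   native_encode_expr and the single-byte load at bit offset 16 is
   decoded with native_interpret_expr, yielding 0x22.  */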
1793
a3bb56f0 1794 /* 4) Assignment from an SSA name whose definition we may be able
 1795 to access pieces of. */
1796 else if (ref->size == maxsize
1797 && is_gimple_reg_type (vr->type)
1798 && gimple_assign_single_p (def_stmt)
1799 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1800 {
1801 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1802 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1803 if (is_gimple_assign (def_stmt2)
1804 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1805 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1806 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1807 {
1808 tree base2;
1809 HOST_WIDE_INT offset2, size2, maxsize2, off;
1810 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1811 &offset2, &size2, &maxsize2);
1812 off = offset - offset2;
1813 if (maxsize2 != -1
1814 && maxsize2 == size2
1815 && operand_equal_p (base, base2, 0)
1816 && offset2 <= offset
1817 && offset2 + size2 >= offset + maxsize)
1818 {
1819 tree val = NULL_TREE;
1820 HOST_WIDE_INT elsz
f9ae6f95 1821 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
a3bb56f0 1822 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1823 {
1824 if (off == 0)
1825 val = gimple_assign_rhs1 (def_stmt2);
1826 else if (off == elsz)
1827 val = gimple_assign_rhs2 (def_stmt2);
1828 }
1829 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1830 && off % elsz == 0)
1831 {
1832 tree ctor = gimple_assign_rhs1 (def_stmt2);
1833 unsigned i = off / elsz;
1834 if (i < CONSTRUCTOR_NELTS (ctor))
1835 {
1836 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
0ff8139c 1837 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1838 {
1839 if (TREE_CODE (TREE_TYPE (elt->value))
1840 != VECTOR_TYPE)
1841 val = elt->value;
1842 }
a3bb56f0 1843 }
1844 }
1845 if (val)
a4f94d42 1846 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1847 (vuse, vr->set, vr->type, vr->operands, val);
a3bb56f0 1848 }
1849 }
1850 }
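
/* Illustrative GIMPLE for case 4 (exposition only):

     _1 = COMPLEX_EXPR <a_2, b_3>;
     c = _1;
     ... = REALPART_EXPR <c>;

   the last load has off == 0 and is valued to a_2; an
   IMAGPART_EXPR <c> load at off == elsz would be valued to b_3.
   A whole-vector store from a CONSTRUCTOR likewise lets an element
   load at offset i * elsz pick up CONSTRUCTOR_ELT i.  */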
1851
1852 /* 5) For aggregate copies translate the reference through them if
d8021dea 1853 the copy kills ref. */
8ecc6b38 1854 else if (vn_walk_kind == VN_WALKREWRITE
1855 && gimple_assign_single_p (def_stmt)
d8021dea 1856 && (DECL_P (gimple_assign_rhs1 (def_stmt))
182cf5a9 1857 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
d8021dea 1858 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1859 {
1860 tree base2;
a7be40cc 1861 HOST_WIDE_INT offset2, size2, maxsize2;
d8021dea 1862 int i, j;
c2078b80 1863 auto_vec<vn_reference_op_s> rhs;
d8021dea 1864 vn_reference_op_t vro;
3918bd18 1865 ao_ref r;
d8021dea 1866
66b86a74 1867 if (!lhs_ref_ok)
1868 return (void *)-1;
1869
d8021dea 1870 /* See if the assignment kills REF. */
66b86a74 1871 base2 = ao_ref_base (&lhs_ref);
1872 offset2 = lhs_ref.offset;
1873 size2 = lhs_ref.size;
a7be40cc 1874 maxsize2 = lhs_ref.max_size;
1875 if (maxsize2 == -1
1876 || (base != base2 && !operand_equal_p (base, base2, 0))
d8021dea 1877 || offset2 > offset
1878 || offset2 + size2 < offset + maxsize)
1879 return (void *)-1;
1880
66b86a74 1881 /* Find the common base of ref and the lhs. lhs_ops already
1882 contains valueized operands for the lhs. */
f1f41a6c 1883 i = vr->operands.length () - 1;
1884 j = lhs_ops.length () - 1;
0d5b37dd 1885 while (j >= 0 && i >= 0
f1f41a6c 1886 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
d8021dea 1887 {
1888 i--;
1889 j--;
1890 }
0d5b37dd 1891
b11771e1 1892 /* ??? The innermost op should always be a MEM_REF and we already
1893 checked that the assignment to the lhs kills vr. Thus for
1894 aggregate copies using char[] types the vn_reference_op_eq
1895 may fail when comparing types for compatibility. But we really
1896 don't care here - further lookups with the rewritten operands
1897 will simply fail if we messed up types too badly. */
a5650c86 1898 HOST_WIDE_INT extra_off = 0;
78e606ea 1899 if (j == 0 && i >= 0
f1f41a6c 1900 && lhs_ops[0].opcode == MEM_REF
a5650c86 1901 && lhs_ops[0].off != -1)
1902 {
1903 if (lhs_ops[0].off == vr->operands[i].off)
1904 i--, j--;
1905 else if (vr->operands[i].opcode == MEM_REF
1906 && vr->operands[i].off != -1)
1907 {
1908 extra_off = vr->operands[i].off - lhs_ops[0].off;
1909 i--, j--;
1910 }
1911 }
b11771e1 1912
d8021dea 1913 /* i now points to the first additional op.
1914 ??? LHS may not be completely contained in VR, one or more
1915 VIEW_CONVERT_EXPRs could be in its way. We could at least
1916 try handling outermost VIEW_CONVERT_EXPRs. */
1917 if (j != -1)
1918 return (void *)-1;
d8021dea 1919
1920 /* Now re-write REF to be based on the rhs of the assignment. */
1921 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
a5650c86 1922
1923 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1924 if (extra_off != 0)
1925 {
1926 if (rhs.length () < 2
1927 || rhs[0].opcode != MEM_REF
1928 || rhs[0].off == -1)
1929 return (void *)-1;
1930 rhs[0].off += extra_off;
1931 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1932 build_int_cst (TREE_TYPE (rhs[0].op0),
1933 extra_off));
1934 }
1935
d8021dea 1936 /* We need to prepend vr->operands[0..i] to rhs. */
2fd3ecff 1937 vec<vn_reference_op_s> old = vr->operands;
f1f41a6c 1938 if (i + 1 + rhs.length () > vr->operands.length ())
d8021dea 1939 {
f1f41a6c 1940 vr->operands.safe_grow (i + 1 + rhs.length ());
2fd3ecff 1941 if (old == shared_lookup_references)
1942 shared_lookup_references = vr->operands;
d8021dea 1943 }
1944 else
f1f41a6c 1945 vr->operands.truncate (i + 1 + rhs.length ());
1946 FOR_EACH_VEC_ELT (rhs, j, vro)
1947 vr->operands[i + 1 + j] = *vro;
01fd46e3 1948 vr->operands = valueize_refs (vr->operands);
2fd3ecff 1949 if (old == shared_lookup_references)
1950 shared_lookup_references = vr->operands;
d8021dea 1951 vr->hashcode = vn_reference_compute_hash (vr);
77c7051b 1952
a5650c86 1953 /* Try folding the new reference to a constant. */
1954 tree val = fully_constant_vn_reference_p (vr);
1955 if (val)
1956 return vn_reference_lookup_or_insert_for_pieces
1957 (vuse, vr->set, vr->type, vr->operands, val);
1958
77c7051b 1959 /* Adjust *ref from the new operands. */
1960 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1961 return (void *)-1;
1962 /* This can happen with bitfields. */
1963 if (ref->size != r.size)
1964 return (void *)-1;
1965 *ref = r;
1966
1967 /* Do not update last seen VUSE after translating. */
1968 last_vuse_ptr = NULL;
1969
1970 /* Keep looking for the adjusted *REF / VR pair. */
1971 return NULL;
1972 }
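
/* Illustrative example for case 5 (exposition only): given

     struct S a, b;
     a.f = 5;
     b = a;       the aggregate copy, our def_stmt
     ... = b.f;   the tracked reference *ref

   the lookup of b.f is rewritten in terms of a.f and the walk
   continues from the copy's VUSE, where the store a.f = 5 can
   finally provide the value.  */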
1973
a3bb56f0 1974 /* 6) For memcpy copies translate the reference through them if
77c7051b 1975 the copy kills ref. */
1976 else if (vn_walk_kind == VN_WALKREWRITE
1977 && is_gimple_reg_type (vr->type)
1978 /* ??? Handle BCOPY as well. */
1979 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1980 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1981 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1982 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1983 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1984 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1985 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
e913b5cd 1986 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
77c7051b 1987 {
1988 tree lhs, rhs;
1989 ao_ref r;
1990 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1991 vn_reference_op_s op;
1992 HOST_WIDE_INT at;
1993
1995 /* Only handle non-variable, addressable refs. */
1996 if (ref->size != maxsize
1997 || offset % BITS_PER_UNIT != 0
1998 || ref->size % BITS_PER_UNIT != 0)
1999 return (void *)-1;
2000
2001 /* Extract a pointer base and an offset for the destination. */
2002 lhs = gimple_call_arg (def_stmt, 0);
2003 lhs_offset = 0;
2004 if (TREE_CODE (lhs) == SSA_NAME)
2005 lhs = SSA_VAL (lhs);
2006 if (TREE_CODE (lhs) == ADDR_EXPR)
2007 {
2008 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2009 &lhs_offset);
2010 if (!tem)
2011 return (void *)-1;
2012 if (TREE_CODE (tem) == MEM_REF
e913b5cd 2013 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
77c7051b 2014 {
2015 lhs = TREE_OPERAND (tem, 0);
e913b5cd 2016 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
77c7051b 2017 }
2018 else if (DECL_P (tem))
2019 lhs = build_fold_addr_expr (tem);
2020 else
2021 return (void *)-1;
2022 }
2023 if (TREE_CODE (lhs) != SSA_NAME
2024 && TREE_CODE (lhs) != ADDR_EXPR)
2025 return (void *)-1;
2026
2027 /* Extract a pointer base and an offset for the source. */
2028 rhs = gimple_call_arg (def_stmt, 1);
2029 rhs_offset = 0;
2030 if (TREE_CODE (rhs) == SSA_NAME)
2031 rhs = SSA_VAL (rhs);
2032 if (TREE_CODE (rhs) == ADDR_EXPR)
2033 {
2034 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2035 &rhs_offset);
2036 if (!tem)
2037 return (void *)-1;
2038 if (TREE_CODE (tem) == MEM_REF
e913b5cd 2039 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
77c7051b 2040 {
2041 rhs = TREE_OPERAND (tem, 0);
e913b5cd 2042 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
77c7051b 2043 }
2044 else if (DECL_P (tem))
2045 rhs = build_fold_addr_expr (tem);
2046 else
2047 return (void *)-1;
2048 }
2049 if (TREE_CODE (rhs) != SSA_NAME
2050 && TREE_CODE (rhs) != ADDR_EXPR)
2051 return (void *)-1;
2052
8c53c46c 2053 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
77c7051b 2054
 2055 /* The bases of the destination and the reference have to agree. */
2056 if ((TREE_CODE (base) != MEM_REF
2057 && !DECL_P (base))
2058 || (TREE_CODE (base) == MEM_REF
2059 && (TREE_OPERAND (base, 0) != lhs
e913b5cd 2060 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
77c7051b 2061 || (DECL_P (base)
2062 && (TREE_CODE (lhs) != ADDR_EXPR
2063 || TREE_OPERAND (lhs, 0) != base)))
2064 return (void *)-1;
2065
2066 /* And the access has to be contained within the memcpy destination. */
2067 at = offset / BITS_PER_UNIT;
2068 if (TREE_CODE (base) == MEM_REF)
8c53c46c 2069 at += tree_to_uhwi (TREE_OPERAND (base, 1));
77c7051b 2070 if (lhs_offset > at
2071 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2072 return (void *)-1;
2073
2074 /* Make room for 2 operands in the new reference. */
f1f41a6c 2075 if (vr->operands.length () < 2)
77c7051b 2076 {
f1f41a6c 2077 vec<vn_reference_op_s> old = vr->operands;
2078 vr->operands.safe_grow_cleared (2);
77c7051b 2079 if (old == shared_lookup_references
2080 && vr->operands != old)
2fd3ecff 2081 shared_lookup_references = vr->operands;
77c7051b 2082 }
2083 else
f1f41a6c 2084 vr->operands.truncate (2);
77c7051b 2085
2086 /* The looked-through reference is a simple MEM_REF. */
2087 memset (&op, 0, sizeof (op));
2088 op.type = vr->type;
2089 op.opcode = MEM_REF;
2090 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2091 op.off = at - lhs_offset + rhs_offset;
f1f41a6c 2092 vr->operands[0] = op;
2be90eed 2093 op.type = TREE_TYPE (rhs);
77c7051b 2094 op.opcode = TREE_CODE (rhs);
2095 op.op0 = rhs;
2096 op.off = -1;
f1f41a6c 2097 vr->operands[1] = op;
77c7051b 2098 vr->hashcode = vn_reference_compute_hash (vr);
3918bd18 2099
2100 /* Adjust *ref from the new operands. */
2101 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
d8021dea 2102 return (void *)-1;
8f15ba15 2103 /* This can happen with bitfields. */
2104 if (ref->size != r.size)
2105 return (void *)-1;
3918bd18 2106 *ref = r;
d8021dea 2107
4a83fadb 2108 /* Do not update last seen VUSE after translating. */
2109 last_vuse_ptr = NULL;
2110
d8021dea 2111 /* Keep looking for the adjusted *REF / VR pair. */
2112 return NULL;
2113 }
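
/* Illustrative example for case 6 (exposition only): given

     memcpy (&d, &s, 16);
     ... = d.x;   d.x within the first 16 bytes of d

   the reference is rewritten to a MEM_REF based on &s at the
   corresponding offset and the walk continues through the memcpy,
   so an earlier store to s.x can provide the value.  */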
2114
2115 /* Bail out and stop walking. */
2116 return (void *)-1;
2117}
2118
f6c33c78 2119/* Lookup a reference operation by its parts, in the current hash table.
2120 Returns the resulting value number if it exists in the hash table,
2121 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2122 vn_reference_t stored in the hashtable if something is found. */
9e9e6e3e 2123
2124tree
3918bd18 2125vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 2126 vec<vn_reference_op_s> operands,
8ecc6b38 2127 vn_reference_t *vnresult, vn_lookup_kind kind)
f6c33c78 2128{
2129 struct vn_reference_s vr1;
dd277d48 2130 vn_reference_t tmp;
c26ce8a9 2131 tree cst;
dd277d48 2132
2133 if (!vnresult)
2134 vnresult = &tmp;
2135 *vnresult = NULL;
d8021dea 2136
b8a2283e 2137 vr1.vuse = vuse_ssa_val (vuse);
f1f41a6c 2138 shared_lookup_references.truncate (0);
2139 shared_lookup_references.safe_grow (operands.length ());
2140 memcpy (shared_lookup_references.address (),
2141 operands.address (),
d8021dea 2142 sizeof (vn_reference_op_s)
f1f41a6c 2143 * operands.length ());
d8021dea 2144 vr1.operands = operands = shared_lookup_references
2145 = valueize_refs (shared_lookup_references);
3918bd18 2146 vr1.type = type;
2147 vr1.set = set;
f6c33c78 2148 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 2149 if ((cst = fully_constant_vn_reference_p (&vr1)))
2150 return cst;
f6c33c78 2151
c26ce8a9 2152 vn_reference_lookup_1 (&vr1, vnresult);
dd277d48 2153 if (!*vnresult
8ecc6b38 2154 && kind != VN_NOWALK
dd277d48 2155 && vr1.vuse)
02067dc5 2156 {
3918bd18 2157 ao_ref r;
8ecc6b38 2158 vn_walk_kind = kind;
3918bd18 2159 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
d8021dea 2160 *vnresult =
3918bd18 2161 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
d8021dea 2162 vn_reference_lookup_2,
46816709 2163 vn_reference_lookup_3,
2164 vn_valueize, &vr1);
2fd3ecff 2165 gcc_checking_assert (vr1.operands == shared_lookup_references);
02067dc5 2166 }
2167
dd277d48 2168 if (*vnresult)
2169 return (*vnresult)->result;
2170
2171 return NULL_TREE;
f6c33c78 2172}
2173
2174/* Lookup OP in the current hash table, and return the resulting value
2175 number if it exists in the hash table. Return NULL_TREE if it does
2176 not exist in the hash table or if the result field of the structure
 2177 was NULL. VNRESULT will be filled in with the vn_reference_t
2178 stored in the hashtable if one exists. */
2179
2180tree
8ecc6b38 2181vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
f6c33c78 2182 vn_reference_t *vnresult)
9e9e6e3e 2183{
f1f41a6c 2184 vec<vn_reference_op_s> operands;
9e9e6e3e 2185 struct vn_reference_s vr1;
c26ce8a9 2186 tree cst;
882f8b55 2187 bool valueized_anything;
dd277d48 2188
f6c33c78 2189 if (vnresult)
2190 *vnresult = NULL;
9e9e6e3e 2191
b8a2283e 2192 vr1.vuse = vuse_ssa_val (vuse);
882f8b55 2193 vr1.operands = operands
 2194 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3918bd18 2195 vr1.type = TREE_TYPE (op);
2196 vr1.set = get_alias_set (op);
9e9e6e3e 2197 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 2198 if ((cst = fully_constant_vn_reference_p (&vr1)))
2199 return cst;
404d6be4 2200
8ecc6b38 2201 if (kind != VN_NOWALK
dd277d48 2202 && vr1.vuse)
2203 {
2204 vn_reference_t wvnresult;
3918bd18 2205 ao_ref r;
882f8b55 2206 /* Make sure to use a valueized reference if we valueized anything.
2207 Otherwise preserve the full reference for advanced TBAA. */
 2208 if (!valueized_anything
2209 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2210 vr1.operands))
2be90eed 2211 ao_ref_init (&r, op);
8ecc6b38 2212 vn_walk_kind = kind;
dd277d48 2213 wvnresult =
3918bd18 2214 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
d8021dea 2215 vn_reference_lookup_2,
46816709 2216 vn_reference_lookup_3,
2217 vn_valueize, &vr1);
2fd3ecff 2218 gcc_checking_assert (vr1.operands == shared_lookup_references);
dd277d48 2219 if (wvnresult)
2220 {
2221 if (vnresult)
2222 *vnresult = wvnresult;
2223 return wvnresult->result;
2224 }
2225
2226 return NULL_TREE;
404d6be4 2227 }
9e9e6e3e 2228
dd277d48 2229 return vn_reference_lookup_1 (&vr1, vnresult);
9e9e6e3e 2230}
2231
2fd3ecff 2232/* Lookup CALL in the current hash table and return the entry in
2233 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2234
2235void
1a91d914 2236vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2fd3ecff 2237 vn_reference_t vr)
2238{
72e693ed 2239 if (vnresult)
2240 *vnresult = NULL;
2241
2fd3ecff 2242 tree vuse = gimple_vuse (call);
2243
2244 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2245 vr->operands = valueize_shared_reference_ops_from_call (call);
2246 vr->type = gimple_expr_type (call);
2247 vr->set = 0;
2248 vr->hashcode = vn_reference_compute_hash (vr);
2249 vn_reference_lookup_1 (vr, vnresult);
2250}
f6c33c78 2251
9e9e6e3e 2252/* Insert OP into the current hash table with a value number of
f6c33c78 2253 RESULT, and return the resulting reference structure we created. */
9e9e6e3e 2254
2fd3ecff 2255static vn_reference_t
39215e09 2256vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
9e9e6e3e 2257{
3e871d4d 2258 vn_reference_s **slot;
9e9e6e3e 2259 vn_reference_t vr1;
75aefb7b 2260 bool tem;
9e9e6e3e 2261
2262 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
f6c33c78 2263 if (TREE_CODE (result) == SSA_NAME)
2264 vr1->value_id = VN_INFO (result)->value_id;
2265 else
2266 vr1->value_id = get_or_alloc_constant_value_id (result);
dd277d48 2267 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
75aefb7b 2268 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3918bd18 2269 vr1->type = TREE_TYPE (op);
2270 vr1->set = get_alias_set (op);
9e9e6e3e 2271 vr1->hashcode = vn_reference_compute_hash (vr1);
2272 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
39215e09 2273 vr1->result_vdef = vdef;
9e9e6e3e 2274
c1f445d2 2275 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2276 INSERT);
9e9e6e3e 2277
 2278 /* Because we look up stores using vuses, and value number failures
2279 using the vdefs (see visit_reference_op_store for how and why),
2280 it's possible that on failure we may try to insert an already
2281 inserted store. This is not wrong, there is no ssa name for a
2282 store that we could use as a differentiator anyway. Thus, unlike
2283 the other lookup functions, you cannot gcc_assert (!*slot)
2284 here. */
2285
12661815 2286 /* But free the old slot in case of a collision. */
2287 if (*slot)
2288 free_reference (*slot);
9e9e6e3e 2289
2290 *slot = vr1;
f6c33c78 2291 return vr1;
2292}
2293
 2294/* Insert a reference by its pieces into the current hash table with
2295 a value number of RESULT. Return the resulting reference
2296 structure we created. */
2297
2298vn_reference_t
3918bd18 2299vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 2300 vec<vn_reference_op_s> operands,
f6c33c78 2301 tree result, unsigned int value_id)
2302
2303{
3e871d4d 2304 vn_reference_s **slot;
f6c33c78 2305 vn_reference_t vr1;
2306
2307 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
dd277d48 2308 vr1->value_id = value_id;
2309 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
f6c33c78 2310 vr1->operands = valueize_refs (operands);
3918bd18 2311 vr1->type = type;
2312 vr1->set = set;
f6c33c78 2313 vr1->hashcode = vn_reference_compute_hash (vr1);
2314 if (result && TREE_CODE (result) == SSA_NAME)
2315 result = SSA_VAL (result);
2316 vr1->result = result;
2317
c1f445d2 2318 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2319 INSERT);
48e1416a 2320
f6c33c78 2321 /* At this point we should have all the things inserted that we have
dd277d48 2322 seen before, and we should never try inserting something that
2323 already exists. */
f6c33c78 2324 gcc_assert (!*slot);
2325 if (*slot)
2326 free_reference (*slot);
2327
2328 *slot = vr1;
2329 return vr1;
9e9e6e3e 2330}
2331
51a23cfc 2332/* Compute and return the hash value for nary operation VNO1. */
9e9e6e3e 2333
2fd3ecff 2334static hashval_t
51a23cfc 2335vn_nary_op_compute_hash (const vn_nary_op_t vno1)
9e9e6e3e 2336{
f32e91d5 2337 inchash::hash hstate;
51a23cfc 2338 unsigned i;
9e9e6e3e 2339
51a23cfc 2340 for (i = 0; i < vno1->length; ++i)
2341 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2342 vno1->op[i] = SSA_VAL (vno1->op[i]);
9e9e6e3e 2343
51a23cfc 2344 if (vno1->length == 2
2345 && commutative_tree_code (vno1->opcode)
2346 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2347 {
2348 tree temp = vno1->op[0];
2349 vno1->op[0] = vno1->op[1];
2350 vno1->op[1] = temp;
2351 }
9e9e6e3e 2352
f32e91d5 2353 hstate.add_int (vno1->opcode);
51a23cfc 2354 for (i = 0; i < vno1->length; ++i)
f32e91d5 2355 inchash::add_expr (vno1->op[i], hstate);
9e9e6e3e 2356
f32e91d5 2357 return hstate.end ();
9e9e6e3e 2358}
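
/* As a consequence of the canonicalization above, e.g. the two
   statements (exposition only)

     x_1 = a_2 + 3;
     y_3 = 3 + a_2;

   hash identically and compare equal with vn_nary_op_eq, so both
   receive the same value number.  */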
2359
3e871d4d 2360/* Compare nary operations VNO1 and VNO2 and return true if they are
9e9e6e3e 2361 equivalent. */
2362
3e871d4d 2363bool
2364vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
9e9e6e3e 2365{
51a23cfc 2366 unsigned i;
2367
3d2d7de7 2368 if (vno1->hashcode != vno2->hashcode)
2369 return false;
2370
7384c678 2371 if (vno1->length != vno2->length)
2372 return false;
2373
51a23cfc 2374 if (vno1->opcode != vno2->opcode
c477520d 2375 || !types_compatible_p (vno1->type, vno2->type))
51a23cfc 2376 return false;
2377
2378 for (i = 0; i < vno1->length; ++i)
2379 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2380 return false;
2381
2382 return true;
9e9e6e3e 2383}
2384
f8ce304c 2385/* Initialize VNO from the pieces provided. */
9e9e6e3e 2386
f8ce304c 2387static void
2388init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
7384c678 2389 enum tree_code code, tree type, tree *ops)
f8ce304c 2390{
2391 vno->opcode = code;
2392 vno->length = length;
2393 vno->type = type;
7384c678 2394 memcpy (&vno->op[0], ops, sizeof (tree) * length);
f8ce304c 2395}
2396
2397/* Initialize VNO from OP. */
2398
2399static void
2400init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2401{
2402 unsigned i;
2403
2404 vno->opcode = TREE_CODE (op);
2405 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2406 vno->type = TREE_TYPE (op);
2407 for (i = 0; i < vno->length; ++i)
2408 vno->op[i] = TREE_OPERAND (op, i);
2409}
2410
7384c678 2411/* Return the number of operands for a vn_nary ops structure from STMT. */
2412
2413static unsigned int
2414vn_nary_length_from_stmt (gimple stmt)
2415{
2416 switch (gimple_assign_rhs_code (stmt))
2417 {
2418 case REALPART_EXPR:
2419 case IMAGPART_EXPR:
2420 case VIEW_CONVERT_EXPR:
2421 return 1;
2422
70cd63a3 2423 case BIT_FIELD_REF:
2424 return 3;
2425
7384c678 2426 case CONSTRUCTOR:
2427 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2428
2429 default:
2430 return gimple_num_ops (stmt) - 1;
2431 }
2432}
2433
f8ce304c 2434/* Initialize VNO from STMT. */
2435
2436static void
2437init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2438{
2439 unsigned i;
2440
2441 vno->opcode = gimple_assign_rhs_code (stmt);
f8ce304c 2442 vno->type = gimple_expr_type (stmt);
7384c678 2443 switch (vno->opcode)
2444 {
2445 case REALPART_EXPR:
2446 case IMAGPART_EXPR:
2447 case VIEW_CONVERT_EXPR:
2448 vno->length = 1;
2449 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2450 break;
2451
70cd63a3 2452 case BIT_FIELD_REF:
2453 vno->length = 3;
2454 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2455 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2456 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2457 break;
2458
7384c678 2459 case CONSTRUCTOR:
2460 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2461 for (i = 0; i < vno->length; ++i)
2462 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2463 break;
2464
2465 default:
70cd63a3 2466 gcc_checking_assert (!gimple_assign_single_p (stmt));
7384c678 2467 vno->length = gimple_num_ops (stmt) - 1;
2468 for (i = 0; i < vno->length; ++i)
2469 vno->op[i] = gimple_op (stmt, i + 1);
2470 }
f8ce304c 2471}
2472
2473/* Compute the hashcode for VNO and look for it in the hash table;
2474 return the resulting value number if it exists in the hash table.
2475 Return NULL_TREE if it does not exist in the hash table or if the
2476 result field of the operation is NULL. VNRESULT will contain the
2477 vn_nary_op_t from the hashtable if it exists. */
2478
2479static tree
2480vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
f6c33c78 2481{
3e871d4d 2482 vn_nary_op_s **slot;
f8ce304c 2483
f6c33c78 2484 if (vnresult)
2485 *vnresult = NULL;
f8ce304c 2486
2487 vno->hashcode = vn_nary_op_compute_hash (vno);
c1f445d2 2488 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2489 NO_INSERT);
f6c33c78 2490 if (!slot && current_info == optimistic_info)
c1f445d2 2491 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2492 NO_INSERT);
f6c33c78 2493 if (!slot)
2494 return NULL_TREE;
2495 if (vnresult)
3e871d4d 2496 *vnresult = *slot;
2497 return (*slot)->result;
f6c33c78 2498}
2499
f8ce304c 2500/* Lookup an n-ary operation by its pieces and return the resulting value
2501 number if it exists in the hash table. Return NULL_TREE if it does
2502 not exist in the hash table or if the result field of the operation
2503 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2504 if it exists. */
2505
2506tree
2507vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
7384c678 2508 tree type, tree *ops, vn_nary_op_t *vnresult)
f8ce304c 2509{
7384c678 2510 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2511 sizeof_vn_nary_op (length));
2512 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2513 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 2514}
2515
f6c33c78 2516/* Lookup OP in the current hash table, and return the resulting value
2517 number if it exists in the hash table. Return NULL_TREE if it does
2518 not exist in the hash table or if the result field of the operation
2519 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2520 if it exists. */
2521
2522tree
2523vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
9e9e6e3e 2524{
7384c678 2525 vn_nary_op_t vno1
2526 = XALLOCAVAR (struct vn_nary_op_s,
2527 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2528 init_vn_nary_op_from_op (vno1, op);
2529 return vn_nary_op_lookup_1 (vno1, vnresult);
9e9e6e3e 2530}
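
/* A hypothetical lookup by pieces (A, B and the type are
   placeholders): asking whether A + B already has a value number
   without building a tree node first:

     tree ops[2] = { a, b };
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR,
                                          integer_type_node, ops,
                                          NULL);  */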
2531
75a70cf9 2532/* Lookup the rhs of STMT in the current hash table, and return the resulting
2533 value number if it exists in the hash table. Return NULL_TREE if
2534 it does not exist in the hash table. VNRESULT will contain the
2535 vn_nary_op_t from the hashtable if it exists. */
2536
2537tree
2538vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2539{
7384c678 2540 vn_nary_op_t vno1
2541 = XALLOCAVAR (struct vn_nary_op_s,
2542 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2543 init_vn_nary_op_from_stmt (vno1, stmt);
2544 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 2545}
2546
2547/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2548
2549static vn_nary_op_t
2550alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2551{
2552 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2553}
2554
2555/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2556 obstack. */
2557
2558static vn_nary_op_t
2559alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2560{
2561 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2562 &current_info->nary_obstack);
2563
2564 vno1->value_id = value_id;
2565 vno1->length = length;
2566 vno1->result = result;
2567
2568 return vno1;
2569}
2570
2571/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2572 VNO->HASHCODE first. */
2573
2574static vn_nary_op_t
c1f445d2 2575vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3e871d4d 2576 bool compute_hash)
f8ce304c 2577{
3e871d4d 2578 vn_nary_op_s **slot;
f8ce304c 2579
2580 if (compute_hash)
2581 vno->hashcode = vn_nary_op_compute_hash (vno);
2582
c1f445d2 2583 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
f8ce304c 2584 gcc_assert (!*slot);
2585
2586 *slot = vno;
2587 return vno;
75a70cf9 2588}
2589
f6c33c78 2590/* Insert an n-ary operation into the current hash table using its
2591 pieces. Return the vn_nary_op_t structure we created and put in
2592 the hashtable. */
2593
2594vn_nary_op_t
2595vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
7384c678 2596 tree type, tree *ops,
2597 tree result, unsigned int value_id)
f6c33c78 2598{
7384c678 2599 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2600 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
f8ce304c 2601 return vn_nary_op_insert_into (vno1, current_info->nary, true);
f6c33c78 2602}
2603
9e9e6e3e 2604/* Insert OP into the current hash table with a value number of
f6c33c78 2605 RESULT. Return the vn_nary_op_t structure we created and put in
2606 the hashtable. */
9e9e6e3e 2607
f6c33c78 2608vn_nary_op_t
51a23cfc 2609vn_nary_op_insert (tree op, tree result)
9e9e6e3e 2610{
51a23cfc 2611 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
51a23cfc 2612 vn_nary_op_t vno1;
51a23cfc 2613
f8ce304c 2614 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2615 init_vn_nary_op_from_op (vno1, op);
2616 return vn_nary_op_insert_into (vno1, current_info->nary, true);
9e9e6e3e 2617}
2618
75a70cf9 2619/* Insert the rhs of STMT into the current hash table with a value number of
2620 RESULT. */
2621
2622vn_nary_op_t
2623vn_nary_op_insert_stmt (gimple stmt, tree result)
2624{
7384c678 2625 vn_nary_op_t vno1
2626 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2627 result, VN_INFO (result)->value_id);
f8ce304c 2628 init_vn_nary_op_from_stmt (vno1, stmt);
2629 return vn_nary_op_insert_into (vno1, current_info->nary, true);
75a70cf9 2630}
2631
9e9e6e3e 2632/* Compute a hashcode for PHI operation VP1 and return it. */
2633
2634static inline hashval_t
2635vn_phi_compute_hash (vn_phi_t vp1)
2636{
f32e91d5 2637 inchash::hash hstate (vp1->block->index);
9e9e6e3e 2638 int i;
2639 tree phi1op;
9a7beb5f 2640 tree type;
9e9e6e3e 2641
9a7beb5f 2642 /* If all PHI arguments are constants we need to distinguish
2643 the PHI node via its type. */
82a7a70c 2644 type = vp1->type;
f32e91d5 2645 hstate.merge_hash (vn_hash_type (type));
9a7beb5f 2646
f1f41a6c 2647 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
9e9e6e3e 2648 {
2649 if (phi1op == VN_TOP)
2650 continue;
f32e91d5 2651 inchash::add_expr (phi1op, hstate);
9e9e6e3e 2652 }
2653
f32e91d5 2654 return hstate.end ();
9e9e6e3e 2655}
2656
9e9e6e3e 2657/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2658
2659static int
3e871d4d 2660vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
9e9e6e3e 2661{
3d2d7de7 2662 if (vp1->hashcode != vp2->hashcode)
2663 return false;
2664
9e9e6e3e 2665 if (vp1->block == vp2->block)
2666 {
2667 int i;
2668 tree phi1op;
2669
9a7beb5f 2670 /* If the PHI nodes do not have compatible types
2671 they are not the same. */
82a7a70c 2672 if (!types_compatible_p (vp1->type, vp2->type))
9a7beb5f 2673 return false;
2674
9e9e6e3e 2675 /* Any phi in the same block will have its arguments in the
2676 same edge order, because of how we store phi nodes. */
f1f41a6c 2677 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
9e9e6e3e 2678 {
f1f41a6c 2679 tree phi2op = vp2->phiargs[i];
9e9e6e3e 2680 if (phi1op == VN_TOP || phi2op == VN_TOP)
2681 continue;
2682 if (!expressions_equal_p (phi1op, phi2op))
2683 return false;
2684 }
2685 return true;
2686 }
2687 return false;
2688}
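
/* Illustrative: two PHIs in the same block whose valueized
   arguments agree pointwise (exposition only), e.g.

     x_1 = PHI <a_2(2), b_3(3)>
     y_4 = PHI <a_2(2), b_3(3)>

   hash and compare equal, so vn_phi_lookup on the second PHI
   returns x_1 as its value.  */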
2689
f1f41a6c 2690static vec<tree> shared_lookup_phiargs;
9e9e6e3e 2691
2692/* Lookup PHI in the current hash table, and return the resulting
2693 value number if it exists in the hash table. Return NULL_TREE if
2694 it does not exist in the hash table. */
2695
3dc4c394 2696static tree
75a70cf9 2697vn_phi_lookup (gimple phi)
9e9e6e3e 2698{
3e871d4d 2699 vn_phi_s **slot;
9e9e6e3e 2700 struct vn_phi_s vp1;
75a70cf9 2701 unsigned i;
9e9e6e3e 2702
f1f41a6c 2703 shared_lookup_phiargs.truncate (0);
9e9e6e3e 2704
2705 /* Canonicalize the SSA_NAME's to their value number. */
75a70cf9 2706 for (i = 0; i < gimple_phi_num_args (phi); i++)
9e9e6e3e 2707 {
2708 tree def = PHI_ARG_DEF (phi, i);
2709 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
f1f41a6c 2710 shared_lookup_phiargs.safe_push (def);
9e9e6e3e 2711 }
82a7a70c 2712 vp1.type = TREE_TYPE (gimple_phi_result (phi));
9e9e6e3e 2713 vp1.phiargs = shared_lookup_phiargs;
75a70cf9 2714 vp1.block = gimple_bb (phi);
9e9e6e3e 2715 vp1.hashcode = vn_phi_compute_hash (&vp1);
c1f445d2 2716 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2717 NO_INSERT);
48694fc0 2718 if (!slot && current_info == optimistic_info)
c1f445d2 2719 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2720 NO_INSERT);
9e9e6e3e 2721 if (!slot)
2722 return NULL_TREE;
3e871d4d 2723 return (*slot)->result;
9e9e6e3e 2724}
2725
2726/* Insert PHI into the current hash table with a value number of
2727 RESULT. */
2728
f6c33c78 2729static vn_phi_t
75a70cf9 2730vn_phi_insert (gimple phi, tree result)
9e9e6e3e 2731{
3e871d4d 2732 vn_phi_s **slot;
9e9e6e3e 2733 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
75a70cf9 2734 unsigned i;
1e094109 2735 vec<tree> args = vNULL;
9e9e6e3e 2736
2737 /* Canonicalize the SSA_NAME's to their value number. */
75a70cf9 2738 for (i = 0; i < gimple_phi_num_args (phi); i++)
9e9e6e3e 2739 {
2740 tree def = PHI_ARG_DEF (phi, i);
2741 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
f1f41a6c 2742 args.safe_push (def);
9e9e6e3e 2743 }
f6c33c78 2744 vp1->value_id = VN_INFO (result)->value_id;
82a7a70c 2745 vp1->type = TREE_TYPE (gimple_phi_result (phi));
9e9e6e3e 2746 vp1->phiargs = args;
75a70cf9 2747 vp1->block = gimple_bb (phi);
9e9e6e3e 2748 vp1->result = result;
2749 vp1->hashcode = vn_phi_compute_hash (vp1);
2750
c1f445d2 2751 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
9e9e6e3e 2752
2753 /* Because we iterate over phi operations more than once, it's
 2754 possible the slot might already exist here, hence no assert. */
2755 *slot = vp1;
f6c33c78 2756 return vp1;
9e9e6e3e 2757}
2758
2759
2760/* Print set of components in strongly connected component SCC to OUT. */
2761
2762static void
f1f41a6c 2763print_scc (FILE *out, vec<tree> scc)
9e9e6e3e 2764{
2765 tree var;
2766 unsigned int i;
2767
7ef97146 2768 fprintf (out, "SCC consists of:");
f1f41a6c 2769 FOR_EACH_VEC_ELT (scc, i, var)
9e9e6e3e 2770 {
9e9e6e3e 2771 fprintf (out, " ");
7ef97146 2772 print_generic_expr (out, var, 0);
9e9e6e3e 2773 }
2774 fprintf (out, "\n");
2775}
2776
2777/* Set the value number of FROM to TO, return true if it has changed
2778 as a result. */
2779
2780static inline bool
2781set_ssa_val_to (tree from, tree to)
2782{
b81ffaee 2783 tree currval = SSA_VAL (from);
d68e9408 2784 HOST_WIDE_INT toff, coff;
9e9e6e3e 2785
85e9a542 2786 /* The only thing we allow as value numbers are ssa_names
2787 and invariants. So assert that here. We don't allow VN_TOP
2788 as visiting a stmt should produce a value-number other than
2789 that.
2790 ??? Still VN_TOP can happen for unreachable code, so force
2791 it to varying in that case. Not all code is prepared to
2792 get VN_TOP on valueization. */
2793 if (to == VN_TOP)
2794 {
2795 if (dump_file && (dump_flags & TDF_DETAILS))
2796 fprintf (dump_file, "Forcing value number to varying on "
2797 "receiving VN_TOP\n");
2798 to = from;
2799 }
2800
2801 gcc_assert (to != NULL_TREE
2802 && (TREE_CODE (to) == SSA_NAME
2803 || is_gimple_min_invariant (to)));
2804
b81ffaee 2805 if (from != to)
2806 {
2807 if (currval == from)
2808 {
2809 if (dump_file && (dump_flags & TDF_DETAILS))
2810 {
2811 fprintf (dump_file, "Not changing value number of ");
2812 print_generic_expr (dump_file, from, 0);
2813 fprintf (dump_file, " from VARYING to ");
2814 print_generic_expr (dump_file, to, 0);
2815 fprintf (dump_file, "\n");
2816 }
2817 return false;
2818 }
2819 else if (TREE_CODE (to) == SSA_NAME
2820 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2821 to = from;
2822 }
5dbdbadc 2823
9e9e6e3e 2824 if (dump_file && (dump_flags & TDF_DETAILS))
2825 {
2826 fprintf (dump_file, "Setting value number of ");
2827 print_generic_expr (dump_file, from, 0);
2828 fprintf (dump_file, " to ");
2829 print_generic_expr (dump_file, to, 0);
9e9e6e3e 2830 }
2831
d68e9408 2832 if (currval != to
2833 && !operand_equal_p (currval, to, 0)
2834 /* ??? For addresses involving volatile objects or types operand_equal_p
2835 does not reliably detect ADDR_EXPRs as equal. We know we are only
2836 getting invariant gimple addresses here, so can use
2837 get_addr_base_and_unit_offset to do this comparison. */
2838 && !(TREE_CODE (currval) == ADDR_EXPR
2839 && TREE_CODE (to) == ADDR_EXPR
2840 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2841 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2842 && coff == toff))
9e9e6e3e 2843 {
dd277d48 2844 VN_INFO (from)->valnum = to;
19744bd4 2845 if (dump_file && (dump_flags & TDF_DETAILS))
2846 fprintf (dump_file, " (changed)\n");
9e9e6e3e 2847 return true;
2848 }
19744bd4 2849 if (dump_file && (dump_flags & TDF_DETAILS))
2850 fprintf (dump_file, "\n");
9e9e6e3e 2851 return false;
2852}
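
/* Schematically (exposition only; x_1 stands for an SSA name):

     set_ssa_val_to (x_1, build_int_cst (integer_type_node, 5))

   returns true the first time (the lattice value changed) and false
   on a repeated identical call.  The SCC iteration re-visits a cycle
   until no such call in it reports a change, i.e. until the value
   lattice reaches a fixed point.  */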
2853
b736e424 2854/* Mark as processed all the definitions in the defining stmt of USE, or
2855 the USE itself. */
2856
2857static void
2858mark_use_processed (tree use)
2859{
2860 ssa_op_iter iter;
2861 def_operand_p defp;
2862 gimple stmt = SSA_NAME_DEF_STMT (use);
2863
2864 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2865 {
2866 VN_INFO (use)->use_processed = true;
2867 return;
2868 }
2869
2870 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2871 {
2872 tree def = DEF_FROM_PTR (defp);
2873
2874 VN_INFO (def)->use_processed = true;
2875 }
2876}
2877
9e9e6e3e 2878/* Value number all definitions in STMT to themselves.
2879 Return true if a value number changed. */
2880
2881static bool
75a70cf9 2882defs_to_varying (gimple stmt)
9e9e6e3e 2883{
2884 bool changed = false;
2885 ssa_op_iter iter;
2886 def_operand_p defp;
2887
2888 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2889 {
2890 tree def = DEF_FROM_PTR (defp);
9e9e6e3e 2891 changed |= set_ssa_val_to (def, def);
2892 }
2893 return changed;
2894}
2895
64919a86 2896static bool expr_has_constants (tree expr);
1d9353f3 2897
9e9e6e3e 2898/* Visit a copy between LHS and RHS, return true if the value number
2899 changed. */
2900
2901static bool
2902visit_copy (tree lhs, tree rhs)
2903{
9e9e6e3e 2904 /* The copy may have a more interesting constant filled expression
2905 (we don't, since we know our RHS is just an SSA name). */
cc99c1e1 2906 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2907 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2908
2909 /* And finally valueize. */
2910 rhs = SSA_VAL (rhs);
9e9e6e3e 2911
2912 return set_ssa_val_to (lhs, rhs);
2913}
2914
0fea623c 2915/* Visit an n-ary operator RHS, value number it, and return true if the
9e9e6e3e 2916 value number of LHS has changed as a result. */
2917
2918static bool
0fea623c 2919visit_nary_op (tree lhs, gimple stmt)
9e9e6e3e 2920{
2921 bool changed = false;
75a70cf9 2922 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
9e9e6e3e 2923
2924 if (result)
0fea623c 2925 changed = set_ssa_val_to (lhs, result);
75a70cf9 2926 else
2927 {
2928 changed = set_ssa_val_to (lhs, lhs);
2929 vn_nary_op_insert_stmt (stmt, lhs);
2930 }
2931
2932 return changed;
2933}
2934
2935/* Visit a call STMT storing into LHS. Return true if the value number
2936 of the LHS has changed as a result. */
2937
2938static bool
1a91d914 2939visit_reference_op_call (tree lhs, gcall *stmt)
9e9e6e3e 2940{
2941 bool changed = false;
75a70cf9 2942 struct vn_reference_s vr1;
b736e424 2943 vn_reference_t vnresult = NULL;
b736e424 2944 tree vdef = gimple_vdef (stmt);
9e9e6e3e 2945
7ec657ff 2946 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2947 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2948 lhs = NULL_TREE;
2949
2fd3ecff 2950 vn_reference_lookup_call (stmt, &vnresult, &vr1);
b736e424 2951 if (vnresult)
9e9e6e3e 2952 {
d1a94241 2953 if (vnresult->result_vdef && vdef)
b736e424 2954 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2955
2956 if (!vnresult->result && lhs)
2957 vnresult->result = lhs;
2958
2959 if (vnresult->result && lhs)
2960 {
2961 changed |= set_ssa_val_to (lhs, vnresult->result);
2962
2963 if (VN_INFO (vnresult->result)->has_constants)
2964 VN_INFO (lhs)->has_constants = true;
2965 }
9e9e6e3e 2966 }
2967 else
2968 {
75a70cf9 2969 vn_reference_t vr2;
2fd3ecff 2970 vn_reference_s **slot;
b736e424 2971 if (vdef)
2972 changed |= set_ssa_val_to (vdef, vdef);
2973 if (lhs)
2974 changed |= set_ssa_val_to (lhs, lhs);
75a70cf9 2975 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
dd277d48 2976 vr2->vuse = vr1.vuse;
2fd3ecff 2977 /* As we are not walking the virtual operand chain we know the
2978 shared_lookup_references are still original so we can re-use
2979 them here. */
2980 vr2->operands = vr1.operands.copy ();
3918bd18 2981 vr2->type = vr1.type;
2982 vr2->set = vr1.set;
75a70cf9 2983 vr2->hashcode = vr1.hashcode;
2984 vr2->result = lhs;
b736e424 2985 vr2->result_vdef = vdef;
c1f445d2 2986 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
2987 INSERT);
2fd3ecff 2988 gcc_assert (!*slot);
75a70cf9 2989 *slot = vr2;
9e9e6e3e 2990 }
2991
2992 return changed;
2993}
2994
2995/* Visit a load from a reference operator RHS, part of STMT, value number it,
2996 and return true if the value number of the LHS has changed as a result. */
2997
2998static bool
75a70cf9 2999visit_reference_op_load (tree lhs, tree op, gimple stmt)
9e9e6e3e 3000{
3001 bool changed = false;
4a83fadb 3002 tree last_vuse;
3003 tree result;
3004
3005 last_vuse = gimple_vuse (stmt);
3006 last_vuse_ptr = &last_vuse;
8f190c8a 3007 result = vn_reference_lookup (op, gimple_vuse (stmt),
3008 default_vn_walk_kind, NULL);
4a83fadb 3009 last_vuse_ptr = NULL;
9e9e6e3e 3010
1d9353f3 3011 /* We handle type-punning through unions by value-numbering based
3012 on offset and size of the access. Be prepared to handle a
3013 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3014 if (result
3015 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3016 {
3017 /* We will be setting the value number of lhs to the value number
3018 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3019 So first simplify and lookup this expression to see if it
3020 is already available. */
3021 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
37279c98 3022 if ((CONVERT_EXPR_P (val)
3023 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3024 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1d9353f3 3025 {
ff99a695 3026 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
37279c98 3027 if ((CONVERT_EXPR_P (tem)
3028 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
cd30b839 3029 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3030 TREE_TYPE (val), tem)))
1d9353f3 3031 val = tem;
3032 }
3033 result = val;
3034 if (!is_gimple_min_invariant (val)
3035 && TREE_CODE (val) != SSA_NAME)
f6c33c78 3036 result = vn_nary_op_lookup (val, NULL);
1d9353f3 3037 /* If the expression is not yet available, value-number lhs to
3038 a new SSA_NAME we create. */
182cf5a9 3039 if (!result)
1d9353f3 3040 {
ec11736b 3041 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3042 "vntemp");
1d9353f3 3043 /* Initialize value-number information properly. */
3044 VN_INFO_GET (result)->valnum = result;
75a70cf9 3045 VN_INFO (result)->value_id = get_next_value_id ();
1d9353f3 3046 VN_INFO (result)->expr = val;
64919a86 3047 VN_INFO (result)->has_constants = expr_has_constants (val);
1d9353f3 3048 VN_INFO (result)->needs_insertion = true;
3049 /* As all "inserted" statements are singleton SCCs, insert
3050 to the valid table. This is strictly needed to
3051 avoid re-generating new value SSA_NAMEs for the same
3052 expression during SCC iteration over and over (the
3053 optimistic table gets cleared after each iteration).
3054 We do not need to insert into the optimistic table, as
3055 lookups there will fall back to the valid table. */
3056 if (current_info == optimistic_info)
3057 {
3058 current_info = valid_info;
3059 vn_nary_op_insert (val, result);
3060 current_info = optimistic_info;
3061 }
3062 else
3063 vn_nary_op_insert (val, result);
3064 if (dump_file && (dump_flags & TDF_DETAILS))
3065 {
3066 fprintf (dump_file, "Inserting name ");
3067 print_generic_expr (dump_file, result, 0);
3068 fprintf (dump_file, " for expression ");
3069 print_generic_expr (dump_file, val, 0);
3070 fprintf (dump_file, "\n");
3071 }
3072 }
3073 }
3074
9e9e6e3e 3075 if (result)
3076 {
3077 changed = set_ssa_val_to (lhs, result);
b9e98b8a 3078 if (TREE_CODE (result) == SSA_NAME
3079 && VN_INFO (result)->has_constants)
3080 {
3081 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3082 VN_INFO (lhs)->has_constants = true;
3083 }
9e9e6e3e 3084 }
3085 else
3086 {
3087 changed = set_ssa_val_to (lhs, lhs);
39215e09 3088 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
9e9e6e3e 3089 }
3090
3091 return changed;
3092}
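
/* Illustrative example of the type-punning handling above
   (exposition only): given

     union U { int i; float f; } u;
     u.f = x_1;
     ... = u.i;

   the load of u.i matches the store by offset and size and is
   valued via VIEW_CONVERT_EXPR <int> (x_1), inventing a "vntemp"
   SSA name when that conversion has no value number yet.  */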
3093
3094
3095/* Visit a store to a reference operator LHS, part of STMT, value number it,
3096 and return true if the value number of the LHS has changed as a result. */
3097
3098static bool
75a70cf9 3099visit_reference_op_store (tree lhs, tree op, gimple stmt)
9e9e6e3e 3100{
3101 bool changed = false;
39215e09 3102 vn_reference_t vnresult = NULL;
3103 tree result, assign;
9e9e6e3e 3104 bool resultsame = false;
39215e09 3105 tree vuse = gimple_vuse (stmt);
3106 tree vdef = gimple_vdef (stmt);
9e9e6e3e 3107
 3108 /* First we want to look up using the *vuses* from the store and see
 3109 if the last store to this location with the same address
3110 had the same value.
3111
3112 The vuses represent the memory state before the store. If the
3113 memory state, address, and value of the store is the same as the
3114 last store to this location, then this store will produce the
3115 same memory state as that store.
3116
3117 In this case the vdef versions for this store are value numbered to those
3118 vuse versions, since they represent the same memory state after
3119 this store.
3120
3121 Otherwise, the vdefs for the store are used when inserting into
3122 the table, since the store generates a new memory state. */
3123
39215e09 3124 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
9e9e6e3e 3125
3126 if (result)
3127 {
3128 if (TREE_CODE (result) == SSA_NAME)
3129 result = SSA_VAL (result);
d4cdfd27 3130 if (TREE_CODE (op) == SSA_NAME)
3131 op = SSA_VAL (op);
9e9e6e3e 3132 resultsame = expressions_equal_p (result, op);
3133 }
3134
2fd3ecff 3135 if ((!result || !resultsame)
3136 /* Only perform the following when being called from PRE
3137 which embeds tail merging. */
3138 && default_vn_walk_kind == VN_WALK)
9e9e6e3e 3139 {
39215e09 3140 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3141 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3142 if (vnresult)
3143 {
3144 VN_INFO (vdef)->use_processed = true;
3145 return set_ssa_val_to (vdef, vnresult->result_vdef);
3146 }
3147 }
9e9e6e3e 3148
39215e09 3149 if (!result || !resultsame)
3150 {
9e9e6e3e 3151 if (dump_file && (dump_flags & TDF_DETAILS))
3152 {
3153 fprintf (dump_file, "No store match\n");
3154 fprintf (dump_file, "Value numbering store ");
3155 print_generic_expr (dump_file, lhs, 0);
3156 fprintf (dump_file, " to ");
3157 print_generic_expr (dump_file, op, 0);
3158 fprintf (dump_file, "\n");
3159 }
3160 /* Have to set value numbers before insert, since insert is
3161 going to valueize the references in-place. */
39215e09 3162 if (vdef)
9e9e6e3e 3163 {
9e9e6e3e 3164 changed |= set_ssa_val_to (vdef, vdef);
3165 }
3166
802d9f2f 3167 /* Do not insert structure copies into the tables. */
3168 if (is_gimple_min_invariant (op)
3169 || is_gimple_reg (op))
39215e09 3170 vn_reference_insert (lhs, op, vdef, NULL);
3171
2fd3ecff 3172 /* Only perform the following when being called from PRE
3173 which embeds tail merging. */
3174 if (default_vn_walk_kind == VN_WALK)
3175 {
3176 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3177 vn_reference_insert (assign, lhs, vuse, vdef);
3178 }
9e9e6e3e 3179 }
3180 else
3181 {
dd277d48 3182 /* We had a match, so value number the vdef to have the value
3183 number of the vuse it came from. */
9e9e6e3e 3184
3185 if (dump_file && (dump_flags & TDF_DETAILS))
3186 fprintf (dump_file, "Store matched earlier value,"
3187 "value numbering store vdefs to matching vuses.\n");
3188
39215e09 3189 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
9e9e6e3e 3190 }
3191
3192 return changed;
3193}
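
/* Illustrative: for the redundant store sequence (exposition only)

     # .MEM_4 = VDEF <.MEM_3>
     *p_1 = x_2;
     ...
     # .MEM_6 = VDEF <.MEM_4>
     *p_1 = x_2;

   the lookup above finds that the second store writes the value the
   location already has, so .MEM_6 is valued to .MEM_4 and the store
   does not generate a new memory state.  */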
3194
3195/* Visit and value number PHI, return true if the value number
3196 changed. */
3197
3198static bool
75a70cf9 3199visit_phi (gimple phi)
9e9e6e3e 3200{
3201 bool changed = false;
3202 tree result;
3203 tree sameval = VN_TOP;
3204 bool allsame = true;
9e9e6e3e 3205
5f6261a7 3206 /* TODO: We could check for this in init_sccvn, and replace this
3207 with a gcc_assert. */
3208 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3209 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3210
9e9e6e3e 3211 /* See if all non-TOP arguments have the same value. TOP is
3212 equivalent to everything, so we can ignore it. */
85e9a542 3213 edge_iterator ei;
3214 edge e;
3215 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3216 if (e->flags & EDGE_EXECUTABLE)
3217 {
3218 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
9e9e6e3e 3219
85e9a542 3220 if (TREE_CODE (def) == SSA_NAME)
3221 def = SSA_VAL (def);
3222 if (def == VN_TOP)
3223 continue;
3224 if (sameval == VN_TOP)
3225 {
3226 sameval = def;
3227 }
3228 else
3229 {
3230 if (!expressions_equal_p (def, sameval))
3231 {
3232 allsame = false;
3233 break;
3234 }
3235 }
3236 }
9e9e6e3e 3237
3238 /* If all value numbered to the same value, the phi node has that
3239 value. */
3240 if (allsame)
b46fda07 3241 return set_ssa_val_to (PHI_RESULT (phi), sameval);
9e9e6e3e 3242
3243 /* Otherwise, see if it is equivalent to a phi node in this block. */
3244 result = vn_phi_lookup (phi);
3245 if (result)
b46fda07 3246 changed = set_ssa_val_to (PHI_RESULT (phi), result);
9e9e6e3e 3247 else
3248 {
3249 vn_phi_insert (phi, PHI_RESULT (phi));
3250 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3251 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3252 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3253 }
3254
3255 return changed;
3256}
3257
3258/* Return true if EXPR contains constants. */
3259
3260static bool
3261expr_has_constants (tree expr)
3262{
3263 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3264 {
3265 case tcc_unary:
3266 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3267
3268 case tcc_binary:
3269 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3270 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3271 /* Constants inside reference ops are rarely interesting, but
3272 it can take a lot of looking to find them. */
3273 case tcc_reference:
70ae6476 3274 case tcc_declaration:
9e9e6e3e 3275 return false;
3276 default:
3277 return is_gimple_min_invariant (expr);
3278 }
3279 return false;
3280}
3281
75a70cf9 3282/* Return true if STMT contains constants. */
3283
3284static bool
3285stmt_has_constants (gimple stmt)
3286{
cc99c1e1 3287 tree tem;
3288
75a70cf9 3289 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3290 return false;
3291
3292 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3293 {
cc99c1e1 3294 case GIMPLE_TERNARY_RHS:
3295 tem = gimple_assign_rhs3 (stmt);
3296 if (TREE_CODE (tem) == SSA_NAME)
3297 tem = SSA_VAL (tem);
3298 if (is_gimple_min_invariant (tem))
3299 return true;
3300 /* Fallthru. */
75a70cf9 3301
3302 case GIMPLE_BINARY_RHS:
cc99c1e1 3303 tem = gimple_assign_rhs2 (stmt);
3304 if (TREE_CODE (tem) == SSA_NAME)
3305 tem = SSA_VAL (tem);
3306 if (is_gimple_min_invariant (tem))
3307 return true;
3308 /* Fallthru. */
3309
75a70cf9 3310 case GIMPLE_SINGLE_RHS:
3311 /* Constants inside reference ops are rarely interesting, but
3312 it can take a lot of looking to find them. */
cc99c1e1 3313 case GIMPLE_UNARY_RHS:
3314 tem = gimple_assign_rhs1 (stmt);
3315 if (TREE_CODE (tem) == SSA_NAME)
3316 tem = SSA_VAL (tem);
3317 return is_gimple_min_invariant (tem);
3318
75a70cf9 3319 default:
3320 gcc_unreachable ();
3321 }
3322 return false;
3323}
3324
9e9e6e3e 3325/* Simplify the binary expression RHS, and return the result if
3326 simplified. */
3327
3328static tree
75a70cf9 3329simplify_binary_expression (gimple stmt)
9e9e6e3e 3330{
3331 tree result = NULL_TREE;
75a70cf9 3332 tree op0 = gimple_assign_rhs1 (stmt);
3333 tree op1 = gimple_assign_rhs2 (stmt);
77d62cb7 3334 enum tree_code code = gimple_assign_rhs_code (stmt);
9e9e6e3e 3335
3336 /* This will not catch every single case we could combine, but will
3337 catch those with constants. The goal here is to simultaneously
3338 combine constants between expressions, but avoid infinite
3339 expansion of expressions during simplification. */
b46fda07 3340 op0 = vn_valueize (op0);
3341 if (TREE_CODE (op0) == SSA_NAME
3342 && (VN_INFO (op0)->has_constants
77d62cb7 3343 || TREE_CODE_CLASS (code) == tcc_comparison
b46fda07 3344 || code == COMPLEX_EXPR))
3345 op0 = vn_get_expr_for (op0);
9e9e6e3e 3346
b46fda07 3347 op1 = vn_valueize (op1);
3348 if (TREE_CODE (op1) == SSA_NAME
3349 && (VN_INFO (op1)->has_constants
3350 || code == COMPLEX_EXPR))
3351 op1 = vn_get_expr_for (op1);
1c6d350b 3352
1d0b727d 3353 /* Pointer plus constant can be represented as an invariant address.
 3354 Do so to allow further propagation; see also tree forwprop. */
77d62cb7 3355 if (code == POINTER_PLUS_EXPR
e913b5cd 3356 && tree_fits_uhwi_p (op1)
1d0b727d 3357 && TREE_CODE (op0) == ADDR_EXPR
3358 && is_gimple_min_invariant (op0))
3359 return build_invariant_address (TREE_TYPE (op0),
3360 TREE_OPERAND (op0, 0),
e913b5cd 3361 tree_to_uhwi (op1));
1d0b727d 3362
e01e695f 3363 /* Avoid folding if nothing changed. */
75a70cf9 3364 if (op0 == gimple_assign_rhs1 (stmt)
3365 && op1 == gimple_assign_rhs2 (stmt))
e01e695f 3366 return NULL_TREE;
3367
72c59a18 3368 fold_defer_overflow_warnings ();
3369
77d62cb7 3370 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
19744bd4 3371 if (result)
3372 STRIP_USELESS_TYPE_CONVERSION (result);
9e9e6e3e 3373
75a70cf9 3374 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
72c59a18 3375 stmt, 0);
3376
6dfdc153 3377 /* Make sure result is not a complex expression consisting
9e9e6e3e 3378 of operators of operators (i.e. (a + b) + (a + c)).
3379 Otherwise, we will end up with unbounded expressions if
3380 fold does anything at all. */
75a70cf9 3381 if (result && valid_gimple_rhs_p (result))
1c6d350b 3382 return result;
3383
9e9e6e3e 3384 return NULL_TREE;
3385}
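
/* Illustrative (exposition only): for

     p_1 = &a;
     q_2 = p_1 + 4;

   op0 valueizes to &a and the POINTER_PLUS_EXPR case above returns
   the invariant address built by build_invariant_address, roughly
   &MEM[&a + 4], which later reference lookups can look through.  */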
3386
e01e695f 3387/* Simplify the unary expression RHS, and return the result if
3388 simplified. */
3389
3390static tree
1a91d914 3391simplify_unary_expression (gassign *stmt)
e01e695f 3392{
3393 tree result = NULL_TREE;
75a70cf9 3394 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
77d62cb7 3395 enum tree_code code = gimple_assign_rhs_code (stmt);
75a70cf9 3396
3397 /* We handle some tcc_reference codes here that are all
3398 GIMPLE_ASSIGN_SINGLE codes. */
77d62cb7 3399 if (code == REALPART_EXPR
3400 || code == IMAGPART_EXPR
3eebeec6 3401 || code == VIEW_CONVERT_EXPR
3402 || code == BIT_FIELD_REF)
75a70cf9 3403 op0 = TREE_OPERAND (op0, 0);
e01e695f 3404
75a70cf9 3405 orig_op0 = op0;
b46fda07 3406 op0 = vn_valueize (op0);
3407 if (TREE_CODE (op0) == SSA_NAME)
e01e695f 3408 {
b46fda07 3409 if (VN_INFO (op0)->has_constants)
3410 op0 = vn_get_expr_for (op0);
3411 else if (CONVERT_EXPR_CODE_P (code)
3412 || code == REALPART_EXPR
3413 || code == IMAGPART_EXPR
3414 || code == VIEW_CONVERT_EXPR
3415 || code == BIT_FIELD_REF)
3416 {
3417 /* We want to do tree-combining on conversion-like expressions.
3418 Make sure we feed only SSA_NAMEs or constants to fold though. */
3419 tree tem = vn_get_expr_for (op0);
3420 if (UNARY_CLASS_P (tem)
3421 || BINARY_CLASS_P (tem)
3422 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3423 || TREE_CODE (tem) == SSA_NAME
3424 || TREE_CODE (tem) == CONSTRUCTOR
3425 || is_gimple_min_invariant (tem))
3426 op0 = tem;
3427 }
e01e695f 3428 }
3429
 3430	  /* Avoid folding if nothing changed.  */
75a70cf9 3431 if (op0 == orig_op0)
3432 return NULL_TREE;
e01e695f 3433
3eebeec6 3434 if (code == BIT_FIELD_REF)
3435 {
3436 tree rhs = gimple_assign_rhs1 (stmt);
3437 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3438 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3439 }
3440 else
3441 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
e01e695f 3442 if (result)
3443 {
3444 STRIP_USELESS_TYPE_CONVERSION (result);
75a70cf9 3445 if (valid_gimple_rhs_p (result))
e01e695f 3446 return result;
3447 }
3448
75a70cf9 3449 return NULL_TREE;
e01e695f 3450}
3451
9e9e6e3e 3452/* Try to simplify RHS using equivalences and constant folding. */
3453
3454static tree
1a91d914 3455try_to_simplify (gassign *stmt)
9e9e6e3e 3456{
ce993cc2 3457 enum tree_code code = gimple_assign_rhs_code (stmt);
e004838d 3458 tree tem;
3459
d4cdfd27 3460	  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
 3461	     in this case; there is no point in doing extra work.  */
ce993cc2 3462 if (code == SSA_NAME)
75a70cf9 3463 return NULL_TREE;
e004838d 3464
1d0b727d 3465 /* First try constant folding based on our current lattice. */
ef8cb3d3 3466 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
ce993cc2 3467 if (tem
3468 && (TREE_CODE (tem) == SSA_NAME
3469 || is_gimple_min_invariant (tem)))
1d0b727d 3470 return tem;
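  /* E.g. with x_1 already valued to 7 in the lattice, a statement
     y_2 = x_1 + 1 folds to the constant 8 here without any of the
     expression combining below.  */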
3471
3472 /* If that didn't work try combining multiple statements. */
ce993cc2 3473 switch (TREE_CODE_CLASS (code))
9e9e6e3e 3474 {
e004838d 3475 case tcc_reference:
ce993cc2 3476 /* Fallthrough for some unary codes that can operate on registers. */
3477 if (!(code == REALPART_EXPR
3478 || code == IMAGPART_EXPR
3eebeec6 3479 || code == VIEW_CONVERT_EXPR
3480 || code == BIT_FIELD_REF))
e004838d 3481 break;
3482 /* We could do a little more with unary ops, if they expand
3483 into binary ops, but it's debatable whether it is worth it. */
3484 case tcc_unary:
75a70cf9 3485 return simplify_unary_expression (stmt);
1d0b727d 3486
e004838d 3487 case tcc_comparison:
3488 case tcc_binary:
75a70cf9 3489 return simplify_binary_expression (stmt);
1d0b727d 3490
e004838d 3491 default:
3492 break;
9e9e6e3e 3493 }
e004838d 3494
75a70cf9 3495 return NULL_TREE;
9e9e6e3e 3496}
3497
3498/* Visit and value number USE, return true if the value number
3499 changed. */
3500
3501static bool
3502visit_use (tree use)
3503{
3504 bool changed = false;
75a70cf9 3505 gimple stmt = SSA_NAME_DEF_STMT (use);
9e9e6e3e 3506
b736e424 3507 mark_use_processed (use);
9e9e6e3e 3508
3509 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1d9353f3 3510 if (dump_file && (dump_flags & TDF_DETAILS)
75a70cf9 3511 && !SSA_NAME_IS_DEFAULT_DEF (use))
9e9e6e3e 3512 {
3513 fprintf (dump_file, "Value numbering ");
3514 print_generic_expr (dump_file, use, 0);
3515 fprintf (dump_file, " stmt = ");
75a70cf9 3516 print_gimple_stmt (dump_file, stmt, 0, 0);
9e9e6e3e 3517 }
3518
9e9e6e3e 3519 /* Handle uninitialized uses. */
75a70cf9 3520 if (SSA_NAME_IS_DEFAULT_DEF (use))
3521 changed = set_ssa_val_to (use, use);
9e9e6e3e 3522 else
3523 {
75a70cf9 3524 if (gimple_code (stmt) == GIMPLE_PHI)
3525 changed = visit_phi (stmt);
b736e424 3526 else if (gimple_has_volatile_ops (stmt))
75a70cf9 3527 changed = defs_to_varying (stmt);
3528 else if (is_gimple_assign (stmt))
9e9e6e3e 3529 {
7aa07231 3530 enum tree_code code = gimple_assign_rhs_code (stmt);
75a70cf9 3531 tree lhs = gimple_assign_lhs (stmt);
7aa07231 3532 tree rhs1 = gimple_assign_rhs1 (stmt);
9e9e6e3e 3533 tree simplified;
3534
2a922cb6 3535 /* Shortcut for copies. Simplifying copies is pointless,
3536 since we copy the expression and value they represent. */
7aa07231 3537 if (code == SSA_NAME
75a70cf9 3538 && TREE_CODE (lhs) == SSA_NAME)
2a922cb6 3539 {
7aa07231 3540 changed = visit_copy (lhs, rhs1);
2a922cb6 3541 goto done;
3542 }
1a91d914 3543 simplified = try_to_simplify (as_a <gassign *> (stmt));
75a70cf9 3544 if (simplified)
9e9e6e3e 3545 {
3546 if (dump_file && (dump_flags & TDF_DETAILS))
3547 {
3548 fprintf (dump_file, "RHS ");
75a70cf9 3549 print_gimple_expr (dump_file, stmt, 0, 0);
9e9e6e3e 3550 fprintf (dump_file, " simplified to ");
3551 print_generic_expr (dump_file, simplified, 0);
3552 if (TREE_CODE (lhs) == SSA_NAME)
3553 fprintf (dump_file, " has constants %d\n",
404d6be4 3554 expr_has_constants (simplified));
9e9e6e3e 3555 else
3556 fprintf (dump_file, "\n");
9e9e6e3e 3557 }
3558 }
3559 /* Setting value numbers to constants will occasionally
3560 screw up phi congruence because constants are not
3561 uniquely associated with a single ssa name that can be
3562 looked up. */
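	  /* E.g. once both a_1 and b_2 have been valued to the
	     constant 4, the constant no longer identifies a unique
	     SSA name, so a lookup starting from 4 cannot get back
	     to either name.  */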
75a70cf9 3563 if (simplified
3564 && is_gimple_min_invariant (simplified)
3565 && TREE_CODE (lhs) == SSA_NAME)
9e9e6e3e 3566 {
3567 VN_INFO (lhs)->expr = simplified;
3568 VN_INFO (lhs)->has_constants = true;
3569 changed = set_ssa_val_to (lhs, simplified);
3570 goto done;
3571 }
75a70cf9 3572 else if (simplified
3573 && TREE_CODE (simplified) == SSA_NAME
9e9e6e3e 3574 && TREE_CODE (lhs) == SSA_NAME)
3575 {
3576 changed = visit_copy (lhs, simplified);
3577 goto done;
3578 }
3579 else if (simplified)
3580 {
3581 if (TREE_CODE (lhs) == SSA_NAME)
3582 {
3583 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3584 /* We have to unshare the expression or else
3585 valuizing may change the IL stream. */
3586 VN_INFO (lhs)->expr = unshare_expr (simplified);
3587 }
9e9e6e3e 3588 }
75a70cf9 3589 else if (stmt_has_constants (stmt)
3590 && TREE_CODE (lhs) == SSA_NAME)
3591 VN_INFO (lhs)->has_constants = true;
9e9e6e3e 3592 else if (TREE_CODE (lhs) == SSA_NAME)
3593 {
3594 /* We reset expr and constantness here because we may
3595 have been value numbering optimistically, and
3596 iterating. They may become non-constant in this case,
3597 even if they were optimistically constant. */
99698cf3 3598
9e9e6e3e 3599 VN_INFO (lhs)->has_constants = false;
75a70cf9 3600 VN_INFO (lhs)->expr = NULL_TREE;
9e9e6e3e 3601 }
3602
a4c8b601 3603 if ((TREE_CODE (lhs) == SSA_NAME
3604 /* We can substitute SSA_NAMEs that are live over
3605 abnormal edges with their constant value. */
3606 && !(gimple_assign_copy_p (stmt)
7aa07231 3607 && is_gimple_min_invariant (rhs1))
a4c8b601 3608 && !(simplified
3609 && is_gimple_min_invariant (simplified))
3610 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3611 /* Stores or copies from SSA_NAMEs that are live over
3612 abnormal edges are a problem. */
7aa07231 3613 || (code == SSA_NAME
3614 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
9e9e6e3e 3615 changed = defs_to_varying (stmt);
7aa07231 3616 else if (REFERENCE_CLASS_P (lhs)
3617 || DECL_P (lhs))
3618 changed = visit_reference_op_store (lhs, rhs1, stmt);
9e9e6e3e 3619 else if (TREE_CODE (lhs) == SSA_NAME)
3620 {
75a70cf9 3621 if ((gimple_assign_copy_p (stmt)
7aa07231 3622 && is_gimple_min_invariant (rhs1))
75a70cf9 3623 || (simplified
3624 && is_gimple_min_invariant (simplified)))
9e9e6e3e 3625 {
3626 VN_INFO (lhs)->has_constants = true;
75a70cf9 3627 if (simplified)
3628 changed = set_ssa_val_to (lhs, simplified);
3629 else
7aa07231 3630 changed = set_ssa_val_to (lhs, rhs1);
9e9e6e3e 3631 }
9e9e6e3e 3632 else
3633 {
3a73dcc2 3634 /* First try to lookup the simplified expression. */
3635 if (simplified)
3636 {
3637 enum gimple_rhs_class rhs_class;
3638
3639
3640 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3641 if ((rhs_class == GIMPLE_UNARY_RHS
3642 || rhs_class == GIMPLE_BINARY_RHS
3643 || rhs_class == GIMPLE_TERNARY_RHS)
3644 && valid_gimple_rhs_p (simplified))
3645 {
3646 tree result = vn_nary_op_lookup (simplified, NULL);
3647 if (result)
3648 {
3649 changed = set_ssa_val_to (lhs, result);
3650 goto done;
3651 }
3652 }
3653 }
3654
3655 /* Otherwise visit the original statement. */
024fee2c 3656 switch (vn_get_stmt_kind (stmt))
9e9e6e3e 3657 {
024fee2c 3658 case VN_NARY:
0fea623c 3659 changed = visit_nary_op (lhs, stmt);
9e9e6e3e 3660 break;
024fee2c 3661 case VN_REFERENCE:
3662 changed = visit_reference_op_load (lhs, rhs1, stmt);
75a70cf9 3663 break;
9e9e6e3e 3664 default:
3665 changed = defs_to_varying (stmt);
3666 break;
3667 }
3668 }
3669 }
3670 else
3671 changed = defs_to_varying (stmt);
3672 }
1a91d914 3673 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
75a70cf9 3674 {
3675 tree lhs = gimple_call_lhs (stmt);
b736e424 3676 if (lhs && TREE_CODE (lhs) == SSA_NAME)
75a70cf9 3677 {
b15c78f2 3678 /* Try constant folding based on our current lattice. */
3679 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3680 vn_valueize);
3681 if (simplified)
3682 {
3683 if (dump_file && (dump_flags & TDF_DETAILS))
3684 {
3685 fprintf (dump_file, "call ");
3686 print_gimple_expr (dump_file, stmt, 0, 0);
3687 fprintf (dump_file, " simplified to ");
3688 print_generic_expr (dump_file, simplified, 0);
3689 if (TREE_CODE (lhs) == SSA_NAME)
3690 fprintf (dump_file, " has constants %d\n",
3691 expr_has_constants (simplified));
3692 else
3693 fprintf (dump_file, "\n");
3694 }
3695 }
3696 /* Setting value numbers to constants will occasionally
3697 screw up phi congruence because constants are not
3698 uniquely associated with a single ssa name that can be
3699 looked up. */
3700 if (simplified
3701 && is_gimple_min_invariant (simplified))
b736e424 3702 {
b15c78f2 3703 VN_INFO (lhs)->expr = simplified;
3704 VN_INFO (lhs)->has_constants = true;
3705 changed = set_ssa_val_to (lhs, simplified);
3706 if (gimple_vdef (stmt))
3707 changed |= set_ssa_val_to (gimple_vdef (stmt),
3708 gimple_vuse (stmt));
3709 goto done;
b736e424 3710 }
b15c78f2 3711 else if (simplified
3712 && TREE_CODE (simplified) == SSA_NAME)
b736e424 3713 {
b15c78f2 3714 changed = visit_copy (lhs, simplified);
3715 if (gimple_vdef (stmt))
3716 changed |= set_ssa_val_to (gimple_vdef (stmt),
3717 gimple_vuse (stmt));
b736e424 3718 goto done;
3719 }
b15c78f2 3720 else
3721 {
3722 if (stmt_has_constants (stmt))
3723 VN_INFO (lhs)->has_constants = true;
3724 else
3725 {
3726 /* We reset expr and constantness here because we may
3727 have been value numbering optimistically, and
3728 iterating. They may become non-constant in this case,
3729 even if they were optimistically constant. */
3730 VN_INFO (lhs)->has_constants = false;
3731 VN_INFO (lhs)->expr = NULL_TREE;
3732 }
3733
3734 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3735 {
3736 changed = defs_to_varying (stmt);
3737 goto done;
3738 }
3739 }
75a70cf9 3740 }
3741
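      /* For example, two const calls
	   x_1 = __builtin_popcount (a_2);
	   y_3 = __builtin_popcount (a_2);
	 can be given the same value number, while calls that touch
	 memory also need a matching vuse.  */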
b736e424 3742 if (!gimple_call_internal_p (stmt)
7ec657ff 3743 && (/* Calls to the same function with the same vuse
3744 and the same operands do not necessarily return the same
3745 value, unless they're pure or const. */
3746 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3747 /* If calls have a vdef, subsequent calls won't have
3748 the same incoming vuse. So, if 2 calls with vdef have the
3749 same vuse, we know they're not subsequent.
 3750	     We can give 2 such non-subsequent calls to the same
 3751	     function with the same vuse and the same operands the
 3752	     same value number, because there is no code in the program
affbda71 3753	     that can compare the 2 values...  */
3754 || (gimple_vdef (stmt)
3755 /* ... unless the call returns a pointer which does
 3756		        not alias with anything else, in which case the
 3757		        information that the values are distinct is encoded
 3758		        in the IL.  */
1a91d914 3759 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
2fd3ecff 3760 /* Only perform the following when being called from PRE
3761 which embeds tail merging. */
3762 && default_vn_walk_kind == VN_WALK)))
1a91d914 3763 changed = visit_reference_op_call (lhs, call_stmt);
75a70cf9 3764 else
3765 changed = defs_to_varying (stmt);
3766 }
b736e424 3767 else
3768 changed = defs_to_varying (stmt);
9e9e6e3e 3769 }
3770 done:
3771 return changed;
3772}
3773
 3774	/* Compare two operands by reverse postorder index.  */
3775
3776static int
3777compare_ops (const void *pa, const void *pb)
3778{
3779 const tree opa = *((const tree *)pa);
3780 const tree opb = *((const tree *)pb);
75a70cf9 3781 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3782 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
9e9e6e3e 3783 basic_block bba;
3784 basic_block bbb;
3785
75a70cf9 3786 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
f7b092e4 3787 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
75a70cf9 3788 else if (gimple_nop_p (opstmta))
9e9e6e3e 3789 return -1;
75a70cf9 3790 else if (gimple_nop_p (opstmtb))
9e9e6e3e 3791 return 1;
3792
75a70cf9 3793 bba = gimple_bb (opstmta);
3794 bbb = gimple_bb (opstmtb);
9e9e6e3e 3795
3796 if (!bba && !bbb)
f7b092e4 3797 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
9e9e6e3e 3798 else if (!bba)
3799 return -1;
3800 else if (!bbb)
3801 return 1;
3802
3803 if (bba == bbb)
3804 {
75a70cf9 3805 if (gimple_code (opstmta) == GIMPLE_PHI
3806 && gimple_code (opstmtb) == GIMPLE_PHI)
f7b092e4 3807 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
75a70cf9 3808 else if (gimple_code (opstmta) == GIMPLE_PHI)
9e9e6e3e 3809 return -1;
75a70cf9 3810 else if (gimple_code (opstmtb) == GIMPLE_PHI)
9e9e6e3e 3811 return 1;
f7b092e4 3812 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3813 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3814 else
3815 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
9e9e6e3e 3816 }
3817 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3818}
3819
3820/* Sort an array containing members of a strongly connected component
3821 SCC so that the members are ordered by RPO number.
3822 This means that when the sort is complete, iterating through the
3823 array will give you the members in RPO order. */
3824
3825static void
f1f41a6c 3826sort_scc (vec<tree> scc)
9e9e6e3e 3827{
f1f41a6c 3828 scc.qsort (compare_ops);
9e9e6e3e 3829}
3830
3df47675 3831/* Insert the no longer used nary ONARY into the nary hash table of INFO.  */
ca4721d3 3832
3df47675 3833static void
3834copy_nary (vn_nary_op_t onary, vn_tables_t info)
ca4721d3 3835{
f8ce304c 3836 size_t size = sizeof_vn_nary_op (onary->length);
3837 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3838 &info->nary_obstack);
ca4721d3 3839 memcpy (nary, onary, size);
f8ce304c 3840 vn_nary_op_insert_into (nary, info->nary, false);
ca4721d3 3841}
3842
3df47675 3843/* Insert the no longer used phi OPHI into the phi hash table of INFO.  */
ca4721d3 3844
3df47675 3845static void
3846copy_phi (vn_phi_t ophi, vn_tables_t info)
ca4721d3 3847{
3df47675 3848 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3e871d4d 3849 vn_phi_s **slot;
ca4721d3 3850 memcpy (phi, ophi, sizeof (*phi));
f1f41a6c 3851 ophi->phiargs.create (0);
c1f445d2 3852 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3df47675 3853 gcc_assert (!*slot);
ca4721d3 3854 *slot = phi;
ca4721d3 3855}
3856
3df47675 3857/* Insert the no longer used reference OREF into the reference hash
   table of INFO.  */
ca4721d3 3858
3df47675 3859static void
3860copy_reference (vn_reference_t oref, vn_tables_t info)
ca4721d3 3861{
ca4721d3 3862 vn_reference_t ref;
3e871d4d 3863 vn_reference_s **slot;
3df47675 3864 ref = (vn_reference_t) pool_alloc (info->references_pool);
ca4721d3 3865 memcpy (ref, oref, sizeof (*ref));
f1f41a6c 3866 oref->operands.create (0);
c1f445d2 3867 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
ca4721d3 3868 if (*slot)
3869 free_reference (*slot);
3870 *slot = ref;
ca4721d3 3871}
3872
9e9e6e3e 3873/* Process a strongly connected component in the SSA graph. */
3874
3875static void
f1f41a6c 3876process_scc (vec<tree> scc)
9e9e6e3e 3877{
3df47675 3878 tree var;
3879 unsigned int i;
3880 unsigned int iterations = 0;
3881 bool changed = true;
3e871d4d 3882 vn_nary_op_iterator_type hin;
3883 vn_phi_iterator_type hip;
3884 vn_reference_iterator_type hir;
3df47675 3885 vn_nary_op_t nary;
3886 vn_phi_t phi;
3887 vn_reference_t ref;
9e9e6e3e 3888
3df47675 3889 /* If the SCC has a single member, just visit it. */
f1f41a6c 3890 if (scc.length () == 1)
9e9e6e3e 3891 {
f1f41a6c 3892 tree use = scc[0];
ebca8514 3893 if (VN_INFO (use)->use_processed)
3894 return;
3895 /* We need to make sure it doesn't form a cycle itself, which can
3896 happen for self-referential PHI nodes. In that case we would
3897 end up inserting an expression with VN_TOP operands into the
3898 valid table which makes us derive bogus equivalences later.
3899 The cheapest way to check this is to assume it for all PHI nodes. */
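      /* E.g. x_1 = PHI <x_1(latch), a_2(entry)> is a single-member
	 SCC that still contains a cycle, so it has to go through the
	 optimistic iteration below instead of a direct visit.  */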
3900 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3901 /* Fallthru to iteration. */ ;
3902 else
3903 {
3904 visit_use (use);
3905 return;
3906 }
9e9e6e3e 3907 }
3df47675 3908
c443af04 3909 if (dump_file && (dump_flags & TDF_DETAILS))
3910 print_scc (dump_file, scc);
3911
3df47675 3912 /* Iterate over the SCC with the optimistic table until it stops
3913 changing. */
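  /* E.g. for the SCC
       i_1 = PHI <0(preheader), i_2(latch)>
       i_2 = i_1 + 0
     the first iteration optimistically values i_1 and then i_2 to 0
     (the latch argument starts out as VN_TOP); the second iteration
     reproduces those values and the loop stops changing.  */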
3914 current_info = optimistic_info;
3915 while (changed)
9e9e6e3e 3916 {
3df47675 3917 changed = false;
3918 iterations++;
b81ffaee 3919 if (dump_file && (dump_flags & TDF_DETAILS))
3920 fprintf (dump_file, "Starting iteration %d\n", iterations);
3df47675 3921 /* As we are value-numbering optimistically we have to
3922 clear the expression tables and the simplified expressions
3923 in each iteration until we converge. */
c1f445d2 3924 optimistic_info->nary->empty ();
3925 optimistic_info->phis->empty ();
3926 optimistic_info->references->empty ();
3df47675 3927 obstack_free (&optimistic_info->nary_obstack, NULL);
3928 gcc_obstack_init (&optimistic_info->nary_obstack);
3929 empty_alloc_pool (optimistic_info->phis_pool);
3930 empty_alloc_pool (optimistic_info->references_pool);
f1f41a6c 3931 FOR_EACH_VEC_ELT (scc, i, var)
3df47675 3932 VN_INFO (var)->expr = NULL_TREE;
f1f41a6c 3933 FOR_EACH_VEC_ELT (scc, i, var)
3df47675 3934 changed |= visit_use (var);
3935 }
9e9e6e3e 3936
c443af04 3937 if (dump_file && (dump_flags & TDF_DETAILS))
3938 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3df47675 3939 statistics_histogram_event (cfun, "SCC iterations", iterations);
9e9e6e3e 3940
3df47675 3941 /* Finally, copy the contents of the no longer used optimistic
3942 table to the valid table. */
c1f445d2 3943 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3df47675 3944 copy_nary (nary, valid_info);
c1f445d2 3945 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3df47675 3946 copy_phi (phi, valid_info);
c1f445d2 3947 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3e871d4d 3948 ref, vn_reference_t, hir)
3df47675 3949 copy_reference (ref, valid_info);
3950
3951 current_info = valid_info;
9e9e6e3e 3952}
3953
000ef0a0 3954
3955/* Pop the components of the found SCC for NAME off the SCC stack
3956 and process them. Returns true if all went well, false if
3957 we run into resource limits. */
3958
3959static bool
3960extract_and_process_scc_for_name (tree name)
3961{
c2078b80 3962 auto_vec<tree> scc;
000ef0a0 3963 tree x;
3964
3965 /* Found an SCC, pop the components off the SCC stack and
3966 process them. */
3967 do
3968 {
f1f41a6c 3969 x = sccstack.pop ();
000ef0a0 3970
3971 VN_INFO (x)->on_sccstack = false;
f1f41a6c 3972 scc.safe_push (x);
000ef0a0 3973 } while (x != name);
3974
 3975	  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
f1f41a6c 3976 if (scc.length ()
000ef0a0 3977 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3978 {
3979 if (dump_file)
3980 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
f1f41a6c 3981 "SCC size %u exceeding %u\n", scc.length (),
000ef0a0 3982 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
83b709f2 3983
000ef0a0 3984 return false;
3985 }
3986
f1f41a6c 3987 if (scc.length () > 1)
000ef0a0 3988 sort_scc (scc);
3989
000ef0a0 3990 process_scc (scc);
3991
000ef0a0 3992 return true;
3993}
3994
9e9e6e3e 3995/* Depth first search on NAME to discover and process SCC's in the SSA
3996 graph.
3997 Execution of this algorithm relies on the fact that the SCC's are
a9b2282e 3998 popped off the stack in topological order.
3999 Returns true if successful, false if we stopped processing SCC's due
f0b5f617 4000 to resource constraints. */
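/* E.g. for the cycle
     i_1 = PHI <0(2), i_2(3)>
     i_2 = i_1 + 1
   the walk from i_1 reaches i_2 and from i_2 gets back to i_1, so
   both names end up with the same low value and are popped together
   as one SCC.  */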
9e9e6e3e 4001
a9b2282e 4002static bool
9e9e6e3e 4003DFS (tree name)
4004{
1e094109 4005 vec<ssa_op_iter> itervec = vNULL;
4006 vec<tree> namevec = vNULL;
000ef0a0 4007 use_operand_p usep = NULL;
75a70cf9 4008 gimple defstmt;
4009 tree use;
9e9e6e3e 4010 ssa_op_iter iter;
9e9e6e3e 4011
000ef0a0 4012start_over:
9e9e6e3e 4013 /* SCC info */
4014 VN_INFO (name)->dfsnum = next_dfs_num++;
4015 VN_INFO (name)->visited = true;
4016 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4017
f1f41a6c 4018 sccstack.safe_push (name);
9e9e6e3e 4019 VN_INFO (name)->on_sccstack = true;
4020 defstmt = SSA_NAME_DEF_STMT (name);
4021
4022 /* Recursively DFS on our operands, looking for SCC's. */
75a70cf9 4023 if (!gimple_nop_p (defstmt))
9e9e6e3e 4024 {
000ef0a0 4025 /* Push a new iterator. */
1a91d914 4026 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4027 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
000ef0a0 4028 else
4029 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4030 }
4031 else
5383fb56 4032 clear_and_done_ssa_iter (&iter);
000ef0a0 4033
4034 while (1)
4035 {
4036 /* If we are done processing uses of a name, go up the stack
4037 of iterators and process SCCs as we found them. */
4038 if (op_iter_done (&iter))
9e9e6e3e 4039 {
000ef0a0 4040 /* See if we found an SCC. */
4041 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4042 if (!extract_and_process_scc_for_name (name))
4043 {
f1f41a6c 4044 namevec.release ();
4045 itervec.release ();
000ef0a0 4046 return false;
4047 }
9e9e6e3e 4048
000ef0a0 4049 /* Check if we are done. */
f1f41a6c 4050 if (namevec.is_empty ())
000ef0a0 4051 {
f1f41a6c 4052 namevec.release ();
4053 itervec.release ();
000ef0a0 4054 return true;
4055 }
4056
4057 /* Restore the last use walker and continue walking there. */
4058 use = name;
f1f41a6c 4059 name = namevec.pop ();
4060 memcpy (&iter, &itervec.last (),
000ef0a0 4061 sizeof (ssa_op_iter));
f1f41a6c 4062 itervec.pop ();
000ef0a0 4063 goto continue_walking;
4064 }
9e9e6e3e 4065
000ef0a0 4066 use = USE_FROM_PTR (usep);
4067
4068 /* Since we handle phi nodes, we will sometimes get
4069 invariants in the use expression. */
4070 if (TREE_CODE (use) == SSA_NAME)
4071 {
9e9e6e3e 4072 if (! (VN_INFO (use)->visited))
4073 {
000ef0a0 4074 /* Recurse by pushing the current use walking state on
4075 the stack and starting over. */
f1f41a6c 4076 itervec.safe_push (iter);
4077 namevec.safe_push (name);
000ef0a0 4078 name = use;
4079 goto start_over;
4080
4081continue_walking:
9e9e6e3e 4082 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4083 VN_INFO (use)->low);
4084 }
4085 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4086 && VN_INFO (use)->on_sccstack)
4087 {
4088 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4089 VN_INFO (name)->low);
4090 }
4091 }
a9b2282e 4092
000ef0a0 4093 usep = op_iter_next_use (&iter);
9e9e6e3e 4094 }
4095}
4096
9e9e6e3e 4097/* Allocate a value number table. */
4098
4099static void
4100allocate_vn_table (vn_tables_t table)
4101{
c1f445d2 4102 table->phis = new vn_phi_table_type (23);
4103 table->nary = new vn_nary_op_table_type (23);
4104 table->references = new vn_reference_table_type (23);
9e9e6e3e 4105
51a23cfc 4106 gcc_obstack_init (&table->nary_obstack);
9e9e6e3e 4107 table->phis_pool = create_alloc_pool ("VN phis",
4108 sizeof (struct vn_phi_s),
4109 30);
4110 table->references_pool = create_alloc_pool ("VN references",
4111 sizeof (struct vn_reference_s),
4112 30);
4113}
4114
4115/* Free a value number table. */
4116
4117static void
4118free_vn_table (vn_tables_t table)
4119{
c1f445d2 4120 delete table->phis;
4121 table->phis = NULL;
4122 delete table->nary;
4123 table->nary = NULL;
4124 delete table->references;
4125 table->references = NULL;
51a23cfc 4126 obstack_free (&table->nary_obstack, NULL);
9e9e6e3e 4127 free_alloc_pool (table->phis_pool);
4128 free_alloc_pool (table->references_pool);
4129}
4130
4131static void
4132init_scc_vn (void)
4133{
4134 size_t i;
4135 int j;
4136 int *rpo_numbers_temp;
9e9e6e3e 4137
4138 calculate_dominance_info (CDI_DOMINATORS);
f1f41a6c 4139 sccstack.create (0);
c1f445d2 4140 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
48e1416a 4141
f6c33c78 4142 constant_value_ids = BITMAP_ALLOC (NULL);
48e1416a 4143
9e9e6e3e 4144 next_dfs_num = 1;
f6c33c78 4145 next_value_id = 1;
48e1416a 4146
f1f41a6c 4147 vn_ssa_aux_table.create (num_ssa_names + 1);
9e9e6e3e 4148	  /* The create above doesn't actually grow the vec to the right size;
 4149	     it just preallocates the space to do so.  */
f1f41a6c 4150 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
b9584939 4151 gcc_obstack_init (&vn_ssa_aux_obstack);
4152
f1f41a6c 4153 shared_lookup_phiargs.create (0);
4154 shared_lookup_references.create (0);
fe672ac0 4155 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
a28770e1 4156 rpo_numbers_temp =
4157 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
9e9e6e3e 4158 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4159
 4160	  /* rpo_numbers_temp holds the RPO ordering: rpo[i] = bb means that
 4161	     the i'th block in RPO order is bb.  We want to map bb indices to
 4162	     RPO numbers, so we need to invert this array.  */
a28770e1 4163 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
9e9e6e3e 4164 rpo_numbers[rpo_numbers_temp[j]] = j;
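  /* E.g. an RPO order of bb4, bb2, bb3 gives rpo_numbers_temp
     == { 4, 2, 3 }, and the loop above produces rpo_numbers[4] == 0,
     rpo_numbers[2] == 1 and rpo_numbers[3] == 2.  */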
4165
b9584939 4166 XDELETE (rpo_numbers_temp);
9e9e6e3e 4167
4168 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4169
4170 /* Create the VN_INFO structures, and initialize value numbers to
4171 TOP. */
4172 for (i = 0; i < num_ssa_names; i++)
4173 {
4174 tree name = ssa_name (i);
4175 if (name)
4176 {
4177 VN_INFO_GET (name)->valnum = VN_TOP;
75a70cf9 4178 VN_INFO (name)->expr = NULL_TREE;
f6c33c78 4179 VN_INFO (name)->value_id = 0;
9e9e6e3e 4180 }
4181 }
4182
ec415c45 4183 renumber_gimple_stmt_uids ();
9e9e6e3e 4184
4185 /* Create the valid and optimistic value numbering tables. */
4186 valid_info = XCNEW (struct vn_tables_s);
4187 allocate_vn_table (valid_info);
4188 optimistic_info = XCNEW (struct vn_tables_s);
4189 allocate_vn_table (optimistic_info);
9e9e6e3e 4190}
4191
4192void
4193free_scc_vn (void)
4194{
4195 size_t i;
4196
c1f445d2 4197 delete constant_to_value_id;
4198 constant_to_value_id = NULL;
f6c33c78 4199 BITMAP_FREE (constant_value_ids);
f1f41a6c 4200 shared_lookup_phiargs.release ();
4201 shared_lookup_references.release ();
9e9e6e3e 4202 XDELETEVEC (rpo_numbers);
b9584939 4203
9e9e6e3e 4204 for (i = 0; i < num_ssa_names; i++)
4205 {
4206 tree name = ssa_name (i);
1d9353f3 4207 if (name
4208 && VN_INFO (name)->needs_insertion)
4209 release_ssa_name (name);
9e9e6e3e 4210 }
b9584939 4211 obstack_free (&vn_ssa_aux_obstack, NULL);
f1f41a6c 4212 vn_ssa_aux_table.release ();
b9584939 4213
f1f41a6c 4214 sccstack.release ();
9e9e6e3e 4215 free_vn_table (valid_info);
4216 XDELETE (valid_info);
4217 free_vn_table (optimistic_info);
4218 XDELETE (optimistic_info);
9e9e6e3e 4219}
4220
63628665 4221/* Set *ID according to RESULT. */
f8ce304c 4222
4223static void
4224set_value_id_for_result (tree result, unsigned int *id)
4225{
63628665 4226 if (result && TREE_CODE (result) == SSA_NAME)
4227 *id = VN_INFO (result)->value_id;
4228 else if (result && is_gimple_min_invariant (result))
4229 *id = get_or_alloc_constant_value_id (result);
4230 else
4231 *id = get_next_value_id ();
f8ce304c 4232}
4233
8883e700 4234/* Set the value ids in the valid hash tables. */
f6c33c78 4235
4236static void
4237set_hashtable_value_ids (void)
4238{
3e871d4d 4239 vn_nary_op_iterator_type hin;
4240 vn_phi_iterator_type hip;
4241 vn_reference_iterator_type hir;
f6c33c78 4242 vn_nary_op_t vno;
4243 vn_reference_t vr;
4244 vn_phi_t vp;
8883e700 4245
f6c33c78 4246 /* Now set the value ids of the things we had put in the hash
4247 table. */
4248
c1f445d2 4249 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
f8ce304c 4250 set_value_id_for_result (vno->result, &vno->value_id);
f6c33c78 4251
c1f445d2 4252 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
f8ce304c 4253 set_value_id_for_result (vp->result, &vp->value_id);
f6c33c78 4254
c1f445d2 4255 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4256 hir)
f8ce304c 4257 set_value_id_for_result (vr->result, &vr->value_id);
f6c33c78 4258}
4259
85e9a542 4260class cond_dom_walker : public dom_walker
4261{
4262public:
4263 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4264
4265 virtual void before_dom_children (basic_block);
4266
4267 bool fail;
4268};
4269
4270void
4271cond_dom_walker::before_dom_children (basic_block bb)
4272{
4273 edge e;
4274 edge_iterator ei;
4275
4276 if (fail)
4277 return;
4278
22499a39 4279 /* If any of the predecessor edges that do not come from blocks dominated
 4280	     by us are still marked as possibly executable, consider this block
4281 reachable. */
85e9a542 4282 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4283 FOR_EACH_EDGE (e, ei, bb->preds)
22499a39 4284 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4285 reachable |= (e->flags & EDGE_EXECUTABLE);
85e9a542 4286
4287 /* If the block is not reachable all outgoing edges are not
4288 executable. */
4289 if (!reachable)
4290 {
4291 if (dump_file && (dump_flags & TDF_DETAILS))
4292 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4293 "BB %d as not executable\n", bb->index);
4294
4295 FOR_EACH_EDGE (e, ei, bb->succs)
4296 e->flags &= ~EDGE_EXECUTABLE;
4297 return;
4298 }
4299
4300 gimple stmt = last_stmt (bb);
4301 if (!stmt)
4302 return;
4303
c443af04 4304 enum gimple_code code = gimple_code (stmt);
4305 if (code != GIMPLE_COND
4306 && code != GIMPLE_SWITCH
4307 && code != GIMPLE_GOTO)
4308 return;
4309
4310 if (dump_file && (dump_flags & TDF_DETAILS))
4311 {
4312 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4313 bb->index);
4314 print_gimple_stmt (dump_file, stmt, 0, 0);
4315 }
4316
85e9a542 4317	  /* Value-number the last stmt's SSA uses.  */
4318 ssa_op_iter i;
4319 tree op;
4320 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4321 if (VN_INFO (op)->visited == false
4322 && !DFS (op))
4323 {
4324 fail = true;
4325 return;
4326 }
4327
4328 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4329 if value-numbering can prove they are not reachable. Handling
4330 computed gotos is also possible. */
4331 tree val;
c443af04 4332 switch (code)
85e9a542 4333 {
4334 case GIMPLE_COND:
4335 {
4336 tree lhs = gimple_cond_lhs (stmt);
4337 tree rhs = gimple_cond_rhs (stmt);
4338 /* Work hard in computing the condition and take into account
4339 the valueization of the defining stmt. */
4340 if (TREE_CODE (lhs) == SSA_NAME)
4341 lhs = vn_get_expr_for (lhs);
4342 if (TREE_CODE (rhs) == SSA_NAME)
4343 rhs = vn_get_expr_for (rhs);
4344 val = fold_binary (gimple_cond_code (stmt),
4345 boolean_type_node, lhs, rhs);
4346 break;
4347 }
4348 case GIMPLE_SWITCH:
1a91d914 4349 val = gimple_switch_index (as_a <gswitch *> (stmt));
85e9a542 4350 break;
4351 case GIMPLE_GOTO:
4352 val = gimple_goto_dest (stmt);
4353 break;
4354 default:
c443af04 4355 gcc_unreachable ();
85e9a542 4356 }
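  /* E.g. if the block ends in if (x_1 > 2) and x_1 valueizes to the
     constant 3, VAL is boolean true here and find_taken_edge below
     returns the true edge, so every other successor edge gets marked
     not executable.  */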
4357 if (!val)
4358 return;
4359
4360 edge taken = find_taken_edge (bb, vn_valueize (val));
4361 if (!taken)
4362 return;
4363
4364 if (dump_file && (dump_flags & TDF_DETAILS))
4365 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4366 "not executable\n", bb->index, bb->index, taken->dest->index);
4367
4368 FOR_EACH_EDGE (e, ei, bb->succs)
4369 if (e != taken)
4370 e->flags &= ~EDGE_EXECUTABLE;
4371}
4372
a9b2282e 4373/* Do SCCVN. Returns true if it finished, false if we bailed out
8f190c8a 4374 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4375 how we use the alias oracle walking during the VN process. */
a9b2282e 4376
4377bool
8f190c8a 4378run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
9e9e6e3e 4379{
85e9a542 4380 basic_block bb;
9e9e6e3e 4381 size_t i;
4382 tree param;
48e1416a 4383
8f190c8a 4384 default_vn_walk_kind = default_vn_walk_kind_;
4385
9e9e6e3e 4386 init_scc_vn ();
4387 current_info = valid_info;
4388
4389 for (param = DECL_ARGUMENTS (current_function_decl);
4390 param;
1767a056 4391 param = DECL_CHAIN (param))
9e9e6e3e 4392 {
c6dfe037 4393 tree def = ssa_default_def (cfun, param);
4394 if (def)
c443af04 4395 {
4396 VN_INFO (def)->visited = true;
4397 VN_INFO (def)->valnum = def;
4398 }
9e9e6e3e 4399 }
4400
85e9a542 4401 /* Mark all edges as possibly executable. */
4402 FOR_ALL_BB_FN (bb, cfun)
4403 {
4404 edge_iterator ei;
4405 edge e;
4406 FOR_EACH_EDGE (e, ei, bb->succs)
4407 e->flags |= EDGE_EXECUTABLE;
4408 }
4409
 4410	  /* Walk all blocks in dominator order, value-numbering the last stmt's
 4411	     SSA uses and deciding whether outgoing edges are not executable.  */
4412 cond_dom_walker walker;
4413 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4414 if (walker.fail)
4415 {
4416 free_scc_vn ();
4417 return false;
4418 }
4419
4420 /* Value-number remaining SSA names. */
1d9353f3 4421 for (i = 1; i < num_ssa_names; ++i)
9e9e6e3e 4422 {
4423 tree name = ssa_name (i);
4424 if (name
4425 && VN_INFO (name)->visited == false
4426 && !has_zero_uses (name))
a9b2282e 4427 if (!DFS (name))
4428 {
4429 free_scc_vn ();
4430 return false;
4431 }
9e9e6e3e 4432 }
4433
f6c33c78 4434 /* Initialize the value ids. */
48e1416a 4435
f6c33c78 4436 for (i = 1; i < num_ssa_names; ++i)
4437 {
4438 tree name = ssa_name (i);
4439 vn_ssa_aux_t info;
4440 if (!name)
4441 continue;
4442 info = VN_INFO (name);
d94bf438 4443 if (info->valnum == name
4444 || info->valnum == VN_TOP)
f6c33c78 4445 info->value_id = get_next_value_id ();
4446 else if (is_gimple_min_invariant (info->valnum))
4447 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4448 }
48e1416a 4449
597ce871 4450	  /* Propagate value ids to names valued to other names.  */
4451 for (i = 1; i < num_ssa_names; ++i)
f6c33c78 4452 {
597ce871 4453 tree name = ssa_name (i);
4454 vn_ssa_aux_t info;
4455 if (!name)
4456 continue;
4457 info = VN_INFO (name);
4458 if (TREE_CODE (info->valnum) == SSA_NAME
4459 && info->valnum != name
4460 && info->value_id != VN_INFO (info->valnum)->value_id)
4461 info->value_id = VN_INFO (info->valnum)->value_id;
f6c33c78 4462 }
48e1416a 4463
f6c33c78 4464 set_hashtable_value_ids ();
48e1416a 4465
9e9e6e3e 4466 if (dump_file && (dump_flags & TDF_DETAILS))
4467 {
4468 fprintf (dump_file, "Value numbers:\n");
4469 for (i = 0; i < num_ssa_names; i++)
4470 {
4471 tree name = ssa_name (i);
8883e700 4472 if (name
4473 && VN_INFO (name)->visited
4474 && SSA_VAL (name) != name)
9e9e6e3e 4475 {
4476 print_generic_expr (dump_file, name, 0);
4477 fprintf (dump_file, " = ");
8883e700 4478 print_generic_expr (dump_file, SSA_VAL (name), 0);
9e9e6e3e 4479 fprintf (dump_file, "\n");
4480 }
4481 }
4482 }
a9b2282e 4483
4484 return true;
9e9e6e3e 4485}
f6c33c78 4486
4487/* Return the maximum value id we have ever seen. */
4488
4489unsigned int
48e1416a 4490get_max_value_id (void)
f6c33c78 4491{
4492 return next_value_id;
4493}
4494
4495/* Return the next unique value id. */
4496
4497unsigned int
4498get_next_value_id (void)
4499{
4500 return next_value_id++;
4501}
4502
4503
127fb64d 4504/* Compare two expressions E1 and E2 and return true if they are equal. */
f6c33c78 4505
4506bool
4507expressions_equal_p (tree e1, tree e2)
4508{
127fb64d 4509 /* The obvious case. */
f6c33c78 4510 if (e1 == e2)
4511 return true;
4512
127fb64d 4513 /* If only one of them is null, they cannot be equal. */
4514 if (!e1 || !e2)
4515 return false;
4516
127fb64d 4517 /* Now perform the actual comparison. */
4518 if (TREE_CODE (e1) == TREE_CODE (e2)
4519 && operand_equal_p (e1, e2, OEP_PURE_SAME))
f6c33c78 4520 return true;
4521
4522 return false;
4523}
4524
2ac47fdf 4525
4526/* Return true if the nary operation NARY may trap. This is a copy
4527 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
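/* E.g. a TRUNC_DIV_EXPR whose divisor is not known to be nonzero may
   trap, while a wrapping unsigned PLUS_EXPR never does.  */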
4528
4529bool
4530vn_nary_may_trap (vn_nary_op_t nary)
4531{
4532 tree type;
888b74b6 4533 tree rhs2 = NULL_TREE;
2ac47fdf 4534 bool honor_nans = false;
4535 bool honor_snans = false;
4536 bool fp_operation = false;
4537 bool honor_trapv = false;
4538 bool handled, ret;
4539 unsigned i;
4540
4541 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4542 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4543 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4544 {
4545 type = nary->type;
4546 fp_operation = FLOAT_TYPE_P (type);
4547 if (fp_operation)
4548 {
4549 honor_nans = flag_trapping_math && !flag_finite_math_only;
4550 honor_snans = flag_signaling_nans != 0;
4551 }
4552 else if (INTEGRAL_TYPE_P (type)
4553 && TYPE_OVERFLOW_TRAPS (type))
4554 honor_trapv = true;
4555 }
888b74b6 4556 if (nary->length >= 2)
4557 rhs2 = nary->op[1];
2ac47fdf 4558 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4559 honor_trapv,
4560 honor_nans, honor_snans, rhs2,
4561 &handled);
4562 if (handled
4563 && ret)
4564 return true;
4565
4566 for (i = 0; i < nary->length; ++i)
4567 if (tree_could_trap_p (nary->op[i]))
4568 return true;
4569
4570 return false;
4571}