/* SCC value numbering for trees
   Copyright (C) 2006-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "ipa-ref.h"
#include "plugin-api.h"
#include "cgraph.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
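
/* An illustrative sketch of the SCC walk (not part of the
   implementation, and glossing over the real worklist and iteration
   details): it is essentially Tarjan's SCC algorithm run over the SSA
   use-def graph, with value numbering performed when an SCC is popped.

     DFS (name):
       dfsnum (name) = low (name) = next_dfs_num++
       push NAME onto sccstack
       for each operand USE of the defining statement of NAME:
	 if USE is not visited yet:
	   DFS (USE)
	 low (name) = MIN (low (name), low (USE))
       if low (name) == dfsnum (name):
	 pop the SCC ending at NAME off sccstack
	 if it has a single member:
	   value number it once, into the valid tables
	 else:
	   iterate value numbering over the members, using the
	   optimistic tables, until no value number changes

   This is why straight-line code (all singleton SCCs) degenerates to
   plain reverse-postorder hash-based value numbering.  */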


static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s *value_type;
  typedef vn_phi_s *compare_type;
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure VP.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s *value_type;
  typedef vn_reference_s *compare_type;
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  pool_allocator<vn_phi_s> *phis_pool;
  pool_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s *value_type;
  typedef vn_constant_s *compare_type;
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
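
/* For example (illustrative only): if elimination has already removed
   a dead store

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_2;

   then .MEM_3 is value-numbered to .MEM_2 and released; a later lookup
   arriving with VUSE .MEM_3 must hop to .MEM_2, which is what the
   SSA_NAME_IN_FREE_LIST loop above does.  */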

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* Note that we can valueize here because we clear the cached
     simplified expressions after each optimistic iteration.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    vn_valueize (TREE_OPERAND
					   (gimple_assign_rhs1 (def_stmt), 0)));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  vn_valueize (gimple_assign_rhs1 (def_stmt)),
			  vn_valueize (gimple_assign_rhs2 (def_stmt)));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
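
/* For example (illustrative only), vn_get_stmt_kind classifies

     x_1 = y_2 + 1;	as VN_NARY
     x_1 = *p_2;		as VN_REFERENCE
     x_1 = foo (y_2);	as VN_REFERENCE
     x_1 = PHI <...>	as VN_PHI
     x_1 = 42;		as VN_CONSTANT

   which determines the hashtable the expression is entered in.  */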

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    hstate.add_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    offset_int off
		      = (wi::to_offset (this_offset)
			 + wi::lrshift (wi::to_offset (bit_offset),
					LOG2_BITS_PER_UNIT));
		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || off != 0
			    || cfun->after_inlining))
		      temp.off = off.to_shwi ();
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      offset_int off = ((wi::to_offset (temp.op0)
				 - wi::to_offset (temp.op1))
				* wi::to_offset (temp.op2));
	      if (wi::fits_shwi_p (off))
		temp.off = off.to_shwi ();
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
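
/* For example (illustrative only): for a global 'struct S { int f; } s;'
   the reference s.f decomposes into

     { COMPONENT_REF <op0 = FIELD_DECL f>,
       MEM_REF <op0 = 0, off = 0>,
       ADDR_EXPR <op0 = &s> }

   because the bare decl is canonicalized to MEM[&s] as described in
   the VAR_DECL case above.  */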

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      offset_int off = offset_int::from (mem_op->op0, SIGNED);
      off += addr_offset;
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = offset_int::from (mem_op->op0, SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && off == 0
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
1308
c26ce8a9 1309/* Optimize the reference REF to a constant if possible or return
1310 NULL_TREE if not. */
1311
1312tree
1313fully_constant_vn_reference_p (vn_reference_t ref)
1314{
f1f41a6c 1315 vec<vn_reference_op_s> operands = ref->operands;
c26ce8a9 1316 vn_reference_op_t op;
1317
1318 /* Try to simplify the translated expression if it is
1319 a call to a builtin function with at most two arguments. */
f1f41a6c 1320 op = &operands[0];
c26ce8a9 1321 if (op->opcode == CALL_EXPR
1322 && TREE_CODE (op->op0) == ADDR_EXPR
1323 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1324 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
f1f41a6c 1325 && operands.length () >= 2
1326 && operands.length () <= 3)
c26ce8a9 1327 {
1328 vn_reference_op_t arg0, arg1 = NULL;
1329 bool anyconst = false;
f1f41a6c 1330 arg0 = &operands[1];
1331 if (operands.length () > 2)
1332 arg1 = &operands[2];
c26ce8a9 1333 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1334 || (arg0->opcode == ADDR_EXPR
1335 && is_gimple_min_invariant (arg0->op0)))
1336 anyconst = true;
1337 if (arg1
1338 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1339 || (arg1->opcode == ADDR_EXPR
1340 && is_gimple_min_invariant (arg1->op0))))
1341 anyconst = true;
1342 if (anyconst)
1343 {
1344 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1345 arg1 ? 2 : 1,
1346 arg0->op0,
1347 arg1 ? arg1->op0 : NULL);
1348 if (folded
1349 && TREE_CODE (folded) == NOP_EXPR)
1350 folded = TREE_OPERAND (folded, 0);
1351 if (folded
1352 && is_gimple_min_invariant (folded))
1353 return folded;
1354 }
1355 }
1356
a5650c86 1357 /* Simplify reads from constants or constant initializers. */
1358 else if (BITS_PER_UNIT == 8
1359 && is_gimple_reg_type (ref->type)
1360 && (!INTEGRAL_TYPE_P (ref->type)
1361 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
c26ce8a9 1362 {
a5650c86 1363 HOST_WIDE_INT off = 0;
9f9cf897 1364 HOST_WIDE_INT size;
1365 if (INTEGRAL_TYPE_P (ref->type))
1366 size = TYPE_PRECISION (ref->type);
1367 else
1368 size = tree_to_shwi (TYPE_SIZE (ref->type));
a5650c86 1369 if (size % BITS_PER_UNIT != 0
1370 || size > MAX_BITSIZE_MODE_ANY_MODE)
1371 return NULL_TREE;
1372 size /= BITS_PER_UNIT;
1373 unsigned i;
1374 for (i = 0; i < operands.length (); ++i)
1375 {
1376 if (operands[i].off == -1)
1377 return NULL_TREE;
1378 off += operands[i].off;
1379 if (operands[i].opcode == MEM_REF)
1380 {
1381 ++i;
1382 break;
1383 }
1384 }
1385 vn_reference_op_t base = &operands[--i];
1386 tree ctor = error_mark_node;
1387 tree decl = NULL_TREE;
1388 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1389 ctor = base->op0;
1390 else if (base->opcode == MEM_REF
1391 && base[1].opcode == ADDR_EXPR
1392 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1393 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1394 {
1395 decl = TREE_OPERAND (base[1].op0, 0);
1396 ctor = ctor_for_folding (decl);
1397 }
1398 if (ctor == NULL_TREE)
1399 return build_zero_cst (ref->type);
1400 else if (ctor != error_mark_node)
1401 {
1402 if (decl)
1403 {
1404 tree res = fold_ctor_reference (ref->type, ctor,
1405 off * BITS_PER_UNIT,
1406 size * BITS_PER_UNIT, decl);
1407 if (res)
1408 {
1409 STRIP_USELESS_TYPE_CONVERSION (res);
1410 if (is_gimple_min_invariant (res))
1411 return res;
1412 }
1413 }
1414 else
1415 {
1416 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1417 if (native_encode_expr (ctor, buf, size, off) > 0)
1418 return native_interpret_expr (ref->type, buf, size);
1419 }
1420 }
c26ce8a9 1421 }
1422
1423 return NULL_TREE;
1424}
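
/* For example (illustrative only): a load from a constant initializer

     static const int tab[2] = { 13, 42 };
     ... = tab[1];

   ends up with operands { ARRAY_REF <off 4>, MEM_REF <off 0>,
   ADDR_EXPR &tab } whose offsets sum to 4, and folds through
   ctor_for_folding and fold_ctor_reference to the constant 42
   (assuming a 4-byte int).  */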

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  offset_int off = ((wi::to_offset (vro->op0)
			     - wi::to_offset (vro->op1))
			    * wi::to_offset (vro->op2));
	  if (wi::fits_shwi_p (off))
	    vro->off = off.to_shwi ();
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
		       bool disambiguate_only)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's
     LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }
  else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  if (TREE_CODE (oldargs[i]) == SSA_NAME
	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    return NULL;
	}
    }

  if (disambiguate_only)
    return (void *)-1;

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

87b53397 1774 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1775 routines to extract the assigned bits. */
824bbeb8 1776 else if (vn_walk_kind == VN_WALKREWRITE
1777 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
87b53397 1778 && ref->size == maxsize
1779 && maxsize % BITS_PER_UNIT == 0
1780 && offset % BITS_PER_UNIT == 0
1781 && is_gimple_reg_type (vr->type)
1782 && gimple_assign_single_p (def_stmt)
1783 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1784 {
1785 tree base2;
1786 HOST_WIDE_INT offset2, size2, maxsize2;
1787 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1788 &offset2, &size2, &maxsize2);
1789 if (maxsize2 != -1
1790 && maxsize2 == size2
1791 && size2 % BITS_PER_UNIT == 0
1792 && offset2 % BITS_PER_UNIT == 0
1793 && operand_equal_p (base, base2, 0)
1794 && offset2 <= offset
1795 && offset2 + size2 >= offset + maxsize)
1796 {
1797 /* We support up to 512-bit values (for V8DFmode). */
1798 unsigned char buffer[64];
1799 int len;
1800
1801 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1802 buffer, sizeof (buffer));
1803 if (len > 0)
1804 {
1805 tree val = native_interpret_expr (vr->type,
1806 buffer
1807 + ((offset - offset2)
1808 / BITS_PER_UNIT),
1809 ref->size / BITS_PER_UNIT);
1810 if (val)
a4f94d42 1811 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1812 (vuse, vr->set, vr->type, vr->operands, val);
87b53397 1813 }
1814 }
1815 }
1816
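  /* Illustrative example for case 3 (not from the sources): after

       int i = 0x11223344;       offset2 = 0, size2 = 32 bits

     a 16-bit read at bit offset 16 within i is served by encoding the
     constant into BUFFER with native_encode_expr and re-interpreting
     bytes 2..3 with native_interpret_expr in VR->TYPE.  On a
     little-endian target this yields 0x1122; the byte order is whatever
     the target defines, which is why the encode/interpret pair is used
     instead of shifting by hand.  */
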
a3bb56f0 1817 /* 4) Assignment from an SSA name whose definition we may be able
1818 to access pieces from. */
1819 else if (ref->size == maxsize
1820 && is_gimple_reg_type (vr->type)
1821 && gimple_assign_single_p (def_stmt)
1822 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1823 {
1824 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1825 gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1826 if (is_gimple_assign (def_stmt2)
1827 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1828 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1829 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1830 {
1831 tree base2;
1832 HOST_WIDE_INT offset2, size2, maxsize2, off;
1833 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1834 &offset2, &size2, &maxsize2);
1835 off = offset - offset2;
1836 if (maxsize2 != -1
1837 && maxsize2 == size2
1838 && operand_equal_p (base, base2, 0)
1839 && offset2 <= offset
1840 && offset2 + size2 >= offset + maxsize)
1841 {
1842 tree val = NULL_TREE;
1843 HOST_WIDE_INT elsz
f9ae6f95 1844 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
a3bb56f0 1845 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1846 {
1847 if (off == 0)
1848 val = gimple_assign_rhs1 (def_stmt2);
1849 else if (off == elsz)
1850 val = gimple_assign_rhs2 (def_stmt2);
1851 }
1852 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1853 && off % elsz == 0)
1854 {
1855 tree ctor = gimple_assign_rhs1 (def_stmt2);
1856 unsigned i = off / elsz;
1857 if (i < CONSTRUCTOR_NELTS (ctor))
1858 {
1859 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
0ff8139c 1860 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1861 {
1862 if (TREE_CODE (TREE_TYPE (elt->value))
1863 != VECTOR_TYPE)
1864 val = elt->value;
1865 }
a3bb56f0 1866 }
1867 }
1868 if (val)
a4f94d42 1869 return vn_reference_lookup_or_insert_for_pieces
01fd46e3 1870 (vuse, vr->set, vr->type, vr->operands, val);
a3bb56f0 1871 }
1872 }
1873 }
1874
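  /* Illustrative example for case 4 (not from the sources): with

       _1 = COMPLEX_EXPR <re_2, im_3>;
       c = _1;
       ... = __real c;           off = 0
       ... = __imag c;           off = elsz

     ELSZ is the element size in bits (64 for double), so the two
     component loads are valued directly as re_2 and im_3 without going
     through memory.  The CONSTRUCTOR branch does the same for reads of
     individual vector elements.  */
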
1875 /* 5) For aggregate copies translate the reference through them if
d8021dea 1876 the copy kills ref. */
8ecc6b38 1877 else if (vn_walk_kind == VN_WALKREWRITE
1878 && gimple_assign_single_p (def_stmt)
d8021dea 1879 && (DECL_P (gimple_assign_rhs1 (def_stmt))
182cf5a9 1880 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
d8021dea 1881 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1882 {
1883 tree base2;
a7be40cc 1884 HOST_WIDE_INT offset2, size2, maxsize2;
d8021dea 1885 int i, j;
c2078b80 1886 auto_vec<vn_reference_op_s> rhs;
d8021dea 1887 vn_reference_op_t vro;
3918bd18 1888 ao_ref r;
d8021dea 1889
66b86a74 1890 if (!lhs_ref_ok)
1891 return (void *)-1;
1892
d8021dea 1893 /* See if the assignment kills REF. */
66b86a74 1894 base2 = ao_ref_base (&lhs_ref);
1895 offset2 = lhs_ref.offset;
1896 size2 = lhs_ref.size;
a7be40cc 1897 maxsize2 = lhs_ref.max_size;
1898 if (maxsize2 == -1
e5d08bfd 1899 || (base != base2
1900 && (TREE_CODE (base) != MEM_REF
1901 || TREE_CODE (base2) != MEM_REF
1902 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1903 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1904 TREE_OPERAND (base2, 1))))
d8021dea 1905 || offset2 > offset
1906 || offset2 + size2 < offset + maxsize)
1907 return (void *)-1;
1908
66b86a74 1909 /* Find the common base of ref and the lhs. lhs_ops already
1910 contains valueized operands for the lhs. */
f1f41a6c 1911 i = vr->operands.length () - 1;
1912 j = lhs_ops.length () - 1;
0d5b37dd 1913 while (j >= 0 && i >= 0
f1f41a6c 1914 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
d8021dea 1915 {
1916 i--;
1917 j--;
1918 }
0d5b37dd 1919
b11771e1 1920 /* ??? The innermost op should always be a MEM_REF and we already
1921 checked that the assignment to the lhs kills vr. Thus for
1922 aggregate copies using char[] types the vn_reference_op_eq
1923 may fail when comparing types for compatibility. But we really
1924 don't care here - further lookups with the rewritten operands
1925 will simply fail if we messed up types too badly. */
a5650c86 1926 HOST_WIDE_INT extra_off = 0;
78e606ea 1927 if (j == 0 && i >= 0
f1f41a6c 1928 && lhs_ops[0].opcode == MEM_REF
a5650c86 1929 && lhs_ops[0].off != -1)
1930 {
1931 if (lhs_ops[0].off == vr->operands[i].off)
1932 i--, j--;
1933 else if (vr->operands[i].opcode == MEM_REF
1934 && vr->operands[i].off != -1)
1935 {
1936 extra_off = vr->operands[i].off - lhs_ops[0].off;
1937 i--, j--;
1938 }
1939 }
b11771e1 1940
d8021dea 1941 /* i now points to the first additional op.
1942 ??? LHS may not be completely contained in VR, one or more
1943 VIEW_CONVERT_EXPRs could be in its way. We could at least
1944 try handling outermost VIEW_CONVERT_EXPRs. */
1945 if (j != -1)
1946 return (void *)-1;
d8021dea 1947
1948 /* Now re-write REF to be based on the rhs of the assignment. */
1949 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
a5650c86 1950
1951 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1952 if (extra_off != 0)
1953 {
1954 if (rhs.length () < 2
1955 || rhs[0].opcode != MEM_REF
1956 || rhs[0].off == -1)
1957 return (void *)-1;
1958 rhs[0].off += extra_off;
1959 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1960 build_int_cst (TREE_TYPE (rhs[0].op0),
1961 extra_off));
1962 }
1963
d8021dea 1964 /* We need to prepend vr->operands[0..i] to rhs. */
2fd3ecff 1965 vec<vn_reference_op_s> old = vr->operands;
f1f41a6c 1966 if (i + 1 + rhs.length () > vr->operands.length ())
d8021dea 1967 {
f1f41a6c 1968 vr->operands.safe_grow (i + 1 + rhs.length ());
2fd3ecff 1969 if (old == shared_lookup_references)
1970 shared_lookup_references = vr->operands;
d8021dea 1971 }
1972 else
f1f41a6c 1973 vr->operands.truncate (i + 1 + rhs.length ());
1974 FOR_EACH_VEC_ELT (rhs, j, vro)
1975 vr->operands[i + 1 + j] = *vro;
01fd46e3 1976 vr->operands = valueize_refs (vr->operands);
2fd3ecff 1977 if (old == shared_lookup_references)
1978 shared_lookup_references = vr->operands;
d8021dea 1979 vr->hashcode = vn_reference_compute_hash (vr);
77c7051b 1980
a5650c86 1981 /* Try folding the new reference to a constant. */
1982 tree val = fully_constant_vn_reference_p (vr);
1983 if (val)
1984 return vn_reference_lookup_or_insert_for_pieces
1985 (vuse, vr->set, vr->type, vr->operands, val);
1986
77c7051b 1987 /* Adjust *ref from the new operands. */
1988 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1989 return (void *)-1;
1990 /* This can happen with bitfields. */
1991 if (ref->size != r.size)
1992 return (void *)-1;
1993 *ref = r;
1994
1995 /* Do not update last seen VUSE after translating. */
1996 last_vuse_ptr = NULL;
1997
1998 /* Keep looking for the adjusted *REF / VR pair. */
1999 return NULL;
2000 }
2001
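  /* Illustrative example for case 5 (not from the sources): for

       b = a;
       ... = b.f;

     the lookup of b.f matches the common base at the tail of the two
     operand lists against the lhs b, splices in the operands of the
     rhs a, and continues the walk looking for a dominating definition
     of a.f.  EXTRA_OFF covers the case where the two innermost
     MEM_REFs share a base pointer but differ by a known constant
     offset.  */
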
a3bb56f0 2002 /* 6) For memcpy copies translate the reference through them if
77c7051b 2003 the copy kills ref. */
2004 else if (vn_walk_kind == VN_WALKREWRITE
2005 && is_gimple_reg_type (vr->type)
2006 /* ??? Handle BCOPY as well. */
2007 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2008 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2009 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2010 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2011 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2012 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2013 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
e913b5cd 2014 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
77c7051b 2015 {
2016 tree lhs, rhs;
2017 ao_ref r;
2018 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2019 vn_reference_op_s op;
2020 HOST_WIDE_INT at;
2021
2022
2023 /* Only handle non-variable, addressable refs. */
2024 if (ref->size != maxsize
2025 || offset % BITS_PER_UNIT != 0
2026 || ref->size % BITS_PER_UNIT != 0)
2027 return (void *)-1;
2028
2029 /* Extract a pointer base and an offset for the destination. */
2030 lhs = gimple_call_arg (def_stmt, 0);
2031 lhs_offset = 0;
2032 if (TREE_CODE (lhs) == SSA_NAME)
6325605f 2033 {
2034 lhs = SSA_VAL (lhs);
2035 if (TREE_CODE (lhs) == SSA_NAME)
2036 {
2037 gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
2038 if (gimple_assign_single_p (def_stmt)
2039 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2040 lhs = gimple_assign_rhs1 (def_stmt);
2041 }
2042 }
77c7051b 2043 if (TREE_CODE (lhs) == ADDR_EXPR)
2044 {
2045 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2046 &lhs_offset);
2047 if (!tem)
2048 return (void *)-1;
2049 if (TREE_CODE (tem) == MEM_REF
e913b5cd 2050 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
77c7051b 2051 {
2052 lhs = TREE_OPERAND (tem, 0);
6325605f 2053 if (TREE_CODE (lhs) == SSA_NAME)
2054 lhs = SSA_VAL (lhs);
e913b5cd 2055 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
77c7051b 2056 }
2057 else if (DECL_P (tem))
2058 lhs = build_fold_addr_expr (tem);
2059 else
2060 return (void *)-1;
2061 }
2062 if (TREE_CODE (lhs) != SSA_NAME
2063 && TREE_CODE (lhs) != ADDR_EXPR)
2064 return (void *)-1;
2065
2066 /* Extract a pointer base and an offset for the source. */
2067 rhs = gimple_call_arg (def_stmt, 1);
2068 rhs_offset = 0;
2069 if (TREE_CODE (rhs) == SSA_NAME)
2070 rhs = SSA_VAL (rhs);
2071 if (TREE_CODE (rhs) == ADDR_EXPR)
2072 {
2073 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2074 &rhs_offset);
2075 if (!tem)
2076 return (void *)-1;
2077 if (TREE_CODE (tem) == MEM_REF
e913b5cd 2078 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
77c7051b 2079 {
2080 rhs = TREE_OPERAND (tem, 0);
e913b5cd 2081 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
77c7051b 2082 }
2083 else if (DECL_P (tem))
2084 rhs = build_fold_addr_expr (tem);
2085 else
2086 return (void *)-1;
2087 }
2088 if (TREE_CODE (rhs) != SSA_NAME
2089 && TREE_CODE (rhs) != ADDR_EXPR)
2090 return (void *)-1;
2091
8c53c46c 2092 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
77c7051b 2093
2094 /* The bases of the destination and the references have to agree. */
2095 if ((TREE_CODE (base) != MEM_REF
2096 && !DECL_P (base))
2097 || (TREE_CODE (base) == MEM_REF
2098 && (TREE_OPERAND (base, 0) != lhs
e913b5cd 2099 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
77c7051b 2100 || (DECL_P (base)
2101 && (TREE_CODE (lhs) != ADDR_EXPR
2102 || TREE_OPERAND (lhs, 0) != base)))
2103 return (void *)-1;
2104
77c7051b 2105 at = offset / BITS_PER_UNIT;
2106 if (TREE_CODE (base) == MEM_REF)
8c53c46c 2107 at += tree_to_uhwi (TREE_OPERAND (base, 1));
6325605f 2108 /* If the access is completely outside of the memcpy destination
2109 area there is no aliasing. */
2110 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2111 || lhs_offset + copy_size <= at)
2112 return NULL;
2113 /* And the access has to be contained within the memcpy destination. */
77c7051b 2114 if (lhs_offset > at
2115 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2116 return (void *)-1;
2117
2118 /* Make room for 2 operands in the new reference. */
f1f41a6c 2119 if (vr->operands.length () < 2)
77c7051b 2120 {
f1f41a6c 2121 vec<vn_reference_op_s> old = vr->operands;
2122 vr->operands.safe_grow_cleared (2);
77c7051b 2123 if (old == shared_lookup_references
2124 && vr->operands != old)
2fd3ecff 2125 shared_lookup_references = vr->operands;
77c7051b 2126 }
2127 else
f1f41a6c 2128 vr->operands.truncate (2);
77c7051b 2129
2130 /* The looked-through reference is a simple MEM_REF. */
2131 memset (&op, 0, sizeof (op));
2132 op.type = vr->type;
2133 op.opcode = MEM_REF;
2134 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2135 op.off = at - lhs_offset + rhs_offset;
f1f41a6c 2136 vr->operands[0] = op;
2be90eed 2137 op.type = TREE_TYPE (rhs);
77c7051b 2138 op.opcode = TREE_CODE (rhs);
2139 op.op0 = rhs;
2140 op.off = -1;
f1f41a6c 2141 vr->operands[1] = op;
77c7051b 2142 vr->hashcode = vn_reference_compute_hash (vr);
3918bd18 2143
2144 /* Adjust *ref from the new operands. */
2145 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
d8021dea 2146 return (void *)-1;
8f15ba15 2147 /* This can happen with bitfields. */
2148 if (ref->size != r.size)
2149 return (void *)-1;
3918bd18 2150 *ref = r;
d8021dea 2151
4a83fadb 2152 /* Do not update last seen VUSE after translating. */
2153 last_vuse_ptr = NULL;
2154
d8021dea 2155 /* Keep looking for the adjusted *REF / VR pair. */
2156 return NULL;
2157 }
2158
2159 /* Bail out and stop walking. */
2160 return (void *)-1;
2161}
2162
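/* A rough sketch (not from the sources) of how the walker uses the
   callbacks above: walk_non_aliased_vuses follows the virtual use-def
   chain and at each potentially aliasing definition does approximately

     res = vn_reference_lookup_2 (ref, vuse, ...);    hash lookup
     if (!res)
       res = vn_reference_lookup_3 (ref, vuse, ...);  derive/translate

   where NULL from vn_reference_lookup_3 means "keep walking with the
   possibly rewritten *REF", a real pointer is the final result, and
   (void *) -1 aborts the walk with no result.  */
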
f6c33c78 2163/* Lookup a reference operation by its parts in the current hash table.
2164 Returns the resulting value number if it exists in the hash table,
2165 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2166 vn_reference_t stored in the hashtable if something is found. */
9e9e6e3e 2167
2168tree
3918bd18 2169vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 2170 vec<vn_reference_op_s> operands,
8ecc6b38 2171 vn_reference_t *vnresult, vn_lookup_kind kind)
f6c33c78 2172{
2173 struct vn_reference_s vr1;
dd277d48 2174 vn_reference_t tmp;
c26ce8a9 2175 tree cst;
dd277d48 2176
2177 if (!vnresult)
2178 vnresult = &tmp;
2179 *vnresult = NULL;
d8021dea 2180
b8a2283e 2181 vr1.vuse = vuse_ssa_val (vuse);
f1f41a6c 2182 shared_lookup_references.truncate (0);
2183 shared_lookup_references.safe_grow (operands.length ());
2184 memcpy (shared_lookup_references.address (),
2185 operands.address (),
d8021dea 2186 sizeof (vn_reference_op_s)
f1f41a6c 2187 * operands.length ());
d8021dea 2188 vr1.operands = operands = shared_lookup_references
2189 = valueize_refs (shared_lookup_references);
3918bd18 2190 vr1.type = type;
2191 vr1.set = set;
f6c33c78 2192 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 2193 if ((cst = fully_constant_vn_reference_p (&vr1)))
2194 return cst;
f6c33c78 2195
c26ce8a9 2196 vn_reference_lookup_1 (&vr1, vnresult);
dd277d48 2197 if (!*vnresult
8ecc6b38 2198 && kind != VN_NOWALK
dd277d48 2199 && vr1.vuse)
02067dc5 2200 {
3918bd18 2201 ao_ref r;
8ecc6b38 2202 vn_walk_kind = kind;
3918bd18 2203 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
d8021dea 2204 *vnresult =
3918bd18 2205 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
d8021dea 2206 vn_reference_lookup_2,
46816709 2207 vn_reference_lookup_3,
f3c5599a 2208 vuse_ssa_val, &vr1);
2fd3ecff 2209 gcc_checking_assert (vr1.operands == shared_lookup_references);
02067dc5 2210 }
2211
dd277d48 2212 if (*vnresult)
2213 return (*vnresult)->result;
2214
2215 return NULL_TREE;
f6c33c78 2216}
2217
2218/* Lookup OP in the current hash table, and return the resulting value
2219 number if it exists in the hash table. Return NULL_TREE if it does
2220 not exist in the hash table or if the result field of the structure
 2221 was NULL. VNRESULT will be filled in with the vn_reference_t
2222 stored in the hashtable if one exists. */
2223
2224tree
8ecc6b38 2225vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
f6c33c78 2226 vn_reference_t *vnresult)
9e9e6e3e 2227{
f1f41a6c 2228 vec<vn_reference_op_s> operands;
9e9e6e3e 2229 struct vn_reference_s vr1;
c26ce8a9 2230 tree cst;
882f8b55 2231 bool valueized_anything;
dd277d48 2232
f6c33c78 2233 if (vnresult)
2234 *vnresult = NULL;
9e9e6e3e 2235
b8a2283e 2236 vr1.vuse = vuse_ssa_val (vuse);
882f8b55 2237 vr1.operands = operands
 2238 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3918bd18 2239 vr1.type = TREE_TYPE (op);
2240 vr1.set = get_alias_set (op);
9e9e6e3e 2241 vr1.hashcode = vn_reference_compute_hash (&vr1);
c26ce8a9 2242 if ((cst = fully_constant_vn_reference_p (&vr1)))
2243 return cst;
404d6be4 2244
8ecc6b38 2245 if (kind != VN_NOWALK
dd277d48 2246 && vr1.vuse)
2247 {
2248 vn_reference_t wvnresult;
3918bd18 2249 ao_ref r;
882f8b55 2250 /* Make sure to use a valueized reference if we valueized anything.
2251 Otherwise preserve the full reference for advanced TBAA. */
 2252 if (!valueized_anything
2253 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2254 vr1.operands))
2be90eed 2255 ao_ref_init (&r, op);
8ecc6b38 2256 vn_walk_kind = kind;
dd277d48 2257 wvnresult =
3918bd18 2258 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
d8021dea 2259 vn_reference_lookup_2,
46816709 2260 vn_reference_lookup_3,
f3c5599a 2261 vuse_ssa_val, &vr1);
2fd3ecff 2262 gcc_checking_assert (vr1.operands == shared_lookup_references);
dd277d48 2263 if (wvnresult)
2264 {
2265 if (vnresult)
2266 *vnresult = wvnresult;
2267 return wvnresult->result;
2268 }
2269
2270 return NULL_TREE;
404d6be4 2271 }
9e9e6e3e 2272
dd277d48 2273 return vn_reference_lookup_1 (&vr1, vnresult);
9e9e6e3e 2274}
2275
2fd3ecff 2276/* Lookup CALL in the current hash table and return the entry in
2277 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2278
2279void
1a91d914 2280vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2fd3ecff 2281 vn_reference_t vr)
2282{
72e693ed 2283 if (vnresult)
2284 *vnresult = NULL;
2285
2fd3ecff 2286 tree vuse = gimple_vuse (call);
2287
2288 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2289 vr->operands = valueize_shared_reference_ops_from_call (call);
2290 vr->type = gimple_expr_type (call);
2291 vr->set = 0;
2292 vr->hashcode = vn_reference_compute_hash (vr);
2293 vn_reference_lookup_1 (vr, vnresult);
2294}
f6c33c78 2295
9e9e6e3e 2296/* Insert OP into the current hash table with a value number of
f6c33c78 2297 RESULT, and return the resulting reference structure we created. */
9e9e6e3e 2298
2fd3ecff 2299static vn_reference_t
39215e09 2300vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
9e9e6e3e 2301{
3e871d4d 2302 vn_reference_s **slot;
9e9e6e3e 2303 vn_reference_t vr1;
75aefb7b 2304 bool tem;
9e9e6e3e 2305
d27a7bc4 2306 vr1 = current_info->references_pool->allocate ();
f6c33c78 2307 if (TREE_CODE (result) == SSA_NAME)
2308 vr1->value_id = VN_INFO (result)->value_id;
2309 else
2310 vr1->value_id = get_or_alloc_constant_value_id (result);
dd277d48 2311 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
75aefb7b 2312 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3918bd18 2313 vr1->type = TREE_TYPE (op);
2314 vr1->set = get_alias_set (op);
9e9e6e3e 2315 vr1->hashcode = vn_reference_compute_hash (vr1);
2316 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
39215e09 2317 vr1->result_vdef = vdef;
9e9e6e3e 2318
c1f445d2 2319 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2320 INSERT);
9e9e6e3e 2321
 2322 /* Because we look up stores using vuses, and value number failures
2323 using the vdefs (see visit_reference_op_store for how and why),
2324 it's possible that on failure we may try to insert an already
 2325 inserted store. This is not wrong; there is no SSA name for a
2326 store that we could use as a differentiator anyway. Thus, unlike
2327 the other lookup functions, you cannot gcc_assert (!*slot)
2328 here. */
2329
12661815 2330 /* But free the old slot in case of a collision. */
2331 if (*slot)
2332 free_reference (*slot);
9e9e6e3e 2333
2334 *slot = vr1;
f6c33c78 2335 return vr1;
2336}
2337
 2338/* Insert a reference by its pieces into the current hash table with
2339 a value number of RESULT. Return the resulting reference
2340 structure we created. */
2341
2342vn_reference_t
3918bd18 2343vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
f1f41a6c 2344 vec<vn_reference_op_s> operands,
f6c33c78 2345 tree result, unsigned int value_id)
2346
2347{
3e871d4d 2348 vn_reference_s **slot;
f6c33c78 2349 vn_reference_t vr1;
2350
d27a7bc4 2351 vr1 = current_info->references_pool->allocate ();
dd277d48 2352 vr1->value_id = value_id;
2353 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
f6c33c78 2354 vr1->operands = valueize_refs (operands);
3918bd18 2355 vr1->type = type;
2356 vr1->set = set;
f6c33c78 2357 vr1->hashcode = vn_reference_compute_hash (vr1);
2358 if (result && TREE_CODE (result) == SSA_NAME)
2359 result = SSA_VAL (result);
2360 vr1->result = result;
2361
c1f445d2 2362 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2363 INSERT);
48e1416a 2364
f6c33c78 2365 /* At this point we should have all the things inserted that we have
dd277d48 2366 seen before, and we should never try inserting something that
2367 already exists. */
f6c33c78 2368 gcc_assert (!*slot);
2369 if (*slot)
2370 free_reference (*slot);
2371
2372 *slot = vr1;
2373 return vr1;
9e9e6e3e 2374}
2375
51a23cfc 2376/* Compute and return the hash value for n-ary operation VNO1. */
9e9e6e3e 2377
2fd3ecff 2378static hashval_t
51a23cfc 2379vn_nary_op_compute_hash (const vn_nary_op_t vno1)
9e9e6e3e 2380{
f32e91d5 2381 inchash::hash hstate;
51a23cfc 2382 unsigned i;
9e9e6e3e 2383
51a23cfc 2384 for (i = 0; i < vno1->length; ++i)
2385 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2386 vno1->op[i] = SSA_VAL (vno1->op[i]);
9e9e6e3e 2387
51a23cfc 2388 if (vno1->length == 2
2389 && commutative_tree_code (vno1->opcode)
2390 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
a4f59596 2391 std::swap (vno1->op[0], vno1->op[1]);
9e9e6e3e 2392
f32e91d5 2393 hstate.add_int (vno1->opcode);
51a23cfc 2394 for (i = 0; i < vno1->length; ++i)
f32e91d5 2395 inchash::add_expr (vno1->op[i], hstate);
9e9e6e3e 2396
f32e91d5 2397 return hstate.end ();
9e9e6e3e 2398}
2399
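/* Example (not from the sources): because operands are valueized and
   commutative operations are canonicalized before hashing, the two
   statements

     _3 = a_1 + b_2;
     _4 = b_2 + a_1;

   hash their operands in the same order and therefore land in the same
   hash bucket, letting _4 get the value number of _3.  */
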
3e871d4d 2400/* Compare nary operations VNO1 and VNO2 and return true if they are
9e9e6e3e 2401 equivalent. */
2402
3e871d4d 2403bool
2404vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
9e9e6e3e 2405{
51a23cfc 2406 unsigned i;
2407
3d2d7de7 2408 if (vno1->hashcode != vno2->hashcode)
2409 return false;
2410
7384c678 2411 if (vno1->length != vno2->length)
2412 return false;
2413
51a23cfc 2414 if (vno1->opcode != vno2->opcode
c477520d 2415 || !types_compatible_p (vno1->type, vno2->type))
51a23cfc 2416 return false;
2417
2418 for (i = 0; i < vno1->length; ++i)
2419 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2420 return false;
2421
2422 return true;
9e9e6e3e 2423}
2424
f8ce304c 2425/* Initialize VNO from the pieces provided. */
9e9e6e3e 2426
f8ce304c 2427static void
2428init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
7384c678 2429 enum tree_code code, tree type, tree *ops)
f8ce304c 2430{
2431 vno->opcode = code;
2432 vno->length = length;
2433 vno->type = type;
7384c678 2434 memcpy (&vno->op[0], ops, sizeof (tree) * length);
f8ce304c 2435}
2436
2437/* Initialize VNO from OP. */
2438
2439static void
2440init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2441{
2442 unsigned i;
2443
2444 vno->opcode = TREE_CODE (op);
2445 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2446 vno->type = TREE_TYPE (op);
2447 for (i = 0; i < vno->length; ++i)
2448 vno->op[i] = TREE_OPERAND (op, i);
2449}
2450
7384c678 2451/* Return the number of operands for a vn_nary ops structure from STMT. */
2452
2453static unsigned int
2454vn_nary_length_from_stmt (gimple stmt)
2455{
2456 switch (gimple_assign_rhs_code (stmt))
2457 {
2458 case REALPART_EXPR:
2459 case IMAGPART_EXPR:
2460 case VIEW_CONVERT_EXPR:
2461 return 1;
2462
70cd63a3 2463 case BIT_FIELD_REF:
2464 return 3;
2465
7384c678 2466 case CONSTRUCTOR:
2467 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2468
2469 default:
2470 return gimple_num_ops (stmt) - 1;
2471 }
2472}
2473
f8ce304c 2474/* Initialize VNO from STMT. */
2475
2476static void
2477init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2478{
2479 unsigned i;
2480
2481 vno->opcode = gimple_assign_rhs_code (stmt);
f8ce304c 2482 vno->type = gimple_expr_type (stmt);
7384c678 2483 switch (vno->opcode)
2484 {
2485 case REALPART_EXPR:
2486 case IMAGPART_EXPR:
2487 case VIEW_CONVERT_EXPR:
2488 vno->length = 1;
2489 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2490 break;
2491
70cd63a3 2492 case BIT_FIELD_REF:
2493 vno->length = 3;
2494 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2495 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2496 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2497 break;
2498
7384c678 2499 case CONSTRUCTOR:
2500 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2501 for (i = 0; i < vno->length; ++i)
2502 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2503 break;
2504
2505 default:
70cd63a3 2506 gcc_checking_assert (!gimple_assign_single_p (stmt));
7384c678 2507 vno->length = gimple_num_ops (stmt) - 1;
2508 for (i = 0; i < vno->length; ++i)
2509 vno->op[i] = gimple_op (stmt, i + 1);
2510 }
f8ce304c 2511}
2512
2513/* Compute the hashcode for VNO and look for it in the hash table;
2514 return the resulting value number if it exists in the hash table.
2515 Return NULL_TREE if it does not exist in the hash table or if the
2516 result field of the operation is NULL. VNRESULT will contain the
2517 vn_nary_op_t from the hashtable if it exists. */
2518
2519static tree
2520vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
f6c33c78 2521{
3e871d4d 2522 vn_nary_op_s **slot;
f8ce304c 2523
f6c33c78 2524 if (vnresult)
2525 *vnresult = NULL;
f8ce304c 2526
2527 vno->hashcode = vn_nary_op_compute_hash (vno);
c1f445d2 2528 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2529 NO_INSERT);
f6c33c78 2530 if (!slot && current_info == optimistic_info)
c1f445d2 2531 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2532 NO_INSERT);
f6c33c78 2533 if (!slot)
2534 return NULL_TREE;
2535 if (vnresult)
3e871d4d 2536 *vnresult = *slot;
2537 return (*slot)->result;
f6c33c78 2538}
2539
f8ce304c 2540/* Lookup an n-ary operation by its pieces and return the resulting value
2541 number if it exists in the hash table. Return NULL_TREE if it does
2542 not exist in the hash table or if the result field of the operation
2543 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2544 if it exists. */
2545
2546tree
2547vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
7384c678 2548 tree type, tree *ops, vn_nary_op_t *vnresult)
f8ce304c 2549{
7384c678 2550 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2551 sizeof_vn_nary_op (length));
2552 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2553 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 2554}
2555
f6c33c78 2556/* Lookup OP in the current hash table, and return the resulting value
2557 number if it exists in the hash table. Return NULL_TREE if it does
2558 not exist in the hash table or if the result field of the operation
2559 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2560 if it exists. */
2561
2562tree
2563vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
9e9e6e3e 2564{
7384c678 2565 vn_nary_op_t vno1
2566 = XALLOCAVAR (struct vn_nary_op_s,
2567 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2568 init_vn_nary_op_from_op (vno1, op);
2569 return vn_nary_op_lookup_1 (vno1, vnresult);
9e9e6e3e 2570}
2571
75a70cf9 2572/* Lookup the rhs of STMT in the current hash table, and return the resulting
2573 value number if it exists in the hash table. Return NULL_TREE if
2574 it does not exist in the hash table. VNRESULT will contain the
2575 vn_nary_op_t from the hashtable if it exists. */
2576
2577tree
2578vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2579{
7384c678 2580 vn_nary_op_t vno1
2581 = XALLOCAVAR (struct vn_nary_op_s,
2582 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2583 init_vn_nary_op_from_stmt (vno1, stmt);
2584 return vn_nary_op_lookup_1 (vno1, vnresult);
f8ce304c 2585}
2586
2587/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2588
2589static vn_nary_op_t
2590alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2591{
2592 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2593}
2594
2595/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2596 obstack. */
2597
2598static vn_nary_op_t
2599alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2600{
2601 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2602 &current_info->nary_obstack);
2603
2604 vno1->value_id = value_id;
2605 vno1->length = length;
2606 vno1->result = result;
2607
2608 return vno1;
2609}
2610
2611/* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2612 VNO->HASHCODE first. */
2613
2614static vn_nary_op_t
c1f445d2 2615vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3e871d4d 2616 bool compute_hash)
f8ce304c 2617{
3e871d4d 2618 vn_nary_op_s **slot;
f8ce304c 2619
2620 if (compute_hash)
2621 vno->hashcode = vn_nary_op_compute_hash (vno);
2622
c1f445d2 2623 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
f8ce304c 2624 gcc_assert (!*slot);
2625
2626 *slot = vno;
2627 return vno;
75a70cf9 2628}
2629
f6c33c78 2630/* Insert an n-ary operation into the current hash table using its
2631 pieces. Return the vn_nary_op_t structure we created and put in
2632 the hashtable. */
2633
2634vn_nary_op_t
2635vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
7384c678 2636 tree type, tree *ops,
2637 tree result, unsigned int value_id)
f6c33c78 2638{
7384c678 2639 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2640 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
f8ce304c 2641 return vn_nary_op_insert_into (vno1, current_info->nary, true);
f6c33c78 2642}
2643
9e9e6e3e 2644/* Insert OP into the current hash table with a value number of
f6c33c78 2645 RESULT. Return the vn_nary_op_t structure we created and put in
2646 the hashtable. */
9e9e6e3e 2647
f6c33c78 2648vn_nary_op_t
51a23cfc 2649vn_nary_op_insert (tree op, tree result)
9e9e6e3e 2650{
51a23cfc 2651 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
51a23cfc 2652 vn_nary_op_t vno1;
51a23cfc 2653
f8ce304c 2654 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2655 init_vn_nary_op_from_op (vno1, op);
2656 return vn_nary_op_insert_into (vno1, current_info->nary, true);
9e9e6e3e 2657}
2658
75a70cf9 2659/* Insert the rhs of STMT into the current hash table with a value number of
2660 RESULT. */
2661
2662vn_nary_op_t
2663vn_nary_op_insert_stmt (gimple stmt, tree result)
2664{
7384c678 2665 vn_nary_op_t vno1
2666 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2667 result, VN_INFO (result)->value_id);
f8ce304c 2668 init_vn_nary_op_from_stmt (vno1, stmt);
2669 return vn_nary_op_insert_into (vno1, current_info->nary, true);
75a70cf9 2670}
2671
9e9e6e3e 2672/* Compute a hashcode for PHI operation VP1 and return it. */
2673
2674static inline hashval_t
2675vn_phi_compute_hash (vn_phi_t vp1)
2676{
f32e91d5 2677 inchash::hash hstate (vp1->block->index);
9e9e6e3e 2678 int i;
2679 tree phi1op;
9a7beb5f 2680 tree type;
9e9e6e3e 2681
9a7beb5f 2682 /* If all PHI arguments are constants we need to distinguish
2683 the PHI node via its type. */
82a7a70c 2684 type = vp1->type;
f32e91d5 2685 hstate.merge_hash (vn_hash_type (type));
9a7beb5f 2686
f1f41a6c 2687 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
9e9e6e3e 2688 {
2689 if (phi1op == VN_TOP)
2690 continue;
f32e91d5 2691 inchash::add_expr (phi1op, hstate);
9e9e6e3e 2692 }
2693
f32e91d5 2694 return hstate.end ();
9e9e6e3e 2695}
2696
9e9e6e3e 2697/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2698
2699static int
3e871d4d 2700vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
9e9e6e3e 2701{
3d2d7de7 2702 if (vp1->hashcode != vp2->hashcode)
2703 return false;
2704
9e9e6e3e 2705 if (vp1->block == vp2->block)
2706 {
2707 int i;
2708 tree phi1op;
2709
9a7beb5f 2710 /* If the PHI nodes do not have compatible types
2711 they are not the same. */
82a7a70c 2712 if (!types_compatible_p (vp1->type, vp2->type))
9a7beb5f 2713 return false;
2714
9e9e6e3e 2715 /* Any phi in the same block will have its arguments in the
2716 same edge order, because of how we store phi nodes. */
f1f41a6c 2717 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
9e9e6e3e 2718 {
f1f41a6c 2719 tree phi2op = vp2->phiargs[i];
9e9e6e3e 2720 if (phi1op == VN_TOP || phi2op == VN_TOP)
2721 continue;
2722 if (!expressions_equal_p (phi1op, phi2op))
2723 return false;
2724 }
2725 return true;
2726 }
2727 return false;
2728}
2729
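/* Example (not from the sources): in one basic block the PHIs

     x_1 = PHI <a_2 (e1), b_3 (e2)>
     y_4 = PHI <a_2 (e1), b_3 (e2)>

   compare equal here (same block, compatible types, argument-wise
   equal values), so y_4 receives the value number of x_1.  A VN_TOP
   argument is optimistically treated as equal to anything and is
   skipped by both the hash and the comparison.  */
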
f1f41a6c 2730static vec<tree> shared_lookup_phiargs;
9e9e6e3e 2731
2732/* Lookup PHI in the current hash table, and return the resulting
2733 value number if it exists in the hash table. Return NULL_TREE if
2734 it does not exist in the hash table. */
2735
3dc4c394 2736static tree
75a70cf9 2737vn_phi_lookup (gimple phi)
9e9e6e3e 2738{
3e871d4d 2739 vn_phi_s **slot;
9e9e6e3e 2740 struct vn_phi_s vp1;
75a70cf9 2741 unsigned i;
9e9e6e3e 2742
f1f41a6c 2743 shared_lookup_phiargs.truncate (0);
9e9e6e3e 2744
2745 /* Canonicalize the SSA_NAME's to their value number. */
75a70cf9 2746 for (i = 0; i < gimple_phi_num_args (phi); i++)
9e9e6e3e 2747 {
2748 tree def = PHI_ARG_DEF (phi, i);
2749 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
f1f41a6c 2750 shared_lookup_phiargs.safe_push (def);
9e9e6e3e 2751 }
82a7a70c 2752 vp1.type = TREE_TYPE (gimple_phi_result (phi));
9e9e6e3e 2753 vp1.phiargs = shared_lookup_phiargs;
75a70cf9 2754 vp1.block = gimple_bb (phi);
9e9e6e3e 2755 vp1.hashcode = vn_phi_compute_hash (&vp1);
c1f445d2 2756 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2757 NO_INSERT);
48694fc0 2758 if (!slot && current_info == optimistic_info)
c1f445d2 2759 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2760 NO_INSERT);
9e9e6e3e 2761 if (!slot)
2762 return NULL_TREE;
3e871d4d 2763 return (*slot)->result;
9e9e6e3e 2764}
2765
2766/* Insert PHI into the current hash table with a value number of
2767 RESULT. */
2768
f6c33c78 2769static vn_phi_t
75a70cf9 2770vn_phi_insert (gimple phi, tree result)
9e9e6e3e 2771{
3e871d4d 2772 vn_phi_s **slot;
d27a7bc4 2773 vn_phi_t vp1 = current_info->phis_pool->allocate ();
75a70cf9 2774 unsigned i;
1e094109 2775 vec<tree> args = vNULL;
9e9e6e3e 2776
2777 /* Canonicalize the SSA_NAME's to their value number. */
75a70cf9 2778 for (i = 0; i < gimple_phi_num_args (phi); i++)
9e9e6e3e 2779 {
2780 tree def = PHI_ARG_DEF (phi, i);
2781 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
f1f41a6c 2782 args.safe_push (def);
9e9e6e3e 2783 }
f6c33c78 2784 vp1->value_id = VN_INFO (result)->value_id;
82a7a70c 2785 vp1->type = TREE_TYPE (gimple_phi_result (phi));
9e9e6e3e 2786 vp1->phiargs = args;
75a70cf9 2787 vp1->block = gimple_bb (phi);
9e9e6e3e 2788 vp1->result = result;
2789 vp1->hashcode = vn_phi_compute_hash (vp1);
2790
c1f445d2 2791 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
9e9e6e3e 2792
2793 /* Because we iterate over phi operations more than once, it's
2794 possible the slot might already exist here, hence no assert.*/
2795 *slot = vp1;
f6c33c78 2796 return vp1;
9e9e6e3e 2797}
2798
2799
2800/* Print set of components in strongly connected component SCC to OUT. */
2801
2802static void
f1f41a6c 2803print_scc (FILE *out, vec<tree> scc)
9e9e6e3e 2804{
2805 tree var;
2806 unsigned int i;
2807
7ef97146 2808 fprintf (out, "SCC consists of:");
f1f41a6c 2809 FOR_EACH_VEC_ELT (scc, i, var)
9e9e6e3e 2810 {
9e9e6e3e 2811 fprintf (out, " ");
7ef97146 2812 print_generic_expr (out, var, 0);
9e9e6e3e 2813 }
2814 fprintf (out, "\n");
2815}
2816
2817/* Set the value number of FROM to TO, return true if it has changed
2818 as a result. */
2819
2820static inline bool
2821set_ssa_val_to (tree from, tree to)
2822{
b81ffaee 2823 tree currval = SSA_VAL (from);
d68e9408 2824 HOST_WIDE_INT toff, coff;
9e9e6e3e 2825
85e9a542 2826 /* The only thing we allow as value numbers are ssa_names
2827 and invariants. So assert that here. We don't allow VN_TOP
2828 as visiting a stmt should produce a value-number other than
2829 that.
2830 ??? Still VN_TOP can happen for unreachable code, so force
2831 it to varying in that case. Not all code is prepared to
2832 get VN_TOP on valueization. */
2833 if (to == VN_TOP)
2834 {
2835 if (dump_file && (dump_flags & TDF_DETAILS))
2836 fprintf (dump_file, "Forcing value number to varying on "
2837 "receiving VN_TOP\n");
2838 to = from;
2839 }
2840
2841 gcc_assert (to != NULL_TREE
9251bb6f 2842 && ((TREE_CODE (to) == SSA_NAME
2843 && (to == from || SSA_VAL (to) == to))
85e9a542 2844 || is_gimple_min_invariant (to)));
2845
b81ffaee 2846 if (from != to)
2847 {
2848 if (currval == from)
2849 {
2850 if (dump_file && (dump_flags & TDF_DETAILS))
2851 {
2852 fprintf (dump_file, "Not changing value number of ");
2853 print_generic_expr (dump_file, from, 0);
2854 fprintf (dump_file, " from VARYING to ");
2855 print_generic_expr (dump_file, to, 0);
2856 fprintf (dump_file, "\n");
2857 }
2858 return false;
2859 }
2860 else if (TREE_CODE (to) == SSA_NAME
2861 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2862 to = from;
2863 }
5dbdbadc 2864
9e9e6e3e 2865 if (dump_file && (dump_flags & TDF_DETAILS))
2866 {
2867 fprintf (dump_file, "Setting value number of ");
2868 print_generic_expr (dump_file, from, 0);
2869 fprintf (dump_file, " to ");
2870 print_generic_expr (dump_file, to, 0);
9e9e6e3e 2871 }
2872
d68e9408 2873 if (currval != to
2874 && !operand_equal_p (currval, to, 0)
2875 /* ??? For addresses involving volatile objects or types operand_equal_p
2876 does not reliably detect ADDR_EXPRs as equal. We know we are only
2877 getting invariant gimple addresses here, so can use
2878 get_addr_base_and_unit_offset to do this comparison. */
2879 && !(TREE_CODE (currval) == ADDR_EXPR
2880 && TREE_CODE (to) == ADDR_EXPR
2881 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2882 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2883 && coff == toff))
9e9e6e3e 2884 {
dd277d48 2885 VN_INFO (from)->valnum = to;
19744bd4 2886 if (dump_file && (dump_flags & TDF_DETAILS))
2887 fprintf (dump_file, " (changed)\n");
9e9e6e3e 2888 return true;
2889 }
19744bd4 2890 if (dump_file && (dump_flags & TDF_DETAILS))
2891 fprintf (dump_file, "\n");
9e9e6e3e 2892 return false;
2893}
2894
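/* Example (not from the sources): value numbers only ever move down
   the lattice.  If x_1 has already been forced to VARYING (valued as
   itself), a later attempt to set it to a constant is refused with the
   "Not changing value number" dump message; conversely, setting a
   fresh name from VN_TOP to a constant returns true so the SCC
   iteration knows to run another round.  */
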
b736e424 2895/* Mark as processed all the definitions in the defining stmt of USE, or
2896 the USE itself. */
2897
2898static void
2899mark_use_processed (tree use)
2900{
2901 ssa_op_iter iter;
2902 def_operand_p defp;
2903 gimple stmt = SSA_NAME_DEF_STMT (use);
2904
2905 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2906 {
2907 VN_INFO (use)->use_processed = true;
2908 return;
2909 }
2910
2911 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2912 {
2913 tree def = DEF_FROM_PTR (defp);
2914
2915 VN_INFO (def)->use_processed = true;
2916 }
2917}
2918
9e9e6e3e 2919/* Set all definitions in STMT to value number to themselves.
2920 Return true if a value number changed. */
2921
2922static bool
75a70cf9 2923defs_to_varying (gimple stmt)
9e9e6e3e 2924{
2925 bool changed = false;
2926 ssa_op_iter iter;
2927 def_operand_p defp;
2928
2929 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2930 {
2931 tree def = DEF_FROM_PTR (defp);
9e9e6e3e 2932 changed |= set_ssa_val_to (def, def);
2933 }
2934 return changed;
2935}
2936
64919a86 2937static bool expr_has_constants (tree expr);
1d9353f3 2938
9e9e6e3e 2939/* Visit a copy between LHS and RHS, return true if the value number
2940 changed. */
2941
2942static bool
2943visit_copy (tree lhs, tree rhs)
2944{
9e9e6e3e 2945 /* The copy may have a more interesting constant-filled expression
2946 (we don't, since we know our RHS is just an SSA name). */
cc99c1e1 2947 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2948 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2949
2950 /* And finally valueize. */
2951 rhs = SSA_VAL (rhs);
9e9e6e3e 2952
2953 return set_ssa_val_to (lhs, rhs);
2954}
2955
0fea623c 2956/* Visit a nary operator RHS, value number it, and return true if the
9e9e6e3e 2957 value number of LHS has changed as a result. */
2958
2959static bool
0fea623c 2960visit_nary_op (tree lhs, gimple stmt)
9e9e6e3e 2961{
2962 bool changed = false;
75a70cf9 2963 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
9e9e6e3e 2964
2965 if (result)
0fea623c 2966 changed = set_ssa_val_to (lhs, result);
75a70cf9 2967 else
2968 {
2969 changed = set_ssa_val_to (lhs, lhs);
2970 vn_nary_op_insert_stmt (stmt, lhs);
2971 }
2972
2973 return changed;
2974}
2975
2976/* Visit a call STMT storing into LHS. Return true if the value number
2977 of the LHS has changed as a result. */
2978
2979static bool
1a91d914 2980visit_reference_op_call (tree lhs, gcall *stmt)
9e9e6e3e 2981{
2982 bool changed = false;
75a70cf9 2983 struct vn_reference_s vr1;
b736e424 2984 vn_reference_t vnresult = NULL;
b736e424 2985 tree vdef = gimple_vdef (stmt);
9e9e6e3e 2986
7ec657ff 2987 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2988 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2989 lhs = NULL_TREE;
2990
2fd3ecff 2991 vn_reference_lookup_call (stmt, &vnresult, &vr1);
b736e424 2992 if (vnresult)
9e9e6e3e 2993 {
d1a94241 2994 if (vnresult->result_vdef && vdef)
b736e424 2995 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2996
2997 if (!vnresult->result && lhs)
2998 vnresult->result = lhs;
2999
3000 if (vnresult->result && lhs)
3001 {
3002 changed |= set_ssa_val_to (lhs, vnresult->result);
3003
3004 if (VN_INFO (vnresult->result)->has_constants)
3005 VN_INFO (lhs)->has_constants = true;
3006 }
9e9e6e3e 3007 }
3008 else
3009 {
75a70cf9 3010 vn_reference_t vr2;
2fd3ecff 3011 vn_reference_s **slot;
b736e424 3012 if (vdef)
3013 changed |= set_ssa_val_to (vdef, vdef);
3014 if (lhs)
3015 changed |= set_ssa_val_to (lhs, lhs);
d27a7bc4 3016 vr2 = current_info->references_pool->allocate ();
dd277d48 3017 vr2->vuse = vr1.vuse;
2fd3ecff 3018 /* As we are not walking the virtual operand chain we know the
3019 shared_lookup_references are still original so we can re-use
3020 them here. */
3021 vr2->operands = vr1.operands.copy ();
3918bd18 3022 vr2->type = vr1.type;
3023 vr2->set = vr1.set;
75a70cf9 3024 vr2->hashcode = vr1.hashcode;
3025 vr2->result = lhs;
b736e424 3026 vr2->result_vdef = vdef;
c1f445d2 3027 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3028 INSERT);
2fd3ecff 3029 gcc_assert (!*slot);
75a70cf9 3030 *slot = vr2;
9e9e6e3e 3031 }
3032
3033 return changed;
3034}
3035
3036/* Visit a load from a reference operator RHS, part of STMT, value number it,
3037 and return true if the value number of the LHS has changed as a result. */
3038
3039static bool
75a70cf9 3040visit_reference_op_load (tree lhs, tree op, gimple stmt)
9e9e6e3e 3041{
3042 bool changed = false;
4a83fadb 3043 tree last_vuse;
3044 tree result;
3045
3046 last_vuse = gimple_vuse (stmt);
3047 last_vuse_ptr = &last_vuse;
8f190c8a 3048 result = vn_reference_lookup (op, gimple_vuse (stmt),
3049 default_vn_walk_kind, NULL);
4a83fadb 3050 last_vuse_ptr = NULL;
9e9e6e3e 3051
1d9353f3 3052 /* We handle type-punning through unions by value-numbering based
3053 on offset and size of the access. Be prepared to handle a
3054 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3055 if (result
3056 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3057 {
3058 /* We will be setting the value number of lhs to the value number
3059 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3060 So first simplify and lookup this expression to see if it
3061 is already available. */
3062 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
37279c98 3063 if ((CONVERT_EXPR_P (val)
3064 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
3065 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1d9353f3 3066 {
ff99a695 3067 tree tem = vn_get_expr_for (TREE_OPERAND (val, 0));
37279c98 3068 if ((CONVERT_EXPR_P (tem)
3069 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
cd30b839 3070 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
3071 TREE_TYPE (val), tem)))
1d9353f3 3072 val = tem;
3073 }
3074 result = val;
3075 if (!is_gimple_min_invariant (val)
3076 && TREE_CODE (val) != SSA_NAME)
f6c33c78 3077 result = vn_nary_op_lookup (val, NULL);
1d9353f3 3078 /* If the expression is not yet available, value-number lhs to
3079 a new SSA_NAME we create. */
182cf5a9 3080 if (!result)
1d9353f3 3081 {
ec11736b 3082 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
3083 "vntemp");
1d9353f3 3084 /* Initialize value-number information properly. */
3085 VN_INFO_GET (result)->valnum = result;
75a70cf9 3086 VN_INFO (result)->value_id = get_next_value_id ();
1d9353f3 3087 VN_INFO (result)->expr = val;
64919a86 3088 VN_INFO (result)->has_constants = expr_has_constants (val);
1d9353f3 3089 VN_INFO (result)->needs_insertion = true;
3090 /* As all "inserted" statements are singleton SCCs, insert
3091 to the valid table. This is strictly needed to
3092 avoid re-generating new value SSA_NAMEs for the same
3093 expression during SCC iteration over and over (the
3094 optimistic table gets cleared after each iteration).
3095 We do not need to insert into the optimistic table, as
3096 lookups there will fall back to the valid table. */
3097 if (current_info == optimistic_info)
3098 {
3099 current_info = valid_info;
3100 vn_nary_op_insert (val, result);
3101 current_info = optimistic_info;
3102 }
3103 else
3104 vn_nary_op_insert (val, result);
3105 if (dump_file && (dump_flags & TDF_DETAILS))
3106 {
3107 fprintf (dump_file, "Inserting name ");
3108 print_generic_expr (dump_file, result, 0);
3109 fprintf (dump_file, " for expression ");
3110 print_generic_expr (dump_file, val, 0);
3111 fprintf (dump_file, "\n");
3112 }
3113 }
3114 }
3115
9e9e6e3e 3116 if (result)
3117 {
3118 changed = set_ssa_val_to (lhs, result);
b9e98b8a 3119 if (TREE_CODE (result) == SSA_NAME
3120 && VN_INFO (result)->has_constants)
3121 {
3122 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
3123 VN_INFO (lhs)->has_constants = true;
3124 }
9e9e6e3e 3125 }
3126 else
3127 {
3128 changed = set_ssa_val_to (lhs, lhs);
39215e09 3129 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
9e9e6e3e 3130 }
3131
3132 return changed;
3133}
3134
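/* Illustrative example (not from the sources) of the type-punning path
   above: for

     union { int i; float f; } u;
     u.i = j_1;
     ... = u.f;

   the lookup of u.f finds j_1 by offset and size, and the mismatch of
   int vs. float is papered over by valuing the load as
   VIEW_CONVERT_EXPR <float> (j_1), inserting a fresh SSA name for that
   expression if it is not available yet.  */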
3135
3136/* Visit a store to a reference operator LHS, part of STMT, value number it,
3137 and return true if the value number of the LHS has changed as a result. */
3138
3139static bool
75a70cf9 3140visit_reference_op_store (tree lhs, tree op, gimple stmt)
9e9e6e3e 3141{
3142 bool changed = false;
39215e09 3143 vn_reference_t vnresult = NULL;
3144 tree result, assign;
9e9e6e3e 3145 bool resultsame = false;
39215e09 3146 tree vuse = gimple_vuse (stmt);
3147 tree vdef = gimple_vdef (stmt);
9e9e6e3e 3148
9251bb6f 3149 if (TREE_CODE (op) == SSA_NAME)
3150 op = SSA_VAL (op);
3151
9e9e6e3e 3152 /* First we want to look up using the *vuses* from the store and see
 3153 if the last store to this location with the same address
3154 had the same value.
3155
3156 The vuses represent the memory state before the store. If the
3157 memory state, address, and value of the store is the same as the
3158 last store to this location, then this store will produce the
3159 same memory state as that store.
3160
3161 In this case the vdef versions for this store are value numbered to those
3162 vuse versions, since they represent the same memory state after
3163 this store.
3164
3165 Otherwise, the vdefs for the store are used when inserting into
3166 the table, since the store generates a new memory state. */
3167
39215e09 3168 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
9e9e6e3e 3169
3170 if (result)
3171 {
3172 if (TREE_CODE (result) == SSA_NAME)
3173 result = SSA_VAL (result);
3174 resultsame = expressions_equal_p (result, op);
3175 }
3176
2fd3ecff 3177 if ((!result || !resultsame)
3178 /* Only perform the following when being called from PRE
3179 which embeds tail merging. */
3180 && default_vn_walk_kind == VN_WALK)
9e9e6e3e 3181 {
39215e09 3182 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3183 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3184 if (vnresult)
3185 {
3186 VN_INFO (vdef)->use_processed = true;
3187 return set_ssa_val_to (vdef, vnresult->result_vdef);
3188 }
3189 }
9e9e6e3e 3190
39215e09 3191 if (!result || !resultsame)
3192 {
9e9e6e3e 3193 if (dump_file && (dump_flags & TDF_DETAILS))
3194 {
3195 fprintf (dump_file, "No store match\n");
3196 fprintf (dump_file, "Value numbering store ");
3197 print_generic_expr (dump_file, lhs, 0);
3198 fprintf (dump_file, " to ");
3199 print_generic_expr (dump_file, op, 0);
3200 fprintf (dump_file, "\n");
3201 }
3202 /* Have to set value numbers before insert, since insert is
3203 going to valueize the references in-place. */
39215e09 3204 if (vdef)
9e9e6e3e 3205 {
9e9e6e3e 3206 changed |= set_ssa_val_to (vdef, vdef);
3207 }
3208
802d9f2f 3209 /* Do not insert structure copies into the tables. */
3210 if (is_gimple_min_invariant (op)
3211 || is_gimple_reg (op))
39215e09 3212 vn_reference_insert (lhs, op, vdef, NULL);
3213
2fd3ecff 3214 /* Only perform the following when being called from PRE
3215 which embeds tail merging. */
3216 if (default_vn_walk_kind == VN_WALK)
3217 {
3218 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3219 vn_reference_insert (assign, lhs, vuse, vdef);
3220 }
9e9e6e3e 3221 }
3222 else
3223 {
dd277d48 3224 /* We had a match, so value number the vdef to have the value
3225 number of the vuse it came from. */
9e9e6e3e 3226
3227 if (dump_file && (dump_flags & TDF_DETAILS))
3228 fprintf (dump_file, "Store matched earlier value,"
3229 "value numbering store vdefs to matching vuses.\n");
3230
39215e09 3231 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
9e9e6e3e 3232 }
3233
3234 return changed;
3235}
3236
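/* Example (not from the sources): in

     *p_1 = x_2;
     ...            (no intervening may-def of *p_1)
     *p_1 = x_2;

   the second store looks up *p_1 with its vuse, finds x_2, and takes
   the "Store matched earlier value" path: its vdef is valued the same
   as its vuse, exposing the store as redundant to later passes.  */
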
3237/* Visit and value number PHI, return true if the value number
3238 changed. */
3239
3240static bool
75a70cf9 3241visit_phi (gimple phi)
9e9e6e3e 3242{
3243 bool changed = false;
3244 tree result;
3245 tree sameval = VN_TOP;
3246 bool allsame = true;
9e9e6e3e 3247
5f6261a7 3248 /* TODO: We could check for this in init_sccvn, and replace this
3249 with a gcc_assert. */
3250 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3251 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3252
9e9e6e3e 3253 /* See if all non-TOP arguments have the same value. TOP is
3254 equivalent to everything, so we can ignore it. */
85e9a542 3255 edge_iterator ei;
3256 edge e;
3257 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3258 if (e->flags & EDGE_EXECUTABLE)
3259 {
3260 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
9e9e6e3e 3261
85e9a542 3262 if (TREE_CODE (def) == SSA_NAME)
3263 def = SSA_VAL (def);
3264 if (def == VN_TOP)
3265 continue;
3266 if (sameval == VN_TOP)
3267 {
3268 sameval = def;
3269 }
3270 else
3271 {
3272 if (!expressions_equal_p (def, sameval))
3273 {
3274 allsame = false;
3275 break;
3276 }
3277 }
3278 }
9e9e6e3e 3279
3280 /* If all value numbered to the same value, the phi node has that
3281 value. */
3282 if (allsame)
b46fda07 3283 return set_ssa_val_to (PHI_RESULT (phi), sameval);
9e9e6e3e 3284
3285 /* Otherwise, see if it is equivalent to a phi node in this block. */
3286 result = vn_phi_lookup (phi);
3287 if (result)
b46fda07 3288 changed = set_ssa_val_to (PHI_RESULT (phi), result);
9e9e6e3e 3289 else
3290 {
3291 vn_phi_insert (phi, PHI_RESULT (phi));
3292 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3293 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3294 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3295 }
3296
3297 return changed;
3298}
3299
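/* Example (not from the sources): with both incoming edges executable,

     x_3 = PHI <5 (e1), 5 (e2)>

   value-numbers x_3 to 5.  Arguments arriving only over not-executable
   edges are skipped entirely, and VN_TOP arguments are optimistically
   ignored, so a PHI whose reachable arguments agree still collapses to
   that common value.  */
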
3300/* Return true if EXPR contains constants. */
3301
3302static bool
3303expr_has_constants (tree expr)
3304{
3305 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3306 {
3307 case tcc_unary:
3308 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3309
3310 case tcc_binary:
3311 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3312 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3313 /* Constants inside reference ops are rarely interesting, but
3314 it can take a lot of looking to find them. */
3315 case tcc_reference:
70ae6476 3316 case tcc_declaration:
9e9e6e3e 3317 return false;
3318 default:
3319 return is_gimple_min_invariant (expr);
3320 }
3321 return false;
3322}
3323
75a70cf9 3324/* Return true if STMT contains constants. */
3325
3326static bool
3327stmt_has_constants (gimple stmt)
3328{
cc99c1e1 3329 tree tem;
3330
75a70cf9 3331 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3332 return false;
3333
3334 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3335 {
cc99c1e1 3336 case GIMPLE_TERNARY_RHS:
3337 tem = gimple_assign_rhs3 (stmt);
3338 if (TREE_CODE (tem) == SSA_NAME)
3339 tem = SSA_VAL (tem);
3340 if (is_gimple_min_invariant (tem))
3341 return true;
3342 /* Fallthru. */
75a70cf9 3343
3344 case GIMPLE_BINARY_RHS:
cc99c1e1 3345 tem = gimple_assign_rhs2 (stmt);
3346 if (TREE_CODE (tem) == SSA_NAME)
3347 tem = SSA_VAL (tem);
3348 if (is_gimple_min_invariant (tem))
3349 return true;
3350 /* Fallthru. */
3351
75a70cf9 3352 case GIMPLE_SINGLE_RHS:
3353 /* Constants inside reference ops are rarely interesting, but
3354 it can take a lot of looking to find them. */
cc99c1e1 3355 case GIMPLE_UNARY_RHS:
3356 tem = gimple_assign_rhs1 (stmt);
3357 if (TREE_CODE (tem) == SSA_NAME)
3358 tem = SSA_VAL (tem);
3359 return is_gimple_min_invariant (tem);
3360
75a70cf9 3361 default:
3362 gcc_unreachable ();
3363 }
3364 return false;
3365}
3366
9e9e6e3e 3367/* Simplify the binary expression RHS, and return the result if
3368 simplified. */
3369
3370static tree
75a70cf9 3371simplify_binary_expression (gimple stmt)
9e9e6e3e 3372{
3373 tree result = NULL_TREE;
75a70cf9 3374 tree op0 = gimple_assign_rhs1 (stmt);
3375 tree op1 = gimple_assign_rhs2 (stmt);
77d62cb7 3376 enum tree_code code = gimple_assign_rhs_code (stmt);
9e9e6e3e 3377
3378 /* This will not catch every single case we could combine, but will
3379 catch those with constants. The goal here is to simultaneously
3380 combine constants between expressions, but avoid infinite
3381 expansion of expressions during simplification. */
b46fda07 3382 op0 = vn_valueize (op0);
3383 if (TREE_CODE (op0) == SSA_NAME
3384 && (VN_INFO (op0)->has_constants
77d62cb7 3385 || TREE_CODE_CLASS (code) == tcc_comparison
b46fda07 3386 || code == COMPLEX_EXPR))
3387 op0 = vn_get_expr_for (op0);
9e9e6e3e 3388
b46fda07 3389 op1 = vn_valueize (op1);
3390 if (TREE_CODE (op1) == SSA_NAME
3391 && (VN_INFO (op1)->has_constants
3392 || code == COMPLEX_EXPR))
3393 op1 = vn_get_expr_for (op1);
1c6d350b 3394
1d0b727d 3395 /* Pointer plus constant can be represented as invariant address.
 3396 Do so to allow further propagation; see also tree forwprop. */
77d62cb7 3397 if (code == POINTER_PLUS_EXPR
e913b5cd 3398 && tree_fits_uhwi_p (op1)
1d0b727d 3399 && TREE_CODE (op0) == ADDR_EXPR
3400 && is_gimple_min_invariant (op0))
3401 return build_invariant_address (TREE_TYPE (op0),
3402 TREE_OPERAND (op0, 0),
e913b5cd 3403 tree_to_uhwi (op1));
1d0b727d 3404
e01e695f 3405 /* Avoid folding if nothing changed. */
75a70cf9 3406 if (op0 == gimple_assign_rhs1 (stmt)
3407 && op1 == gimple_assign_rhs2 (stmt))
e01e695f 3408 return NULL_TREE;
3409
72c59a18 3410 fold_defer_overflow_warnings ();
3411
77d62cb7 3412 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
19744bd4 3413 if (result)
3414 STRIP_USELESS_TYPE_CONVERSION (result);
9e9e6e3e 3415
75a70cf9 3416 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
72c59a18 3417 stmt, 0);
3418
6dfdc153 3419 /* Make sure result is not a complex expression consisting
9e9e6e3e 3420 of operators of operators (i.e. (a + b) + (a + c)).
3421 Otherwise, we will end up with unbounded expressions if
3422 fold does anything at all. */
75a70cf9 3423 if (result && valid_gimple_rhs_p (result))
1c6d350b 3424 return result;
3425
9e9e6e3e 3426 return NULL_TREE;
3427}
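
/* For example, a hypothetical p_1 = &a + 4, with &a an invariant
   ADDR_EXPR and 4 a constant that fits an unsigned HOST_WIDE_INT, is
   rewritten by the POINTER_PLUS_EXPR case above into the invariant
   address &MEM[(void *)&a + 4B] that later propagation can consume
   directly.  */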
3428
e01e695f 3429/* Simplify the unary expression on the RHS of STMT, and return the
3430 result if simplified. */
3431
3432static tree
1a91d914 3433simplify_unary_expression (gassign *stmt)
e01e695f 3434{
3435 tree result = NULL_TREE;
75a70cf9 3436 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
77d62cb7 3437 enum tree_code code = gimple_assign_rhs_code (stmt);
75a70cf9 3438
3439 /* We handle some tcc_reference codes here that are all
3440 GIMPLE_ASSIGN_SINGLE codes. */
77d62cb7 3441 if (code == REALPART_EXPR
3442 || code == IMAGPART_EXPR
3eebeec6 3443 || code == VIEW_CONVERT_EXPR
3444 || code == BIT_FIELD_REF)
75a70cf9 3445 op0 = TREE_OPERAND (op0, 0);
e01e695f 3446
75a70cf9 3447 orig_op0 = op0;
b46fda07 3448 op0 = vn_valueize (op0);
3449 if (TREE_CODE (op0) == SSA_NAME)
e01e695f 3450 {
b46fda07 3451 if (VN_INFO (op0)->has_constants)
3452 op0 = vn_get_expr_for (op0);
3453 else if (CONVERT_EXPR_CODE_P (code)
3454 || code == REALPART_EXPR
3455 || code == IMAGPART_EXPR
3456 || code == VIEW_CONVERT_EXPR
3457 || code == BIT_FIELD_REF)
3458 {
3459 /* We want to do tree-combining on conversion-like expressions.
3460 Make sure we feed only SSA_NAMEs or constants to fold though. */
3461 tree tem = vn_get_expr_for (op0);
3462 if (UNARY_CLASS_P (tem)
3463 || BINARY_CLASS_P (tem)
3464 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3465 || TREE_CODE (tem) == SSA_NAME
3466 || TREE_CODE (tem) == CONSTRUCTOR
3467 || is_gimple_min_invariant (tem))
3468 op0 = tem;
3469 }
e01e695f 3470 }
3471
3472 /* Avoid folding if nothing changed, but remember the expression. */
75a70cf9 3473 if (op0 == orig_op0)
3474 return NULL_TREE;
e01e695f 3475
3eebeec6 3476 if (code == BIT_FIELD_REF)
3477 {
3478 tree rhs = gimple_assign_rhs1 (stmt);
3479 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3480 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3481 }
3482 else
3483 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
e01e695f 3484 if (result)
3485 {
3486 STRIP_USELESS_TYPE_CONVERSION (result);
75a70cf9 3487 if (valid_gimple_rhs_p (result))
e01e695f 3488 return result;
3489 }
3490
75a70cf9 3491 return NULL_TREE;
e01e695f 3492}
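
/* For example, given the hypothetical statements

     b_2 = (short) a_1;
     c_3 = (int) b_2;

   visiting c_3 substitutes the expression (short) a_1 for b_2, so
   fold_unary sees (int)(short) a_1 and can combine the two
   conversions where that is semantically valid.  */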
3493
9e9e6e3e 3494/* Try to simplify RHS using equivalences and constant folding. */
3495
3496static tree
1a91d914 3497try_to_simplify (gassign *stmt)
9e9e6e3e 3498{
ce993cc2 3499 enum tree_code code = gimple_assign_rhs_code (stmt);
e004838d 3500 tree tem;
3501
d4cdfd27 3502 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3503 in this case; there is no point in doing extra work. */
ce993cc2 3504 if (code == SSA_NAME)
75a70cf9 3505 return NULL_TREE;
e004838d 3506
1d0b727d 3507 /* First try constant folding based on our current lattice. */
ef8cb3d3 3508 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
ce993cc2 3509 if (tem
3510 && (TREE_CODE (tem) == SSA_NAME
3511 || is_gimple_min_invariant (tem)))
1d0b727d 3512 return tem;
3513
3514 /* If that didn't work try combining multiple statements. */
ce993cc2 3515 switch (TREE_CODE_CLASS (code))
9e9e6e3e 3516 {
e004838d 3517 case tcc_reference:
ce993cc2 3518 /* Fallthrough for some unary codes that can operate on registers. */
3519 if (!(code == REALPART_EXPR
3520 || code == IMAGPART_EXPR
3eebeec6 3521 || code == VIEW_CONVERT_EXPR
3522 || code == BIT_FIELD_REF))
e004838d 3523 break;
3524 /* We could do a little more with unary ops, if they expand
3525 into binary ops, but it's debatable whether it is worth it. */
3526 case tcc_unary:
75a70cf9 3527 return simplify_unary_expression (stmt);
1d0b727d 3528
e004838d 3529 case tcc_comparison:
3530 case tcc_binary:
75a70cf9 3531 return simplify_binary_expression (stmt);
1d0b727d 3532
e004838d 3533 default:
3534 break;
9e9e6e3e 3535 }
e004838d 3536
75a70cf9 3537 return NULL_TREE;
9e9e6e3e 3538}
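
/* For example, assuming the current lattice records SSA_VAL (a_1) == 4,
   then for a hypothetical b_2 = a_1 + 5 the call to
   gimple_fold_stmt_to_constant_1 valueizes a_1 to 4 and folds the RHS
   to the constant 9; only when such folding fails do we fall back to
   the statement-combining routines above.  */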
3539
3540/* Visit and value number USE, return true if the value number
3541 changed. */
3542
3543static bool
3544visit_use (tree use)
3545{
3546 bool changed = false;
75a70cf9 3547 gimple stmt = SSA_NAME_DEF_STMT (use);
9e9e6e3e 3548
b736e424 3549 mark_use_processed (use);
9e9e6e3e 3550
3551 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1d9353f3 3552 if (dump_file && (dump_flags & TDF_DETAILS)
75a70cf9 3553 && !SSA_NAME_IS_DEFAULT_DEF (use))
9e9e6e3e 3554 {
3555 fprintf (dump_file, "Value numbering ");
3556 print_generic_expr (dump_file, use, 0);
3557 fprintf (dump_file, " stmt = ");
75a70cf9 3558 print_gimple_stmt (dump_file, stmt, 0, 0);
9e9e6e3e 3559 }
3560
9e9e6e3e 3561 /* Handle uninitialized uses. */
75a70cf9 3562 if (SSA_NAME_IS_DEFAULT_DEF (use))
3563 changed = set_ssa_val_to (use, use);
9e9e6e3e 3564 else
3565 {
75a70cf9 3566 if (gimple_code (stmt) == GIMPLE_PHI)
3567 changed = visit_phi (stmt);
b736e424 3568 else if (gimple_has_volatile_ops (stmt))
75a70cf9 3569 changed = defs_to_varying (stmt);
3570 else if (is_gimple_assign (stmt))
9e9e6e3e 3571 {
7aa07231 3572 enum tree_code code = gimple_assign_rhs_code (stmt);
75a70cf9 3573 tree lhs = gimple_assign_lhs (stmt);
7aa07231 3574 tree rhs1 = gimple_assign_rhs1 (stmt);
9e9e6e3e 3575 tree simplified;
3576
2a922cb6 3577 /* Shortcut for copies. Simplifying copies is pointless,
3578 since we copy the expression and value they represent. */
7aa07231 3579 if (code == SSA_NAME
75a70cf9 3580 && TREE_CODE (lhs) == SSA_NAME)
2a922cb6 3581 {
7aa07231 3582 changed = visit_copy (lhs, rhs1);
2a922cb6 3583 goto done;
3584 }
1a91d914 3585 simplified = try_to_simplify (as_a <gassign *> (stmt));
75a70cf9 3586 if (simplified)
9e9e6e3e 3587 {
3588 if (dump_file && (dump_flags & TDF_DETAILS))
3589 {
3590 fprintf (dump_file, "RHS ");
75a70cf9 3591 print_gimple_expr (dump_file, stmt, 0, 0);
9e9e6e3e 3592 fprintf (dump_file, " simplified to ");
3593 print_generic_expr (dump_file, simplified, 0);
3594 if (TREE_CODE (lhs) == SSA_NAME)
3595 fprintf (dump_file, " has constants %d\n",
404d6be4 3596 expr_has_constants (simplified));
9e9e6e3e 3597 else
3598 fprintf (dump_file, "\n");
9e9e6e3e 3599 }
3600 }
3601 /* Setting value numbers to constants will occasionally
3602 screw up phi congruence because constants are not
3603 uniquely associated with a single ssa name that can be
3604 looked up. */
75a70cf9 3605 if (simplified
3606 && is_gimple_min_invariant (simplified)
3607 && TREE_CODE (lhs) == SSA_NAME)
9e9e6e3e 3608 {
3609 VN_INFO (lhs)->expr = simplified;
3610 VN_INFO (lhs)->has_constants = true;
3611 changed = set_ssa_val_to (lhs, simplified);
3612 goto done;
3613 }
75a70cf9 3614 else if (simplified
3615 && TREE_CODE (simplified) == SSA_NAME
9e9e6e3e 3616 && TREE_CODE (lhs) == SSA_NAME)
3617 {
3618 changed = visit_copy (lhs, simplified);
3619 goto done;
3620 }
3621 else if (simplified)
3622 {
3623 if (TREE_CODE (lhs) == SSA_NAME)
3624 {
3625 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3626 /* We have to unshare the expression or else
3627 valueizing may change the IL stream. */
3628 VN_INFO (lhs)->expr = unshare_expr (simplified);
3629 }
9e9e6e3e 3630 }
75a70cf9 3631 else if (stmt_has_constants (stmt)
3632 && TREE_CODE (lhs) == SSA_NAME)
3633 VN_INFO (lhs)->has_constants = true;
9e9e6e3e 3634 else if (TREE_CODE (lhs) == SSA_NAME)
3635 {
3636 /* We reset expr and constantness here because we may
3637 have been value numbering optimistically, and
3638 iterating. They may become non-constant in this case,
3639 even if they were optimistically constant. */
99698cf3 3640
9e9e6e3e 3641 VN_INFO (lhs)->has_constants = false;
75a70cf9 3642 VN_INFO (lhs)->expr = NULL_TREE;
9e9e6e3e 3643 }
3644
a4c8b601 3645 if ((TREE_CODE (lhs) == SSA_NAME
3646 /* We can substitute SSA_NAMEs that are live over
3647 abnormal edges with their constant value. */
3648 && !(gimple_assign_copy_p (stmt)
7aa07231 3649 && is_gimple_min_invariant (rhs1))
a4c8b601 3650 && !(simplified
3651 && is_gimple_min_invariant (simplified))
3652 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3653 /* Stores or copies from SSA_NAMEs that are live over
3654 abnormal edges are a problem. */
7aa07231 3655 || (code == SSA_NAME
3656 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
9e9e6e3e 3657 changed = defs_to_varying (stmt);
7aa07231 3658 else if (REFERENCE_CLASS_P (lhs)
3659 || DECL_P (lhs))
3660 changed = visit_reference_op_store (lhs, rhs1, stmt);
9e9e6e3e 3661 else if (TREE_CODE (lhs) == SSA_NAME)
3662 {
75a70cf9 3663 if ((gimple_assign_copy_p (stmt)
7aa07231 3664 && is_gimple_min_invariant (rhs1))
75a70cf9 3665 || (simplified
3666 && is_gimple_min_invariant (simplified)))
9e9e6e3e 3667 {
3668 VN_INFO (lhs)->has_constants = true;
75a70cf9 3669 if (simplified)
3670 changed = set_ssa_val_to (lhs, simplified);
3671 else
7aa07231 3672 changed = set_ssa_val_to (lhs, rhs1);
9e9e6e3e 3673 }
9e9e6e3e 3674 else
3675 {
3a73dcc2 3676 /* First try to look up the simplified expression. */
3677 if (simplified)
3678 {
3679 enum gimple_rhs_class rhs_class;
3680
3681
3682 rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3683 if ((rhs_class == GIMPLE_UNARY_RHS
3684 || rhs_class == GIMPLE_BINARY_RHS
3685 || rhs_class == GIMPLE_TERNARY_RHS)
3686 && valid_gimple_rhs_p (simplified))
3687 {
3688 tree result = vn_nary_op_lookup (simplified, NULL);
3689 if (result)
3690 {
3691 changed = set_ssa_val_to (lhs, result);
3692 goto done;
3693 }
3694 }
3695 }
3696
3697 /* Otherwise visit the original statement. */
024fee2c 3698 switch (vn_get_stmt_kind (stmt))
9e9e6e3e 3699 {
024fee2c 3700 case VN_NARY:
0fea623c 3701 changed = visit_nary_op (lhs, stmt);
9e9e6e3e 3702 break;
024fee2c 3703 case VN_REFERENCE:
3704 changed = visit_reference_op_load (lhs, rhs1, stmt);
75a70cf9 3705 break;
9e9e6e3e 3706 default:
3707 changed = defs_to_varying (stmt);
3708 break;
3709 }
3710 }
3711 }
3712 else
3713 changed = defs_to_varying (stmt);
3714 }
1a91d914 3715 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
75a70cf9 3716 {
3717 tree lhs = gimple_call_lhs (stmt);
b736e424 3718 if (lhs && TREE_CODE (lhs) == SSA_NAME)
75a70cf9 3719 {
b15c78f2 3720 /* Try constant folding based on our current lattice. */
3721 tree simplified = gimple_fold_stmt_to_constant_1 (stmt,
3722 vn_valueize);
3723 if (simplified)
3724 {
3725 if (dump_file && (dump_flags & TDF_DETAILS))
3726 {
3727 fprintf (dump_file, "call ");
3728 print_gimple_expr (dump_file, stmt, 0, 0);
3729 fprintf (dump_file, " simplified to ");
3730 print_generic_expr (dump_file, simplified, 0);
3731 if (TREE_CODE (lhs) == SSA_NAME)
3732 fprintf (dump_file, " has constants %d\n",
3733 expr_has_constants (simplified));
3734 else
3735 fprintf (dump_file, "\n");
3736 }
3737 }
3738 /* Setting value numbers to constants will occasionally
3739 screw up phi congruence because constants are not
3740 uniquely associated with a single ssa name that can be
3741 looked up. */
3742 if (simplified
3743 && is_gimple_min_invariant (simplified))
b736e424 3744 {
b15c78f2 3745 VN_INFO (lhs)->expr = simplified;
3746 VN_INFO (lhs)->has_constants = true;
3747 changed = set_ssa_val_to (lhs, simplified);
3748 if (gimple_vdef (stmt))
3749 changed |= set_ssa_val_to (gimple_vdef (stmt),
9251bb6f 3750 SSA_VAL (gimple_vuse (stmt)));
b15c78f2 3751 goto done;
b736e424 3752 }
b15c78f2 3753 else if (simplified
3754 && TREE_CODE (simplified) == SSA_NAME)
b736e424 3755 {
b15c78f2 3756 changed = visit_copy (lhs, simplified);
3757 if (gimple_vdef (stmt))
3758 changed |= set_ssa_val_to (gimple_vdef (stmt),
9251bb6f 3759 SSA_VAL (gimple_vuse (stmt)));
b736e424 3760 goto done;
3761 }
b15c78f2 3762 else
3763 {
3764 if (stmt_has_constants (stmt))
3765 VN_INFO (lhs)->has_constants = true;
3766 else
3767 {
3768 /* We reset expr and constantness here because we may
3769 have been value numbering optimistically, and
3770 iterating. They may become non-constant in this case,
3771 even if they were optimistically constant. */
3772 VN_INFO (lhs)->has_constants = false;
3773 VN_INFO (lhs)->expr = NULL_TREE;
3774 }
3775
3776 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3777 {
3778 changed = defs_to_varying (stmt);
3779 goto done;
3780 }
3781 }
75a70cf9 3782 }
3783
b736e424 3784 if (!gimple_call_internal_p (stmt)
7ec657ff 3785 && (/* Calls to the same function with the same vuse
3786 and the same operands do not necessarily return the same
3787 value, unless they're pure or const. */
3788 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3789 /* If calls have a vdef, subsequent calls won't have
3790 the same incoming vuse. So, if two calls with a vdef have the
3791 same vuse, we know they're not subsequent.
3792 Two such non-subsequent calls to the same function with the
3793 same vuse and the same operands can be value numbered the
3794 same, because there is no code in the program that can
affbda71 3795 compare the two values... */
3796 || (gimple_vdef (stmt)
3797 /* ... unless the call returns a pointer which does
3798 not alias with anything else, in which case the
3799 information that the values are distinct is encoded
3800 in the IL. */
1a91d914 3801 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
2fd3ecff 3802 /* Only perform the following when being called from PRE
3803 which embeds tail merging. */
3804 && default_vn_walk_kind == VN_WALK)))
1a91d914 3805 changed = visit_reference_op_call (lhs, call_stmt);
75a70cf9 3806 else
3807 changed = defs_to_varying (stmt);
3808 }
b736e424 3809 else
3810 changed = defs_to_varying (stmt);
9e9e6e3e 3811 }
3812 done:
3813 return changed;
3814}
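
/* For example, two hypothetical calls to a pure function with the same
   incoming virtual operand,

     x_1 = f (i_2);
     x_3 = f (i_2);

   satisfy the conditions above, so visit_reference_op_call gives x_1
   and x_3 the same value number: same callee, same operands, same
   vuse.  */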
3815
3816/* Compare two operands by reverse postorder index. */
3817
3818static int
3819compare_ops (const void *pa, const void *pb)
3820{
3821 const tree opa = *((const tree *)pa);
3822 const tree opb = *((const tree *)pb);
75a70cf9 3823 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3824 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
9e9e6e3e 3825 basic_block bba;
3826 basic_block bbb;
3827
75a70cf9 3828 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
f7b092e4 3829 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
75a70cf9 3830 else if (gimple_nop_p (opstmta))
9e9e6e3e 3831 return -1;
75a70cf9 3832 else if (gimple_nop_p (opstmtb))
9e9e6e3e 3833 return 1;
3834
75a70cf9 3835 bba = gimple_bb (opstmta);
3836 bbb = gimple_bb (opstmtb);
9e9e6e3e 3837
3838 if (!bba && !bbb)
f7b092e4 3839 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
9e9e6e3e 3840 else if (!bba)
3841 return -1;
3842 else if (!bbb)
3843 return 1;
3844
3845 if (bba == bbb)
3846 {
75a70cf9 3847 if (gimple_code (opstmta) == GIMPLE_PHI
3848 && gimple_code (opstmtb) == GIMPLE_PHI)
f7b092e4 3849 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
75a70cf9 3850 else if (gimple_code (opstmta) == GIMPLE_PHI)
9e9e6e3e 3851 return -1;
75a70cf9 3852 else if (gimple_code (opstmtb) == GIMPLE_PHI)
9e9e6e3e 3853 return 1;
f7b092e4 3854 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3855 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3856 else
3857 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
9e9e6e3e 3858 }
3859 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3860}
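
/* In other words: names without a defining statement sort first, then
   blocks in RPO order; within one block PHIs precede ordinary
   statements, statements sort by gimple_uid, and the SSA version
   number serves as the final tie-breaker.  */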
3861
3862/* Sort an array containing members of a strongly connected component
3863 SCC so that the members are ordered by RPO number.
3864 This means that when the sort is complete, iterating through the
3865 array will give you the members in RPO order. */
3866
3867static void
f1f41a6c 3868sort_scc (vec<tree> scc)
9e9e6e3e 3869{
f1f41a6c 3870 scc.qsort (compare_ops);
9e9e6e3e 3871}
3872
3df47675 3873/* Insert the no-longer-used nary ONARY into the hash tables of INFO. */
ca4721d3 3874
3df47675 3875static void
3876copy_nary (vn_nary_op_t onary, vn_tables_t info)
ca4721d3 3877{
f8ce304c 3878 size_t size = sizeof_vn_nary_op (onary->length);
3879 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3880 &info->nary_obstack);
ca4721d3 3881 memcpy (nary, onary, size);
f8ce304c 3882 vn_nary_op_insert_into (nary, info->nary, false);
ca4721d3 3883}
3884
3df47675 3885/* Insert the no-longer-used phi OPHI into the hash tables of INFO. */
ca4721d3 3886
3df47675 3887static void
3888copy_phi (vn_phi_t ophi, vn_tables_t info)
ca4721d3 3889{
d27a7bc4 3890 vn_phi_t phi = info->phis_pool->allocate ();
3e871d4d 3891 vn_phi_s **slot;
ca4721d3 3892 memcpy (phi, ophi, sizeof (*phi));
f1f41a6c 3893 ophi->phiargs.create (0);
c1f445d2 3894 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3df47675 3895 gcc_assert (!*slot);
ca4721d3 3896 *slot = phi;
ca4721d3 3897}
3898
3df47675 3899/* Insert the no-longer-used reference OREF into the hash tables of INFO. */
ca4721d3 3900
3df47675 3901static void
3902copy_reference (vn_reference_t oref, vn_tables_t info)
ca4721d3 3903{
ca4721d3 3904 vn_reference_t ref;
3e871d4d 3905 vn_reference_s **slot;
d27a7bc4 3906 ref = info->references_pool->allocate ();
ca4721d3 3907 memcpy (ref, oref, sizeof (*ref));
f1f41a6c 3908 oref->operands.create (0);
c1f445d2 3909 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
ca4721d3 3910 if (*slot)
3911 free_reference (*slot);
3912 *slot = ref;
ca4721d3 3913}
3914
9e9e6e3e 3915/* Process a strongly connected component in the SSA graph. */
3916
3917static void
f1f41a6c 3918process_scc (vec<tree> scc)
9e9e6e3e 3919{
3df47675 3920 tree var;
3921 unsigned int i;
3922 unsigned int iterations = 0;
3923 bool changed = true;
3e871d4d 3924 vn_nary_op_iterator_type hin;
3925 vn_phi_iterator_type hip;
3926 vn_reference_iterator_type hir;
3df47675 3927 vn_nary_op_t nary;
3928 vn_phi_t phi;
3929 vn_reference_t ref;
9e9e6e3e 3930
3df47675 3931 /* If the SCC has a single member, just visit it. */
f1f41a6c 3932 if (scc.length () == 1)
9e9e6e3e 3933 {
f1f41a6c 3934 tree use = scc[0];
ebca8514 3935 if (VN_INFO (use)->use_processed)
3936 return;
3937 /* We need to make sure it doesn't form a cycle itself, which can
3938 happen for self-referential PHI nodes. In that case we would
3939 end up inserting an expression with VN_TOP operands into the
3940 valid table which makes us derive bogus equivalences later.
3941 The cheapest way to check this is to assume it for all PHI nodes. */
3942 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3943 /* Fallthru to iteration. */ ;
3944 else
3945 {
3946 visit_use (use);
3947 return;
3948 }
9e9e6e3e 3949 }
3df47675 3950
c443af04 3951 if (dump_file && (dump_flags & TDF_DETAILS))
3952 print_scc (dump_file, scc);
3953
3df47675 3954 /* Iterate over the SCC with the optimistic table until it stops
3955 changing. */
3956 current_info = optimistic_info;
3957 while (changed)
9e9e6e3e 3958 {
3df47675 3959 changed = false;
3960 iterations++;
b81ffaee 3961 if (dump_file && (dump_flags & TDF_DETAILS))
3962 fprintf (dump_file, "Starting iteration %d\n", iterations);
3df47675 3963 /* As we are value-numbering optimistically we have to
3964 clear the expression tables and the simplified expressions
3965 in each iteration until we converge. */
c1f445d2 3966 optimistic_info->nary->empty ();
3967 optimistic_info->phis->empty ();
3968 optimistic_info->references->empty ();
3df47675 3969 obstack_free (&optimistic_info->nary_obstack, NULL);
3970 gcc_obstack_init (&optimistic_info->nary_obstack);
d27a7bc4 3971 optimistic_info->phis_pool->release ();
3972 optimistic_info->references_pool->release ();
f1f41a6c 3973 FOR_EACH_VEC_ELT (scc, i, var)
3df47675 3974 VN_INFO (var)->expr = NULL_TREE;
f1f41a6c 3975 FOR_EACH_VEC_ELT (scc, i, var)
3df47675 3976 changed |= visit_use (var);
3977 }
9e9e6e3e 3978
c443af04 3979 if (dump_file && (dump_flags & TDF_DETAILS))
3980 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3df47675 3981 statistics_histogram_event (cfun, "SCC iterations", iterations);
9e9e6e3e 3982
3df47675 3983 /* Finally, copy the contents of the no longer used optimistic
3984 table to the valid table. */
c1f445d2 3985 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3df47675 3986 copy_nary (nary, valid_info);
c1f445d2 3987 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3df47675 3988 copy_phi (phi, valid_info);
c1f445d2 3989 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3e871d4d 3990 ref, vn_reference_t, hir)
3df47675 3991 copy_reference (ref, valid_info);
3992
3993 current_info = valid_info;
9e9e6e3e 3994}
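
/* For example, for the hypothetical self-referential cycle

     # i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the first optimistic iteration treats the backedge value as VN_TOP
   and derives i_1 == 0 and i_2 == 0; the second iteration confirms
   that assumption, so the whole SCC is value numbered to 0.  */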
3995
000ef0a0 3996
3997/* Pop the components of the found SCC for NAME off the SCC stack
3998 and process them. Returns true if all went well, false if
3999 we run into resource limits. */
4000
4001static bool
4002extract_and_process_scc_for_name (tree name)
4003{
c2078b80 4004 auto_vec<tree> scc;
000ef0a0 4005 tree x;
4006
4007 /* Found an SCC, pop the components off the SCC stack and
4008 process them. */
4009 do
4010 {
f1f41a6c 4011 x = sccstack.pop ();
000ef0a0 4012
4013 VN_INFO (x)->on_sccstack = false;
f1f41a6c 4014 scc.safe_push (x);
000ef0a0 4015 } while (x != name);
4016
4017 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
f1f41a6c 4018 if (scc.length ()
000ef0a0 4019 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4020 {
4021 if (dump_file)
4022 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
f1f41a6c 4023 "SCC size %u exceeding %u\n", scc.length (),
000ef0a0 4024 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
83b709f2 4025
000ef0a0 4026 return false;
4027 }
4028
f1f41a6c 4029 if (scc.length () > 1)
000ef0a0 4030 sort_scc (scc);
4031
000ef0a0 4032 process_scc (scc);
4033
000ef0a0 4034 return true;
4035}
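
/* The size limit consulted above is the user-visible
   --param sccvn-max-scc-size knob; for very large SCCs the optimistic
   iteration in process_scc becomes too costly, so we give up and let
   the caller skip value numbering entirely.  */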
4036
9e9e6e3e 4037/* Depth first search on NAME to discover and process SCC's in the SSA
4038 graph.
4039 Execution of this algorithm relies on the fact that the SCC's are
a9b2282e 4040 popped off the stack in topological order.
4041 Returns true if successful, false if we stopped processing SCC's due
f0b5f617 4042 to resource constraints. */
9e9e6e3e 4043
a9b2282e 4044static bool
9e9e6e3e 4045DFS (tree name)
4046{
1e094109 4047 vec<ssa_op_iter> itervec = vNULL;
4048 vec<tree> namevec = vNULL;
000ef0a0 4049 use_operand_p usep = NULL;
75a70cf9 4050 gimple defstmt;
4051 tree use;
9e9e6e3e 4052 ssa_op_iter iter;
9e9e6e3e 4053
000ef0a0 4054start_over:
9e9e6e3e 4055 /* SCC info */
4056 VN_INFO (name)->dfsnum = next_dfs_num++;
4057 VN_INFO (name)->visited = true;
4058 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4059
f1f41a6c 4060 sccstack.safe_push (name);
9e9e6e3e 4061 VN_INFO (name)->on_sccstack = true;
4062 defstmt = SSA_NAME_DEF_STMT (name);
4063
4064 /* Recursively DFS on our operands, looking for SCC's. */
75a70cf9 4065 if (!gimple_nop_p (defstmt))
9e9e6e3e 4066 {
000ef0a0 4067 /* Push a new iterator. */
1a91d914 4068 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4069 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
000ef0a0 4070 else
4071 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4072 }
4073 else
5383fb56 4074 clear_and_done_ssa_iter (&iter);
000ef0a0 4075
4076 while (1)
4077 {
4078 /* If we are done processing uses of a name, go up the stack
4079 of iterators and process SCCs as we found them. */
4080 if (op_iter_done (&iter))
9e9e6e3e 4081 {
000ef0a0 4082 /* See if we found an SCC. */
4083 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4084 if (!extract_and_process_scc_for_name (name))
4085 {
f1f41a6c 4086 namevec.release ();
4087 itervec.release ();
000ef0a0 4088 return false;
4089 }
9e9e6e3e 4090
000ef0a0 4091 /* Check if we are done. */
f1f41a6c 4092 if (namevec.is_empty ())
000ef0a0 4093 {
f1f41a6c 4094 namevec.release ();
4095 itervec.release ();
000ef0a0 4096 return true;
4097 }
4098
4099 /* Restore the last use walker and continue walking there. */
4100 use = name;
f1f41a6c 4101 name = namevec.pop ();
4102 memcpy (&iter, &itervec.last (),
000ef0a0 4103 sizeof (ssa_op_iter));
f1f41a6c 4104 itervec.pop ();
000ef0a0 4105 goto continue_walking;
4106 }
9e9e6e3e 4107
000ef0a0 4108 use = USE_FROM_PTR (usep);
4109
4110 /* Since we handle phi nodes, we will sometimes get
4111 invariants in the use expression. */
4112 if (TREE_CODE (use) == SSA_NAME)
4113 {
9e9e6e3e 4114 if (! (VN_INFO (use)->visited))
4115 {
000ef0a0 4116 /* Recurse by pushing the current use walking state on
4117 the stack and starting over. */
f1f41a6c 4118 itervec.safe_push (iter);
4119 namevec.safe_push (name);
000ef0a0 4120 name = use;
4121 goto start_over;
4122
4123continue_walking:
9e9e6e3e 4124 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4125 VN_INFO (use)->low);
4126 }
4127 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4128 && VN_INFO (use)->on_sccstack)
4129 {
4130 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4131 VN_INFO (name)->low);
4132 }
4133 }
a9b2282e 4134
000ef0a0 4135 usep = op_iter_next_use (&iter);
9e9e6e3e 4136 }
4137}
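
/* This is Tarjan's SCC algorithm made iterative: instead of recursing
   into each use we push the current name and operand iterator onto
   namevec/itervec and jump back to start_over, which keeps the stack
   usage bounded for very deep SSA use-def chains.  */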
4138
9e9e6e3e 4139/* Allocate a value number table. */
4140
4141static void
4142allocate_vn_table (vn_tables_t table)
4143{
c1f445d2 4144 table->phis = new vn_phi_table_type (23);
4145 table->nary = new vn_nary_op_table_type (23);
4146 table->references = new vn_reference_table_type (23);
9e9e6e3e 4147
51a23cfc 4148 gcc_obstack_init (&table->nary_obstack);
d27a7bc4 4149 table->phis_pool = new pool_allocator<vn_phi_s> ("VN phis", 30);
4150 table->references_pool = new pool_allocator<vn_reference_s> ("VN references",
4151 30);
9e9e6e3e 4152}
4153
4154/* Free a value number table. */
4155
4156static void
4157free_vn_table (vn_tables_t table)
4158{
c1f445d2 4159 delete table->phis;
4160 table->phis = NULL;
4161 delete table->nary;
4162 table->nary = NULL;
4163 delete table->references;
4164 table->references = NULL;
51a23cfc 4165 obstack_free (&table->nary_obstack, NULL);
d27a7bc4 4166 delete table->phis_pool;
4167 delete table->references_pool;
9e9e6e3e 4168}
4169
4170static void
4171init_scc_vn (void)
4172{
4173 size_t i;
4174 int j;
4175 int *rpo_numbers_temp;
9e9e6e3e 4176
4177 calculate_dominance_info (CDI_DOMINATORS);
f1f41a6c 4178 sccstack.create (0);
c1f445d2 4179 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
48e1416a 4180
f6c33c78 4181 constant_value_ids = BITMAP_ALLOC (NULL);
48e1416a 4182
9e9e6e3e 4183 next_dfs_num = 1;
f6c33c78 4184 next_value_id = 1;
48e1416a 4185
f1f41a6c 4186 vn_ssa_aux_table.create (num_ssa_names + 1);
9e9e6e3e 4187 /* The create call doesn't actually grow the vector to the right
4188 size, it just preallocates the space to do so. */
f1f41a6c 4189 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
b9584939 4190 gcc_obstack_init (&vn_ssa_aux_obstack);
4191
f1f41a6c 4192 shared_lookup_phiargs.create (0);
4193 shared_lookup_references.create (0);
fe672ac0 4194 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
a28770e1 4195 rpo_numbers_temp =
4196 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
9e9e6e3e 4197 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4198
4199 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] == bb->index
4200 means the i'th block in RPO order is bb. We want the inverse map from
4201 bb index to RPO number (if rpo_numbers_temp[0] == 5, rpo_numbers[5] == 0). */
a28770e1 4202 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
9e9e6e3e 4203 rpo_numbers[rpo_numbers_temp[j]] = j;
4204
b9584939 4205 XDELETE (rpo_numbers_temp);
9e9e6e3e 4206
4207 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4208
4209 /* Create the VN_INFO structures, and initialize value numbers to
4210 TOP. */
4211 for (i = 0; i < num_ssa_names; i++)
4212 {
4213 tree name = ssa_name (i);
4214 if (name)
4215 {
4216 VN_INFO_GET (name)->valnum = VN_TOP;
75a70cf9 4217 VN_INFO (name)->expr = NULL_TREE;
f6c33c78 4218 VN_INFO (name)->value_id = 0;
9e9e6e3e 4219 }
4220 }
4221
ec415c45 4222 renumber_gimple_stmt_uids ();
9e9e6e3e 4223
4224 /* Create the valid and optimistic value numbering tables. */
4225 valid_info = XCNEW (struct vn_tables_s);
4226 allocate_vn_table (valid_info);
4227 optimistic_info = XCNEW (struct vn_tables_s);
4228 allocate_vn_table (optimistic_info);
9e9e6e3e 4229}
4230
4231void
4232free_scc_vn (void)
4233{
4234 size_t i;
4235
c1f445d2 4236 delete constant_to_value_id;
4237 constant_to_value_id = NULL;
f6c33c78 4238 BITMAP_FREE (constant_value_ids);
f1f41a6c 4239 shared_lookup_phiargs.release ();
4240 shared_lookup_references.release ();
9e9e6e3e 4241 XDELETEVEC (rpo_numbers);
b9584939 4242
9e9e6e3e 4243 for (i = 0; i < num_ssa_names; i++)
4244 {
4245 tree name = ssa_name (i);
1d9353f3 4246 if (name
4247 && VN_INFO (name)->needs_insertion)
4248 release_ssa_name (name);
9e9e6e3e 4249 }
b9584939 4250 obstack_free (&vn_ssa_aux_obstack, NULL);
f1f41a6c 4251 vn_ssa_aux_table.release ();
b9584939 4252
f1f41a6c 4253 sccstack.release ();
9e9e6e3e 4254 free_vn_table (valid_info);
4255 XDELETE (valid_info);
4256 free_vn_table (optimistic_info);
4257 XDELETE (optimistic_info);
9e9e6e3e 4258}
4259
63628665 4260/* Set *ID according to RESULT. */
f8ce304c 4261
4262static void
4263set_value_id_for_result (tree result, unsigned int *id)
4264{
63628665 4265 if (result && TREE_CODE (result) == SSA_NAME)
4266 *id = VN_INFO (result)->value_id;
4267 else if (result && is_gimple_min_invariant (result))
4268 *id = get_or_alloc_constant_value_id (result);
4269 else
4270 *id = get_next_value_id ();
f8ce304c 4271}
4272
8883e700 4273/* Set the value ids in the valid hash tables. */
f6c33c78 4274
4275static void
4276set_hashtable_value_ids (void)
4277{
3e871d4d 4278 vn_nary_op_iterator_type hin;
4279 vn_phi_iterator_type hip;
4280 vn_reference_iterator_type hir;
f6c33c78 4281 vn_nary_op_t vno;
4282 vn_reference_t vr;
4283 vn_phi_t vp;
8883e700 4284
f6c33c78 4285 /* Now set the value ids of the things we had put in the hash
4286 table. */
4287
c1f445d2 4288 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
f8ce304c 4289 set_value_id_for_result (vno->result, &vno->value_id);
f6c33c78 4290
c1f445d2 4291 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
f8ce304c 4292 set_value_id_for_result (vp->result, &vp->value_id);
f6c33c78 4293
c1f445d2 4294 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4295 hir)
f8ce304c 4296 set_value_id_for_result (vr->result, &vr->value_id);
f6c33c78 4297}
4298
85e9a542 4299class cond_dom_walker : public dom_walker
4300{
4301public:
4302 cond_dom_walker () : dom_walker (CDI_DOMINATORS), fail (false) {}
4303
4304 virtual void before_dom_children (basic_block);
4305
4306 bool fail;
4307};
4308
4309void
4310cond_dom_walker::before_dom_children (basic_block bb)
4311{
4312 edge e;
4313 edge_iterator ei;
4314
4315 if (fail)
4316 return;
4317
22499a39 4318 /* If any of the predecessor edges that do not come from blocks dominated
4319 by us are still marked as possibly executable, consider this block
4320 reachable. */
85e9a542 4321 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4322 FOR_EACH_EDGE (e, ei, bb->preds)
22499a39 4323 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4324 reachable |= (e->flags & EDGE_EXECUTABLE);
85e9a542 4325
4326 /* If the block is not reachable all outgoing edges are not
4327 executable. */
4328 if (!reachable)
4329 {
4330 if (dump_file && (dump_flags & TDF_DETAILS))
4331 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4332 "BB %d as not executable\n", bb->index);
4333
4334 FOR_EACH_EDGE (e, ei, bb->succs)
4335 e->flags &= ~EDGE_EXECUTABLE;
4336 return;
4337 }
4338
4339 gimple stmt = last_stmt (bb);
4340 if (!stmt)
4341 return;
4342
c443af04 4343 enum gimple_code code = gimple_code (stmt);
4344 if (code != GIMPLE_COND
4345 && code != GIMPLE_SWITCH
4346 && code != GIMPLE_GOTO)
4347 return;
4348
4349 if (dump_file && (dump_flags & TDF_DETAILS))
4350 {
4351 fprintf (dump_file, "Value-numbering operands of stmt ending BB %d: ",
4352 bb->index);
4353 print_gimple_stmt (dump_file, stmt, 0, 0);
4354 }
4355
85e9a542 4356 /* Value-number the last stmt's SSA uses. */
4357 ssa_op_iter i;
4358 tree op;
4359 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
4360 if (VN_INFO (op)->visited == false
4361 && !DFS (op))
4362 {
4363 fail = true;
4364 return;
4365 }
4366
4367 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4368 if value-numbering can prove they are not reachable. Handling
4369 computed gotos is also possible. */
4370 tree val;
c443af04 4371 switch (code)
85e9a542 4372 {
4373 case GIMPLE_COND:
4374 {
4375 tree lhs = gimple_cond_lhs (stmt);
4376 tree rhs = gimple_cond_rhs (stmt);
4377 /* Work hard in computing the condition and take into account
4378 the valueization of the defining stmt. */
4379 if (TREE_CODE (lhs) == SSA_NAME)
4380 lhs = vn_get_expr_for (lhs);
4381 if (TREE_CODE (rhs) == SSA_NAME)
4382 rhs = vn_get_expr_for (rhs);
4383 val = fold_binary (gimple_cond_code (stmt),
4384 boolean_type_node, lhs, rhs);
4385 break;
4386 }
4387 case GIMPLE_SWITCH:
1a91d914 4388 val = gimple_switch_index (as_a <gswitch *> (stmt));
85e9a542 4389 break;
4390 case GIMPLE_GOTO:
4391 val = gimple_goto_dest (stmt);
4392 break;
4393 default:
c443af04 4394 gcc_unreachable ();
85e9a542 4395 }
4396 if (!val)
4397 return;
4398
4399 edge taken = find_taken_edge (bb, vn_valueize (val));
4400 if (!taken)
4401 return;
4402
4403 if (dump_file && (dump_flags & TDF_DETAILS))
4404 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4405 "not executable\n", bb->index, bb->index, taken->dest->index);
4406
4407 FOR_EACH_EDGE (e, ei, bb->succs)
4408 if (e != taken)
4409 e->flags &= ~EDGE_EXECUTABLE;
4410}
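
/* For example, if value numbering proves the condition of a
   hypothetical

     if (i_1 != 0)

   to be the constant 0, find_taken_edge returns the false edge and the
   loop above clears EDGE_EXECUTABLE on the true edge; blocks reachable
   only through it are then treated as unreachable by the check at the
   top of this function.  */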
4411
a9b2282e 4412/* Do SCCVN. Returns true if it finished, false if we bailed out
8f190c8a 4413 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4414 how we use the alias oracle walking during the VN process. */
a9b2282e 4415
4416bool
8f190c8a 4417run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
9e9e6e3e 4418{
85e9a542 4419 basic_block bb;
9e9e6e3e 4420 size_t i;
4421 tree param;
48e1416a 4422
8f190c8a 4423 default_vn_walk_kind = default_vn_walk_kind_;
4424
9e9e6e3e 4425 init_scc_vn ();
4426 current_info = valid_info;
4427
4428 for (param = DECL_ARGUMENTS (current_function_decl);
4429 param;
1767a056 4430 param = DECL_CHAIN (param))
9e9e6e3e 4431 {
c6dfe037 4432 tree def = ssa_default_def (cfun, param);
4433 if (def)
c443af04 4434 {
4435 VN_INFO (def)->visited = true;
4436 VN_INFO (def)->valnum = def;
4437 }
9e9e6e3e 4438 }
4439
85e9a542 4440 /* Mark all edges as possibly executable. */
4441 FOR_ALL_BB_FN (bb, cfun)
4442 {
4443 edge_iterator ei;
4444 edge e;
4445 FOR_EACH_EDGE (e, ei, bb->succs)
4446 e->flags |= EDGE_EXECUTABLE;
4447 }
4448
4449 /* Walk all blocks in dominator order, value-numbering the last stmt's
4450 SSA uses and deciding whether outgoing edges are not executable. */
4451 cond_dom_walker walker;
4452 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4453 if (walker.fail)
4454 {
4455 free_scc_vn ();
4456 return false;
4457 }
4458
4459 /* Value-number remaining SSA names. */
1d9353f3 4460 for (i = 1; i < num_ssa_names; ++i)
9e9e6e3e 4461 {
4462 tree name = ssa_name (i);
4463 if (name
4464 && VN_INFO (name)->visited == false
4465 && !has_zero_uses (name))
a9b2282e 4466 if (!DFS (name))
4467 {
4468 free_scc_vn ();
4469 return false;
4470 }
9e9e6e3e 4471 }
4472
f6c33c78 4473 /* Initialize the value ids. */
48e1416a 4474
f6c33c78 4475 for (i = 1; i < num_ssa_names; ++i)
4476 {
4477 tree name = ssa_name (i);
4478 vn_ssa_aux_t info;
4479 if (!name)
4480 continue;
4481 info = VN_INFO (name);
d94bf438 4482 if (info->valnum == name
4483 || info->valnum == VN_TOP)
f6c33c78 4484 info->value_id = get_next_value_id ();
4485 else if (is_gimple_min_invariant (info->valnum))
4486 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4487 }
48e1416a 4488
597ce871 4489 /* Propagate. */
4490 for (i = 1; i < num_ssa_names; ++i)
f6c33c78 4491 {
597ce871 4492 tree name = ssa_name (i);
4493 vn_ssa_aux_t info;
4494 if (!name)
4495 continue;
4496 info = VN_INFO (name);
4497 if (TREE_CODE (info->valnum) == SSA_NAME
4498 && info->valnum != name
4499 && info->value_id != VN_INFO (info->valnum)->value_id)
4500 info->value_id = VN_INFO (info->valnum)->value_id;
f6c33c78 4501 }
48e1416a 4502
f6c33c78 4503 set_hashtable_value_ids ();
48e1416a 4504
9e9e6e3e 4505 if (dump_file && (dump_flags & TDF_DETAILS))
4506 {
4507 fprintf (dump_file, "Value numbers:\n");
4508 for (i = 0; i < num_ssa_names; i++)
4509 {
4510 tree name = ssa_name (i);
8883e700 4511 if (name
4512 && VN_INFO (name)->visited
4513 && SSA_VAL (name) != name)
9e9e6e3e 4514 {
4515 print_generic_expr (dump_file, name, 0);
4516 fprintf (dump_file, " = ");
8883e700 4517 print_generic_expr (dump_file, SSA_VAL (name), 0);
9e9e6e3e 4518 fprintf (dump_file, "\n");
4519 }
4520 }
4521 }
a9b2282e 4522
4523 return true;
9e9e6e3e 4524}
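
/* A minimal usage sketch, modeled on the FRE/PRE passes (details
   elided):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;
     ... query SSA_VAL and the vn_*_lookup routines ...
     free_scc_vn ();
*/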
f6c33c78 4525
4526/* Return the maximum value id we have ever seen. */
4527
4528unsigned int
48e1416a 4529get_max_value_id (void)
f6c33c78 4530{
4531 return next_value_id;
4532}
4533
4534/* Return the next unique value id. */
4535
4536unsigned int
4537get_next_value_id (void)
4538{
4539 return next_value_id++;
4540}
4541
4542
127fb64d 4543/* Compare two expressions E1 and E2 and return true if they are equal. */
f6c33c78 4544
4545bool
4546expressions_equal_p (tree e1, tree e2)
4547{
127fb64d 4548 /* The obvious case. */
f6c33c78 4549 if (e1 == e2)
4550 return true;
4551
127fb64d 4552 /* If only one of them is null, they cannot be equal. */
4553 if (!e1 || !e2)
4554 return false;
4555
127fb64d 4556 /* Now perform the actual comparison. */
4557 if (TREE_CODE (e1) == TREE_CODE (e2)
4558 && operand_equal_p (e1, e2, OEP_PURE_SAME))
f6c33c78 4559 return true;
4560
4561 return false;
4562}
4563
2ac47fdf 4564
4565/* Return true if the nary operation NARY may trap. This is a copy
4566 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4567
4568bool
4569vn_nary_may_trap (vn_nary_op_t nary)
4570{
4571 tree type;
888b74b6 4572 tree rhs2 = NULL_TREE;
2ac47fdf 4573 bool honor_nans = false;
4574 bool honor_snans = false;
4575 bool fp_operation = false;
4576 bool honor_trapv = false;
4577 bool handled, ret;
4578 unsigned i;
4579
4580 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4581 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4582 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4583 {
4584 type = nary->type;
4585 fp_operation = FLOAT_TYPE_P (type);
4586 if (fp_operation)
4587 {
4588 honor_nans = flag_trapping_math && !flag_finite_math_only;
4589 honor_snans = flag_signaling_nans != 0;
4590 }
4591 else if (INTEGRAL_TYPE_P (type)
4592 && TYPE_OVERFLOW_TRAPS (type))
4593 honor_trapv = true;
4594 }
888b74b6 4595 if (nary->length >= 2)
4596 rhs2 = nary->op[1];
2ac47fdf 4597 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4598 honor_trapv,
4599 honor_nans, honor_snans, rhs2,
4600 &handled);
4601 if (handled
4602 && ret)
4603 return true;
4604
4605 for (i = 0; i < nary->length; ++i)
4606 if (tree_could_trap_p (nary->op[i]))
4607 return true;
4608
4609 return false;
4610}
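
/* For example, a hypothetical nary {TRUNC_DIV_EXPR, int, x_1, y_2} may
   trap because integer division by a non-constant divisor can divide
   by zero, while {PLUS_EXPR, unsigned, x_1, y_2} cannot trap.  */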