1 /* SCC value numbering for trees
2 Copyright (C) 2006-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62
63 /* This algorithm is based on the SCC algorithm presented by Keith
64 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
65 (http://citeseer.ist.psu.edu/41805.html). In
66 straight-line code, it is equivalent to a regular hash-based value
67 numbering that is performed in reverse postorder.
68
69 For code with cycles, there are two alternatives, both of which
70 require keeping the hashtables separate from the actual list of
71 value numbers for SSA names.
72
73 1. Iterate value numbering in an RPO walk of the blocks, removing
74 all the entries from the hashtable after each iteration (but
75 keeping the SSA name->value number mapping between iterations).
76 Iterate until it does not change.
77
78 2. Perform value numbering as part of an SCC walk on the SSA graph,
79 iterating only the cycles in the SSA graph until they do not change
80 (using a separate, optimistic hashtable for value numbering the SCC
81 operands).
82
83 The second is not just faster in practice (because most SSA graph
84 cycles do not involve all the variables in the graph), it also has
85 some nice properties.
86
87 One of these nice properties is that when we pop an SCC off the
88 stack, we are guaranteed to have processed all the operands coming from
89 *outside of that SCC*, so we do not need to do anything special to
90 ensure they have value numbers.
91
92 Another nice property is that the SCC walk is done as part of a DFS
93 of the SSA graph, which makes it easy to perform combining and
94 simplifying operations at the same time.
95
96 The code below is deliberately written in a way that makes it easy
97 to separate the SCC walk from the other work it does.
98
99 In order to propagate constants through the code, we track which
100 expressions contain constants, and use those while folding. In
101 theory, we could also track expressions whose value numbers are
102 replaced, in case we end up folding based on expression
103 identities.
104
105 In order to value number memory, we assign value numbers to vuses.
106 This enables us to note that, for example, stores to the same
107 address of the same value from the same starting memory states are
108 equivalent.
109 TODO:
110
111 1. We can iterate only the changing portions of the SCCs, but
112 I have not seen an SCC big enough for this to be a win.
113 2. If you differentiate between phi nodes for loops and phi nodes
114 for if-then-else, you can properly consider phi nodes in different
115 blocks for equivalence.
116 3. We could value number vuses in more cases, particularly, whole
117 structure copies.
118 */
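/* As an illustration of the optimistic iteration (an editorial sketch,
   not part of the original comment): for the SSA cycle

       # i_1 = PHI <0(preheader), i_2(latch)>
       i_2 = i_1 + 0;

   the back-edge argument i_2 starts out as VN_TOP, so the PHI
   value-numbers to 0, which in turn makes i_2 = 0 + 0 value-number to 0;
   the next iteration of the SCC confirms that assumption, so the whole
   cycle collapses to the constant 0, an equivalence a single
   non-iterating hash-based pass would not discover.  */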
119
120
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
125
126 /* vn_nary_op hashtable helpers. */
127
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
129 {
130 typedef vn_nary_op_s *compare_type;
131 static inline hashval_t hash (const vn_nary_op_s *);
132 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
133 };
134
135 /* Return the computed hashcode for nary operation VNO1. */
136
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
139 {
140 return vno1->hashcode;
141 }
142
143 /* Compare nary operations VNO1 and VNO2 and return true if they are
144 equivalent. */
145
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
148 {
149 return vn_nary_op_eq (vno1, vno2);
150 }
151
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
154
155
156 /* vn_phi hashtable helpers. */
157
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
160
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
162 {
163 static inline hashval_t hash (const vn_phi_s *);
164 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165 static inline void remove (vn_phi_s *);
166 };
167
168 /* Return the computed hashcode for phi operation VP1. */
169
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
172 {
173 return vp1->hashcode;
174 }
175
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
177
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
180 {
181 return vn_phi_eq (vp1, vp2);
182 }
183
184 /* Free a phi operation structure PHI. */
185
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
188 {
189 phi->phiargs.release ();
190 }
191
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
194
195
196 /* Compare two reference operands P1 and P2 for equality. Return true if
197 they are equal, and false otherwise. */
198
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
201 {
202 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
204
205 return (vro1->opcode == vro2->opcode
206 /* We do not care for differences in type qualification. */
207 && (vro1->type == vro2->type
208 || (vro1->type && vro2->type
209 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 TYPE_MAIN_VARIANT (vro2->type))))
211 && expressions_equal_p (vro1->op0, vro2->op0)
212 && expressions_equal_p (vro1->op1, vro2->op1)
213 && expressions_equal_p (vro1->op2, vro2->op2));
214 }
215
216 /* Free a reference operation structure VR. */
217
218 static inline void
219 free_reference (vn_reference_s *vr)
220 {
221 vr->operands.release ();
222 }
223
224
225 /* vn_reference hashtable helpers. */
226
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
228 {
229 static inline hashval_t hash (const vn_reference_s *);
230 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231 static inline void remove (vn_reference_s *);
232 };
233
234 /* Return the hashcode for a given reference operation VR1. */
235
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
238 {
239 return vr1->hashcode;
240 }
241
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
244 {
245 return vn_reference_eq (v, c);
246 }
247
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
250 {
251 free_reference (v);
252 }
253
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
256
257
258 /* The set of hashtables and alloc_pool's for their items. */
259
260 typedef struct vn_tables_s
261 {
262 vn_nary_op_table_type *nary;
263 vn_phi_table_type *phis;
264 vn_reference_table_type *references;
265 struct obstack nary_obstack;
266 object_allocator<vn_phi_s> *phis_pool;
267 object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
269
270
271 /* vn_constant hashtable helpers. */
272
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
274 {
275 static inline hashval_t hash (const vn_constant_s *);
276 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
277 };
278
279 /* Hash table hash function for vn_constant_t. */
280
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
283 {
284 return vc1->hashcode;
285 }
286
287 /* Hash table equality function for vn_constant_t. */
288
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
291 {
292 if (vc1->hashcode != vc2->hashcode)
293 return false;
294
295 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
296 }
297
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
300
301
302 /* Valid hashtables storing information we have proven to be
303 correct. */
304
305 static vn_tables_t valid_info;
306
307 /* Optimistic hashtables storing information we are making assumptions about
308 during iterations. */
309
310 static vn_tables_t optimistic_info;
311
312 /* Pointer to the set of hashtables that is currently being used.
313 Should always point to either the optimistic_info, or the
314 valid_info. */
315
316 static vn_tables_t current_info;
317
318
319 /* Reverse post order index for each basic block. */
320
321 static int *rpo_numbers;
322
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
324
325 /* Return the SSA value of the VUSE X, supporting released VDEFs
326 during elimination which will value-number the VDEF to the
327 associated VUSE (but not substitute in the whole lattice). */
328
329 static inline tree
330 vuse_ssa_val (tree x)
331 {
332 if (!x)
333 return NULL_TREE;
334
335 do
336 {
337 x = SSA_VAL (x);
338 }
339 while (SSA_NAME_IN_FREE_LIST (x));
340
341 return x;
342 }
343
344 /* This represents the top of the VN lattice, which is the universal
345 value. */
346
347 tree VN_TOP;
348
349 /* Unique counter for our value ids. */
350
351 static unsigned int next_value_id;
352
353 /* Next DFS number and the stack for strongly connected component
354 detection. */
355
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
358
359
360
361 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
362 are allocated on an obstack for locality reasons, and so they can be
363 freed without looping over the vec. */
364
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
367
368 /* Return whether there is value numbering information for a given SSA name. */
369
370 bool
371 has_VN_INFO (tree name)
372 {
373 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375 return false;
376 }
377
378 /* Return the value numbering information for a given SSA name. */
379
380 vn_ssa_aux_t
381 VN_INFO (tree name)
382 {
383 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384 gcc_checking_assert (res);
385 return res;
386 }
387
388 /* Set the value numbering info for a given SSA name to a given
389 value. */
390
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
393 {
394 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
395 }
396
397 /* Initialize the value numbering info for a given SSA name.
398 This should be called just once for every SSA name. */
399
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
402 {
403 vn_ssa_aux_t newinfo;
404
405 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410 vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
411 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412 return newinfo;
413 }
414
415
416 /* Return the vn_kind that the expression computed by STMT should be
417 associated with. */
418
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
421 {
422 switch (gimple_code (stmt))
423 {
424 case GIMPLE_CALL:
425 return VN_REFERENCE;
426 case GIMPLE_PHI:
427 return VN_PHI;
428 case GIMPLE_ASSIGN:
429 {
430 enum tree_code code = gimple_assign_rhs_code (stmt);
431 tree rhs1 = gimple_assign_rhs1 (stmt);
432 switch (get_gimple_rhs_class (code))
433 {
434 case GIMPLE_UNARY_RHS:
435 case GIMPLE_BINARY_RHS:
436 case GIMPLE_TERNARY_RHS:
437 return VN_NARY;
438 case GIMPLE_SINGLE_RHS:
439 switch (TREE_CODE_CLASS (code))
440 {
441 case tcc_reference:
442 /* VOP-less references can go through unary case. */
443 if ((code == REALPART_EXPR
444 || code == IMAGPART_EXPR
445 || code == VIEW_CONVERT_EXPR
446 || code == BIT_FIELD_REF)
447 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 return VN_NARY;
449
450 /* Fallthrough. */
451 case tcc_declaration:
452 return VN_REFERENCE;
453
454 case tcc_constant:
455 return VN_CONSTANT;
456
457 default:
458 if (code == ADDR_EXPR)
459 return (is_gimple_min_invariant (rhs1)
460 ? VN_CONSTANT : VN_REFERENCE);
461 else if (code == CONSTRUCTOR)
462 return VN_NARY;
463 return VN_NONE;
464 }
465 default:
466 return VN_NONE;
467 }
468 }
469 default:
470 return VN_NONE;
471 }
472 }
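/* For example (editorial, not exhaustive): x_1 = a_2 + b_3 is classified
   as VN_NARY, x_1 = *p_2 and calls as VN_REFERENCE, PHI nodes as VN_PHI,
   and x_1 = 42 as VN_CONSTANT.  */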
473
474 /* Lookup a value id for CONSTANT and return it. If it does not
475 exist, return 0. */
476
477 unsigned int
478 get_constant_value_id (tree constant)
479 {
480 vn_constant_s **slot;
481 struct vn_constant_s vc;
482
483 vc.hashcode = vn_hash_constant_with_type (constant);
484 vc.constant = constant;
485 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486 if (slot)
487 return (*slot)->value_id;
488 return 0;
489 }
490
491 /* Lookup a value id for CONSTANT and return it. If it does not
492 exist, create a new one and return that. */
493
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
496 {
497 vn_constant_s **slot;
498 struct vn_constant_s vc;
499 vn_constant_t vcp;
500
501 vc.hashcode = vn_hash_constant_with_type (constant);
502 vc.constant = constant;
503 slot = constant_to_value_id->find_slot (&vc, INSERT);
504 if (*slot)
505 return (*slot)->value_id;
506
507 vcp = XNEW (struct vn_constant_s);
508 vcp->hashcode = vc.hashcode;
509 vcp->constant = constant;
510 vcp->value_id = get_next_value_id ();
511 *slot = vcp;
512 bitmap_set_bit (constant_value_ids, vcp->value_id);
513 return vcp->value_id;
514 }
515
516 /* Return true if V is a value id for a constant. */
517
518 bool
519 value_id_constant_p (unsigned int v)
520 {
521 return bitmap_bit_p (constant_value_ids, v);
522 }
523
524 /* Compute the hash for a reference operand VRO1. */
525
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
528 {
529 hstate.add_int (vro1->opcode);
530 if (vro1->op0)
531 inchash::add_expr (vro1->op0, hstate);
532 if (vro1->op1)
533 inchash::add_expr (vro1->op1, hstate);
534 if (vro1->op2)
535 inchash::add_expr (vro1->op2, hstate);
536 }
537
538 /* Compute a hash for the reference operation VR1 and return it. */
539
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
542 {
543 inchash::hash hstate;
544 hashval_t result;
545 int i;
546 vn_reference_op_t vro;
547 HOST_WIDE_INT off = -1;
548 bool deref = false;
549
550 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
551 {
552 if (vro->opcode == MEM_REF)
553 deref = true;
554 else if (vro->opcode != ADDR_EXPR)
555 deref = false;
556 if (vro->off != -1)
557 {
558 if (off == -1)
559 off = 0;
560 off += vro->off;
561 }
562 else
563 {
564 if (off != -1
565 && off != 0)
566 hstate.add_int (off);
567 off = -1;
568 if (deref
569 && vro->opcode == ADDR_EXPR)
570 {
571 if (vro->op0)
572 {
573 tree op = TREE_OPERAND (vro->op0, 0);
574 hstate.add_int (TREE_CODE (op));
575 inchash::add_expr (op, hstate);
576 }
577 }
578 else
579 vn_reference_op_compute_hash (vro, hstate);
580 }
581 }
582 result = hstate.end ();
583 /* ??? We would ICE later if we hash instead of adding that in. */
584 if (vr1->vuse)
585 result += SSA_NAME_VERSION (vr1->vuse);
586
587 return result;
588 }
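/* Note (editorial): because the vuse's SSA_NAME_VERSION is added to the
   hash rather than mixed in, vn_reference_lookup_2 below can re-target a
   vn_reference_t to a different vuse by subtracting the old version and
   adding the new one, without recomputing the whole hash.  */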
589
590 /* Return true if reference operations VR1 and VR2 are equivalent. This
591 means they have the same set of operands and vuses. */
592
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
595 {
596 unsigned i, j;
597
598 /* Early out if this is not a hash collision. */
599 if (vr1->hashcode != vr2->hashcode)
600 return false;
601
602 /* The VOP needs to be the same. */
603 if (vr1->vuse != vr2->vuse)
604 return false;
605
606 /* If the operands are the same we are done. */
607 if (vr1->operands == vr2->operands)
608 return true;
609
610 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611 return false;
612
613 if (INTEGRAL_TYPE_P (vr1->type)
614 && INTEGRAL_TYPE_P (vr2->type))
615 {
616 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 return false;
618 }
619 else if (INTEGRAL_TYPE_P (vr1->type)
620 && (TYPE_PRECISION (vr1->type)
621 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622 return false;
623 else if (INTEGRAL_TYPE_P (vr2->type)
624 && (TYPE_PRECISION (vr2->type)
625 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626 return false;
627
628 i = 0;
629 j = 0;
630 do
631 {
632 HOST_WIDE_INT off1 = 0, off2 = 0;
633 vn_reference_op_t vro1, vro2;
634 vn_reference_op_s tem1, tem2;
635 bool deref1 = false, deref2 = false;
636 for (; vr1->operands.iterate (i, &vro1); i++)
637 {
638 if (vro1->opcode == MEM_REF)
639 deref1 = true;
640 if (vro1->off == -1)
641 break;
642 off1 += vro1->off;
643 }
644 for (; vr2->operands.iterate (j, &vro2); j++)
645 {
646 if (vro2->opcode == MEM_REF)
647 deref2 = true;
648 if (vro2->off == -1)
649 break;
650 off2 += vro2->off;
651 }
652 if (off1 != off2)
653 return false;
654 if (deref1 && vro1->opcode == ADDR_EXPR)
655 {
656 memset (&tem1, 0, sizeof (tem1));
657 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
658 tem1.type = TREE_TYPE (tem1.op0);
659 tem1.opcode = TREE_CODE (tem1.op0);
660 vro1 = &tem1;
661 deref1 = false;
662 }
663 if (deref2 && vro2->opcode == ADDR_EXPR)
664 {
665 memset (&tem2, 0, sizeof (tem2));
666 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
667 tem2.type = TREE_TYPE (tem2.op0);
668 tem2.opcode = TREE_CODE (tem2.op0);
669 vro2 = &tem2;
670 deref2 = false;
671 }
672 if (deref1 != deref2)
673 return false;
674 if (!vn_reference_op_eq (vro1, vro2))
675 return false;
676 ++j;
677 ++i;
678 }
679 while (vr1->operands.length () != i
680 || vr2->operands.length () != j);
681
682 return true;
683 }
684
685 /* Copy the operations present in load/store REF into RESULT, a vector of
686 vn_reference_op_s's. */
687
688 static void
689 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
690 {
691 if (TREE_CODE (ref) == TARGET_MEM_REF)
692 {
693 vn_reference_op_s temp;
694
695 result->reserve (3);
696
697 memset (&temp, 0, sizeof (temp));
698 temp.type = TREE_TYPE (ref);
699 temp.opcode = TREE_CODE (ref);
700 temp.op0 = TMR_INDEX (ref);
701 temp.op1 = TMR_STEP (ref);
702 temp.op2 = TMR_OFFSET (ref);
703 temp.off = -1;
704 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
705 temp.base = MR_DEPENDENCE_BASE (ref);
706 result->quick_push (temp);
707
708 memset (&temp, 0, sizeof (temp));
709 temp.type = NULL_TREE;
710 temp.opcode = ERROR_MARK;
711 temp.op0 = TMR_INDEX2 (ref);
712 temp.off = -1;
713 result->quick_push (temp);
714
715 memset (&temp, 0, sizeof (temp));
716 temp.type = NULL_TREE;
717 temp.opcode = TREE_CODE (TMR_BASE (ref));
718 temp.op0 = TMR_BASE (ref);
719 temp.off = -1;
720 result->quick_push (temp);
721 return;
722 }
723
724 /* For non-calls, store the information that makes up the address. */
725 tree orig = ref;
726 while (ref)
727 {
728 vn_reference_op_s temp;
729
730 memset (&temp, 0, sizeof (temp));
731 temp.type = TREE_TYPE (ref);
732 temp.opcode = TREE_CODE (ref);
733 temp.off = -1;
734
735 switch (temp.opcode)
736 {
737 case MODIFY_EXPR:
738 temp.op0 = TREE_OPERAND (ref, 1);
739 break;
740 case WITH_SIZE_EXPR:
741 temp.op0 = TREE_OPERAND (ref, 1);
742 temp.off = 0;
743 break;
744 case MEM_REF:
745 /* The base address gets its own vn_reference_op_s structure. */
746 temp.op0 = TREE_OPERAND (ref, 1);
747 if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
748 temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
749 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
750 temp.base = MR_DEPENDENCE_BASE (ref);
751 break;
752 case BIT_FIELD_REF:
753 /* Record bits and position. */
754 temp.op0 = TREE_OPERAND (ref, 1);
755 temp.op1 = TREE_OPERAND (ref, 2);
756 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
757 {
758 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
759 if (off % BITS_PER_UNIT == 0)
760 temp.off = off / BITS_PER_UNIT;
761 }
762 break;
763 case COMPONENT_REF:
764 /* The field decl is enough to unambiguously specify the field,
765 a matching type is not necessary and a mismatching type
766 is always a spurious difference. */
767 temp.type = NULL_TREE;
768 temp.op0 = TREE_OPERAND (ref, 1);
769 temp.op1 = TREE_OPERAND (ref, 2);
770 {
771 tree this_offset = component_ref_field_offset (ref);
772 if (this_offset
773 && TREE_CODE (this_offset) == INTEGER_CST)
774 {
775 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
776 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
777 {
778 offset_int off
779 = (wi::to_offset (this_offset)
780 + wi::lrshift (wi::to_offset (bit_offset),
781 LOG2_BITS_PER_UNIT));
782 if (wi::fits_shwi_p (off)
783 /* Prohibit value-numbering zero offset components
784 of addresses the same before the pass folding
785 __builtin_object_size had a chance to run
786 (checking cfun->after_inlining does the
787 trick here). */
788 && (TREE_CODE (orig) != ADDR_EXPR
789 || off != 0
790 || cfun->after_inlining))
791 temp.off = off.to_shwi ();
792 }
793 }
794 }
795 break;
796 case ARRAY_RANGE_REF:
797 case ARRAY_REF:
798 /* Record index as operand. */
799 temp.op0 = TREE_OPERAND (ref, 1);
800 /* Always record lower bounds and element size. */
801 temp.op1 = array_ref_low_bound (ref);
802 temp.op2 = array_ref_element_size (ref);
803 if (TREE_CODE (temp.op0) == INTEGER_CST
804 && TREE_CODE (temp.op1) == INTEGER_CST
805 && TREE_CODE (temp.op2) == INTEGER_CST)
806 {
807 offset_int off = ((wi::to_offset (temp.op0)
808 - wi::to_offset (temp.op1))
809 * wi::to_offset (temp.op2));
810 if (wi::fits_shwi_p (off))
811 temp.off = off.to_shwi();
812 }
813 break;
814 case VAR_DECL:
815 if (DECL_HARD_REGISTER (ref))
816 {
817 temp.op0 = ref;
818 break;
819 }
820 /* Fallthru. */
821 case PARM_DECL:
822 case CONST_DECL:
823 case RESULT_DECL:
824 /* Canonicalize decls to MEM[&decl] which is what we end up with
825 when valueizing MEM[ptr] with ptr = &decl. */
826 temp.opcode = MEM_REF;
827 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
828 temp.off = 0;
829 result->safe_push (temp);
830 temp.opcode = ADDR_EXPR;
831 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
832 temp.type = TREE_TYPE (temp.op0);
833 temp.off = -1;
834 break;
835 case STRING_CST:
836 case INTEGER_CST:
837 case COMPLEX_CST:
838 case VECTOR_CST:
839 case REAL_CST:
840 case FIXED_CST:
841 case CONSTRUCTOR:
842 case SSA_NAME:
843 temp.op0 = ref;
844 break;
845 case ADDR_EXPR:
846 if (is_gimple_min_invariant (ref))
847 {
848 temp.op0 = ref;
849 break;
850 }
851 break;
852 /* These are only interesting for their operands, their
853 existence, and their type. They will never be the last
854 ref in the chain of references (i.e. they require an
855 operand), so we don't have to put anything
856 for op* as it will be handled by the iteration. */
857 case REALPART_EXPR:
858 case VIEW_CONVERT_EXPR:
859 temp.off = 0;
860 break;
861 case IMAGPART_EXPR:
862 /* This is only interesting for its constant offset. */
863 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
864 break;
865 default:
866 gcc_unreachable ();
867 }
868 result->safe_push (temp);
869
870 if (REFERENCE_CLASS_P (ref)
871 || TREE_CODE (ref) == MODIFY_EXPR
872 || TREE_CODE (ref) == WITH_SIZE_EXPR
873 || (TREE_CODE (ref) == ADDR_EXPR
874 && !is_gimple_min_invariant (ref)))
875 ref = TREE_OPERAND (ref, 0);
876 else
877 ref = NULL_TREE;
878 }
879 }
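/* For illustration (an editorial sketch): a load from a.f, with a a
   local VAR_DECL, decomposes into the operand vector

       { COMPONENT_REF .f }  { MEM_REF, off 0 }  { ADDR_EXPR &a }

   i.e. the access path from the outermost component inwards, with the
   decl canonicalized to MEM[&a] as described above.  */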
880
881 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
882 operands in *OPS, the reference alias set SET and the reference type TYPE.
883 Return true if something useful was produced. */
884
885 bool
886 ao_ref_init_from_vn_reference (ao_ref *ref,
887 alias_set_type set, tree type,
888 vec<vn_reference_op_s> ops)
889 {
890 vn_reference_op_t op;
891 unsigned i;
892 tree base = NULL_TREE;
893 tree *op0_p = &base;
894 offset_int offset = 0;
895 offset_int max_size;
896 offset_int size = -1;
897 tree size_tree = NULL_TREE;
898 alias_set_type base_alias_set = -1;
899
900 /* First get the final access size from just the outermost expression. */
901 op = &ops[0];
902 if (op->opcode == COMPONENT_REF)
903 size_tree = DECL_SIZE (op->op0);
904 else if (op->opcode == BIT_FIELD_REF)
905 size_tree = op->op0;
906 else
907 {
908 machine_mode mode = TYPE_MODE (type);
909 if (mode == BLKmode)
910 size_tree = TYPE_SIZE (type);
911 else
912 size = int (GET_MODE_BITSIZE (mode));
913 }
914 if (size_tree != NULL_TREE
915 && TREE_CODE (size_tree) == INTEGER_CST)
916 size = wi::to_offset (size_tree);
917
918 /* Initially, maxsize is the same as the accessed element size.
919 In the following it will only grow (or become -1). */
920 max_size = size;
921
922 /* Compute cumulative bit-offset for nested component-refs and array-refs,
923 and find the ultimate containing object. */
924 FOR_EACH_VEC_ELT (ops, i, op)
925 {
926 switch (op->opcode)
927 {
928 /* These may be in the reference ops, but we cannot do anything
929 sensible with them here. */
930 case ADDR_EXPR:
931 /* Apart from ADDR_EXPR arguments to MEM_REF. */
932 if (base != NULL_TREE
933 && TREE_CODE (base) == MEM_REF
934 && op->op0
935 && DECL_P (TREE_OPERAND (op->op0, 0)))
936 {
937 vn_reference_op_t pop = &ops[i-1];
938 base = TREE_OPERAND (op->op0, 0);
939 if (pop->off == -1)
940 {
941 max_size = -1;
942 offset = 0;
943 }
944 else
945 offset += pop->off * BITS_PER_UNIT;
946 op0_p = NULL;
947 break;
948 }
949 /* Fallthru. */
950 case CALL_EXPR:
951 return false;
952
953 /* Record the base objects. */
954 case MEM_REF:
955 base_alias_set = get_deref_alias_set (op->op0);
956 *op0_p = build2 (MEM_REF, op->type,
957 NULL_TREE, op->op0);
958 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
959 MR_DEPENDENCE_BASE (*op0_p) = op->base;
960 op0_p = &TREE_OPERAND (*op0_p, 0);
961 break;
962
963 case VAR_DECL:
964 case PARM_DECL:
965 case RESULT_DECL:
966 case SSA_NAME:
967 *op0_p = op->op0;
968 op0_p = NULL;
969 break;
970
971 /* And now the usual component-reference style ops. */
972 case BIT_FIELD_REF:
973 offset += wi::to_offset (op->op1);
974 break;
975
976 case COMPONENT_REF:
977 {
978 tree field = op->op0;
979 /* We do not have a complete COMPONENT_REF tree here so we
980 cannot use component_ref_field_offset. Do the interesting
981 parts manually. */
982 tree this_offset = DECL_FIELD_OFFSET (field);
983
984 if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
985 max_size = -1;
986 else
987 {
988 offset_int woffset = wi::lshift (wi::to_offset (this_offset),
989 LOG2_BITS_PER_UNIT);
990 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
991 offset += woffset;
992 }
993 break;
994 }
995
996 case ARRAY_RANGE_REF:
997 case ARRAY_REF:
998 /* We recorded the lower bound and the element size. */
999 if (TREE_CODE (op->op0) != INTEGER_CST
1000 || TREE_CODE (op->op1) != INTEGER_CST
1001 || TREE_CODE (op->op2) != INTEGER_CST)
1002 max_size = -1;
1003 else
1004 {
1005 offset_int woffset
1006 = wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1007 TYPE_PRECISION (TREE_TYPE (op->op0)));
1008 woffset *= wi::to_offset (op->op2);
1009 woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1010 offset += woffset;
1011 }
1012 break;
1013
1014 case REALPART_EXPR:
1015 break;
1016
1017 case IMAGPART_EXPR:
1018 offset += size;
1019 break;
1020
1021 case VIEW_CONVERT_EXPR:
1022 break;
1023
1024 case STRING_CST:
1025 case INTEGER_CST:
1026 case COMPLEX_CST:
1027 case VECTOR_CST:
1028 case REAL_CST:
1029 case CONSTRUCTOR:
1030 case CONST_DECL:
1031 return false;
1032
1033 default:
1034 return false;
1035 }
1036 }
1037
1038 if (base == NULL_TREE)
1039 return false;
1040
1041 ref->ref = NULL_TREE;
1042 ref->base = base;
1043 ref->ref_alias_set = set;
1044 if (base_alias_set != -1)
1045 ref->base_alias_set = base_alias_set;
1046 else
1047 ref->base_alias_set = get_alias_set (base);
1048 /* We discount volatiles from value-numbering elsewhere. */
1049 ref->volatile_p = false;
1050
1051 if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1052 {
1053 ref->offset = 0;
1054 ref->size = -1;
1055 ref->max_size = -1;
1056 return true;
1057 }
1058
1059 ref->size = size.to_shwi ();
1060
1061 if (!wi::fits_shwi_p (offset))
1062 {
1063 ref->offset = 0;
1064 ref->max_size = -1;
1065 return true;
1066 }
1067
1068 ref->offset = offset.to_shwi ();
1069
1070 if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1071 ref->max_size = -1;
1072 else
1073 ref->max_size = max_size.to_shwi ();
1074
1075 return true;
1076 }
1077
1078 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1079 vn_reference_op_s's. */
1080
1081 static void
1082 copy_reference_ops_from_call (gcall *call,
1083 vec<vn_reference_op_s> *result)
1084 {
1085 vn_reference_op_s temp;
1086 unsigned i;
1087 tree lhs = gimple_call_lhs (call);
1088 int lr;
1089
1090 /* If two calls have different non-SSA LHSs, their vdef value numbers
1091 should be different. By adding the LHS to the vector here, we ensure
1092 that the hashcode is different, guaranteeing a different value number. */
1093 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1094 {
1095 memset (&temp, 0, sizeof (temp));
1096 temp.opcode = MODIFY_EXPR;
1097 temp.type = TREE_TYPE (lhs);
1098 temp.op0 = lhs;
1099 temp.off = -1;
1100 result->safe_push (temp);
1101 }
1102
1103 /* Copy the type, opcode, function, static chain and EH region, if any. */
1104 memset (&temp, 0, sizeof (temp));
1105 temp.type = gimple_call_return_type (call);
1106 temp.opcode = CALL_EXPR;
1107 temp.op0 = gimple_call_fn (call);
1108 temp.op1 = gimple_call_chain (call);
1109 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1110 temp.op2 = size_int (lr);
1111 temp.off = -1;
1112 if (gimple_call_with_bounds_p (call))
1113 temp.with_bounds = 1;
1114 result->safe_push (temp);
1115
1116 /* Copy the call arguments. As they can be references as well,
1117 just chain them together. */
1118 for (i = 0; i < gimple_call_num_args (call); ++i)
1119 {
1120 tree callarg = gimple_call_arg (call, i);
1121 copy_reference_ops_from_ref (callarg, result);
1122 }
1123 }
1124
1125 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1126 *I_P to point to the last element of the replacement. */
1127 static bool
1128 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1129 unsigned int *i_p)
1130 {
1131 unsigned int i = *i_p;
1132 vn_reference_op_t op = &(*ops)[i];
1133 vn_reference_op_t mem_op = &(*ops)[i - 1];
1134 tree addr_base;
1135 HOST_WIDE_INT addr_offset = 0;
1136
1137 /* All we have to do is add the offset of .foo.bar within &OBJ.foo.bar
1138 to the preceding MEM_REF offset and replace the
1139 address with &OBJ. */
1140 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1141 &addr_offset);
1142 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1143 if (addr_base != TREE_OPERAND (op->op0, 0))
1144 {
1145 offset_int off = offset_int::from (mem_op->op0, SIGNED);
1146 off += addr_offset;
1147 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1148 op->op0 = build_fold_addr_expr (addr_base);
1149 if (tree_fits_shwi_p (mem_op->op0))
1150 mem_op->off = tree_to_shwi (mem_op->op0);
1151 else
1152 mem_op->off = -1;
1153 return true;
1154 }
1155 return false;
1156 }
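/* For example (editorial sketch): with b at byte offset 8 in its containing
   object, the operand pair { MEM_REF, off 0 } { ADDR_EXPR &a.b } is
   rewritten by the function above to { MEM_REF, off 8 } { ADDR_EXPR &a }.  */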
1157
1158 /* Forward-propagate the address at position *I_P in *OPS through its SSA
1159 definition. Updates *I_P to point to the last element of the replacement. */
1160 static bool
1161 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1162 unsigned int *i_p)
1163 {
1164 unsigned int i = *i_p;
1165 vn_reference_op_t op = &(*ops)[i];
1166 vn_reference_op_t mem_op = &(*ops)[i - 1];
1167 gimple *def_stmt;
1168 enum tree_code code;
1169 offset_int off;
1170
1171 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1172 if (!is_gimple_assign (def_stmt))
1173 return false;
1174
1175 code = gimple_assign_rhs_code (def_stmt);
1176 if (code != ADDR_EXPR
1177 && code != POINTER_PLUS_EXPR)
1178 return false;
1179
1180 off = offset_int::from (mem_op->op0, SIGNED);
1181
1182 /* All we have to do is add the offset of .foo.bar within &OBJ.foo.bar
1183 to the preceding MEM_REF offset and replace the
1184 address with &OBJ. */
1185 if (code == ADDR_EXPR)
1186 {
1187 tree addr, addr_base;
1188 HOST_WIDE_INT addr_offset;
1189
1190 addr = gimple_assign_rhs1 (def_stmt);
1191 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1192 &addr_offset);
1193 /* If that didn't work because the address isn't invariant, propagate
1194 the reference tree from the address operation in case the current
1195 dereference isn't offset. */
1196 if (!addr_base
1197 && *i_p == ops->length () - 1
1198 && off == 0
1199 /* This makes us disable this transform for PRE where the
1200 reference ops might also be used for code insertion, which
1201 is invalid. */
1202 && default_vn_walk_kind == VN_WALKREWRITE)
1203 {
1204 auto_vec<vn_reference_op_s, 32> tem;
1205 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1206 ops->pop ();
1207 ops->pop ();
1208 ops->safe_splice (tem);
1209 --*i_p;
1210 return true;
1211 }
1212 if (!addr_base
1213 || TREE_CODE (addr_base) != MEM_REF)
1214 return false;
1215
1216 off += addr_offset;
1217 off += mem_ref_offset (addr_base);
1218 op->op0 = TREE_OPERAND (addr_base, 0);
1219 }
1220 else
1221 {
1222 tree ptr, ptroff;
1223 ptr = gimple_assign_rhs1 (def_stmt);
1224 ptroff = gimple_assign_rhs2 (def_stmt);
1225 if (TREE_CODE (ptr) != SSA_NAME
1226 || TREE_CODE (ptroff) != INTEGER_CST)
1227 return false;
1228
1229 off += wi::to_offset (ptroff);
1230 op->op0 = ptr;
1231 }
1232
1233 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1234 if (tree_fits_shwi_p (mem_op->op0))
1235 mem_op->off = tree_to_shwi (mem_op->op0);
1236 else
1237 mem_op->off = -1;
1238 if (TREE_CODE (op->op0) == SSA_NAME)
1239 op->op0 = SSA_VAL (op->op0);
1240 if (TREE_CODE (op->op0) != SSA_NAME)
1241 op->opcode = TREE_CODE (op->op0);
1242
1243 /* And recurse. */
1244 if (TREE_CODE (op->op0) == SSA_NAME)
1245 vn_reference_maybe_forwprop_address (ops, i_p);
1246 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1247 vn_reference_fold_indirect (ops, i_p);
1248 return true;
1249 }
1250
1251 /* Optimize the reference REF to a constant if possible or return
1252 NULL_TREE if not. */
1253
1254 tree
1255 fully_constant_vn_reference_p (vn_reference_t ref)
1256 {
1257 vec<vn_reference_op_s> operands = ref->operands;
1258 vn_reference_op_t op;
1259
1260 /* Try to simplify the translated expression if it is
1261 a call to a builtin function with at most two arguments. */
1262 op = &operands[0];
1263 if (op->opcode == CALL_EXPR
1264 && TREE_CODE (op->op0) == ADDR_EXPR
1265 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1266 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1267 && operands.length () >= 2
1268 && operands.length () <= 3)
1269 {
1270 vn_reference_op_t arg0, arg1 = NULL;
1271 bool anyconst = false;
1272 arg0 = &operands[1];
1273 if (operands.length () > 2)
1274 arg1 = &operands[2];
1275 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1276 || (arg0->opcode == ADDR_EXPR
1277 && is_gimple_min_invariant (arg0->op0)))
1278 anyconst = true;
1279 if (arg1
1280 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1281 || (arg1->opcode == ADDR_EXPR
1282 && is_gimple_min_invariant (arg1->op0))))
1283 anyconst = true;
1284 if (anyconst)
1285 {
1286 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1287 arg1 ? 2 : 1,
1288 arg0->op0,
1289 arg1 ? arg1->op0 : NULL);
1290 if (folded
1291 && TREE_CODE (folded) == NOP_EXPR)
1292 folded = TREE_OPERAND (folded, 0);
1293 if (folded
1294 && is_gimple_min_invariant (folded))
1295 return folded;
1296 }
1297 }
1298
1299 /* Simplify reads from constants or constant initializers. */
1300 else if (BITS_PER_UNIT == 8
1301 && is_gimple_reg_type (ref->type)
1302 && (!INTEGRAL_TYPE_P (ref->type)
1303 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1304 {
1305 HOST_WIDE_INT off = 0;
1306 HOST_WIDE_INT size;
1307 if (INTEGRAL_TYPE_P (ref->type))
1308 size = TYPE_PRECISION (ref->type);
1309 else
1310 size = tree_to_shwi (TYPE_SIZE (ref->type));
1311 if (size % BITS_PER_UNIT != 0
1312 || size > MAX_BITSIZE_MODE_ANY_MODE)
1313 return NULL_TREE;
1314 size /= BITS_PER_UNIT;
1315 unsigned i;
1316 for (i = 0; i < operands.length (); ++i)
1317 {
1318 if (operands[i].off == -1)
1319 return NULL_TREE;
1320 off += operands[i].off;
1321 if (operands[i].opcode == MEM_REF)
1322 {
1323 ++i;
1324 break;
1325 }
1326 }
1327 vn_reference_op_t base = &operands[--i];
1328 tree ctor = error_mark_node;
1329 tree decl = NULL_TREE;
1330 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1331 ctor = base->op0;
1332 else if (base->opcode == MEM_REF
1333 && base[1].opcode == ADDR_EXPR
1334 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1335 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1336 {
1337 decl = TREE_OPERAND (base[1].op0, 0);
1338 ctor = ctor_for_folding (decl);
1339 }
1340 if (ctor == NULL_TREE)
1341 return build_zero_cst (ref->type);
1342 else if (ctor != error_mark_node)
1343 {
1344 if (decl)
1345 {
1346 tree res = fold_ctor_reference (ref->type, ctor,
1347 off * BITS_PER_UNIT,
1348 size * BITS_PER_UNIT, decl);
1349 if (res)
1350 {
1351 STRIP_USELESS_TYPE_CONVERSION (res);
1352 if (is_gimple_min_invariant (res))
1353 return res;
1354 }
1355 }
1356 else
1357 {
1358 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1359 if (native_encode_expr (ctor, buf, size, off) > 0)
1360 return native_interpret_expr (ref->type, buf, size);
1361 }
1362 }
1363 }
1364
1365 return NULL_TREE;
1366 }
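/* Illustrative sketch (editorial): a lookup of c[1] with

       static const int c[2] = { 1, 2 };

   takes the constant-initializer path above: the operand offsets sum to
   4 bytes up to the MEM_REF, ctor_for_folding yields the initializer and
   fold_ctor_reference returns the constant 2.  */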
1367
1368 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1369 structures into their value numbers. This is done in-place, and
1370 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1371 whether any operands were valueized. */
1372
1373 static vec<vn_reference_op_s>
1374 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1375 {
1376 vn_reference_op_t vro;
1377 unsigned int i;
1378
1379 *valueized_anything = false;
1380
1381 FOR_EACH_VEC_ELT (orig, i, vro)
1382 {
1383 if (vro->opcode == SSA_NAME
1384 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1385 {
1386 tree tem = SSA_VAL (vro->op0);
1387 if (tem != vro->op0)
1388 {
1389 *valueized_anything = true;
1390 vro->op0 = tem;
1391 }
1392 /* If it transforms from an SSA_NAME to a constant, update
1393 the opcode. */
1394 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1395 vro->opcode = TREE_CODE (vro->op0);
1396 }
1397 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1398 {
1399 tree tem = SSA_VAL (vro->op1);
1400 if (tem != vro->op1)
1401 {
1402 *valueized_anything = true;
1403 vro->op1 = tem;
1404 }
1405 }
1406 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1407 {
1408 tree tem = SSA_VAL (vro->op2);
1409 if (tem != vro->op2)
1410 {
1411 *valueized_anything = true;
1412 vro->op2 = tem;
1413 }
1414 }
1415 /* If it transforms from an SSA_NAME to an address, fold with
1416 a preceding indirect reference. */
1417 if (i > 0
1418 && vro->op0
1419 && TREE_CODE (vro->op0) == ADDR_EXPR
1420 && orig[i - 1].opcode == MEM_REF)
1421 {
1422 if (vn_reference_fold_indirect (&orig, &i))
1423 *valueized_anything = true;
1424 }
1425 else if (i > 0
1426 && vro->opcode == SSA_NAME
1427 && orig[i - 1].opcode == MEM_REF)
1428 {
1429 if (vn_reference_maybe_forwprop_address (&orig, &i))
1430 *valueized_anything = true;
1431 }
1432 /* If it transforms a non-constant ARRAY_REF into a constant
1433 one, adjust the constant offset. */
1434 else if (vro->opcode == ARRAY_REF
1435 && vro->off == -1
1436 && TREE_CODE (vro->op0) == INTEGER_CST
1437 && TREE_CODE (vro->op1) == INTEGER_CST
1438 && TREE_CODE (vro->op2) == INTEGER_CST)
1439 {
1440 offset_int off = ((wi::to_offset (vro->op0)
1441 - wi::to_offset (vro->op1))
1442 * wi::to_offset (vro->op2));
1443 if (wi::fits_shwi_p (off))
1444 vro->off = off.to_shwi ();
1445 }
1446 }
1447
1448 return orig;
1449 }
1450
1451 static vec<vn_reference_op_s>
1452 valueize_refs (vec<vn_reference_op_s> orig)
1453 {
1454 bool tem;
1455 return valueize_refs_1 (orig, &tem);
1456 }
1457
1458 static vec<vn_reference_op_s> shared_lookup_references;
1459
1460 /* Create a vector of vn_reference_op_s structures from REF, a
1461 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1462 this function. *VALUEIZED_ANYTHING will specify whether any
1463 operands were valueized. */
1464
1465 static vec<vn_reference_op_s>
1466 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1467 {
1468 if (!ref)
1469 return vNULL;
1470 shared_lookup_references.truncate (0);
1471 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1472 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1473 valueized_anything);
1474 return shared_lookup_references;
1475 }
1476
1477 /* Create a vector of vn_reference_op_s structures from CALL, a
1478 call statement. The vector is shared among all callers of
1479 this function. */
1480
1481 static vec<vn_reference_op_s>
1482 valueize_shared_reference_ops_from_call (gcall *call)
1483 {
1484 if (!call)
1485 return vNULL;
1486 shared_lookup_references.truncate (0);
1487 copy_reference_ops_from_call (call, &shared_lookup_references);
1488 shared_lookup_references = valueize_refs (shared_lookup_references);
1489 return shared_lookup_references;
1490 }
1491
1492 /* Lookup a SCCVN reference operation VR in the current hash table.
1493 Returns the resulting value number if it exists in the hash table,
1494 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1495 vn_reference_t stored in the hashtable if something is found. */
1496
1497 static tree
1498 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1499 {
1500 vn_reference_s **slot;
1501 hashval_t hash;
1502
1503 hash = vr->hashcode;
1504 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1505 if (!slot && current_info == optimistic_info)
1506 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1507 if (slot)
1508 {
1509 if (vnresult)
1510 *vnresult = (vn_reference_t)*slot;
1511 return ((vn_reference_t)*slot)->result;
1512 }
1513
1514 return NULL_TREE;
1515 }
1516
1517 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1518 with the current VUSE and performs the expression lookup. */
1519
1520 static void *
1521 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1522 unsigned int cnt, void *vr_)
1523 {
1524 vn_reference_t vr = (vn_reference_t)vr_;
1525 vn_reference_s **slot;
1526 hashval_t hash;
1527
1528 /* This bounds the stmt walks we perform on reference lookups
1529 to O(1) instead of O(N) where N is the number of dominating
1530 stores. */
1531 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1532 return (void *)-1;
1533
1534 if (last_vuse_ptr)
1535 *last_vuse_ptr = vuse;
1536
1537 /* Fixup vuse and hash. */
1538 if (vr->vuse)
1539 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1540 vr->vuse = vuse_ssa_val (vuse);
1541 if (vr->vuse)
1542 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1543
1544 hash = vr->hashcode;
1545 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1546 if (!slot && current_info == optimistic_info)
1547 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1548 if (slot)
1549 return *slot;
1550
1551 return NULL;
1552 }
1553
1554 /* Lookup an existing or insert a new vn_reference entry into the
1555 value table for the VUSE, SET, TYPE, OPERANDS reference that has the
1556 value VALUE, which is either a constant or an SSA name. */
1557
1558 static vn_reference_t
1559 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1560 alias_set_type set,
1561 tree type,
1562 vec<vn_reference_op_s,
1563 va_heap> operands,
1564 tree value)
1565 {
1566 vn_reference_s vr1;
1567 vn_reference_t result;
1568 unsigned value_id;
1569 vr1.vuse = vuse;
1570 vr1.operands = operands;
1571 vr1.type = type;
1572 vr1.set = set;
1573 vr1.hashcode = vn_reference_compute_hash (&vr1);
1574 if (vn_reference_lookup_1 (&vr1, &result))
1575 return result;
1576 if (TREE_CODE (value) == SSA_NAME)
1577 value_id = VN_INFO (value)->value_id;
1578 else
1579 value_id = get_or_alloc_constant_value_id (value);
1580 return vn_reference_insert_pieces (vuse, set, type,
1581 operands.copy (), value, value_id);
1582 }
1583
1584 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1585 from the statement defining VUSE, and if not successful, tries to
1586 translate *REF and VR_ through an aggregate copy at the definition
1587 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1588 of *REF and *VR. If only disambiguation was performed then
1589 *DISAMBIGUATE_ONLY is set to true. */
1590
1591 static void *
1592 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1593 bool *disambiguate_only)
1594 {
1595 vn_reference_t vr = (vn_reference_t)vr_;
1596 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1597 tree base = ao_ref_base (ref);
1598 HOST_WIDE_INT offset, maxsize;
1599 static vec<vn_reference_op_s>
1600 lhs_ops = vNULL;
1601 ao_ref lhs_ref;
1602 bool lhs_ref_ok = false;
1603
1604 /* If the reference is based on a parameter that was determined to be
1605 pointing to read-only memory, it doesn't change. */
1606 if (TREE_CODE (base) == MEM_REF
1607 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1608 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1609 && bitmap_bit_p (const_parms,
1610 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1611 {
1612 *disambiguate_only = true;
1613 return NULL;
1614 }
1615
1616 /* First try to disambiguate after value-replacing in the definition's LHS. */
1617 if (is_gimple_assign (def_stmt))
1618 {
1619 tree lhs = gimple_assign_lhs (def_stmt);
1620 bool valueized_anything = false;
1621 /* Avoid re-allocation overhead. */
1622 lhs_ops.truncate (0);
1623 copy_reference_ops_from_ref (lhs, &lhs_ops);
1624 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1625 if (valueized_anything)
1626 {
1627 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1628 get_alias_set (lhs),
1629 TREE_TYPE (lhs), lhs_ops);
1630 if (lhs_ref_ok
1631 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1632 {
1633 *disambiguate_only = true;
1634 return NULL;
1635 }
1636 }
1637 else
1638 {
1639 ao_ref_init (&lhs_ref, lhs);
1640 lhs_ref_ok = true;
1641 }
1642 }
1643 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1644 && gimple_call_num_args (def_stmt) <= 4)
1645 {
1646 /* For builtin calls, valueize their arguments and call the
1647 alias oracle again. Valueization may improve points-to
1648 info of pointers and constify size and position arguments.
1649 Originally this was motivated by PR61034 which has
1650 conditional calls to free falsely clobbering ref because
1651 of imprecise points-to info of the argument. */
1652 tree oldargs[4];
1653 bool valueized_anything = false;
1654 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1655 {
1656 oldargs[i] = gimple_call_arg (def_stmt, i);
1657 if (TREE_CODE (oldargs[i]) == SSA_NAME
1658 && VN_INFO (oldargs[i])->valnum != oldargs[i])
1659 {
1660 gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1661 valueized_anything = true;
1662 }
1663 }
1664 if (valueized_anything)
1665 {
1666 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1667 ref);
1668 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1669 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1670 if (!res)
1671 {
1672 *disambiguate_only = true;
1673 return NULL;
1674 }
1675 }
1676 }
1677
1678 if (*disambiguate_only)
1679 return (void *)-1;
1680
1681 offset = ref->offset;
1682 maxsize = ref->max_size;
1683
1684 /* If we cannot constrain the size of the reference we cannot
1685 test if anything kills it. */
1686 if (maxsize == -1)
1687 return (void *)-1;
1688
1689 /* We can't deduce anything useful from clobbers. */
1690 if (gimple_clobber_p (def_stmt))
1691 return (void *)-1;
1692
1693 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1694 from that definition.
1695 1) Memset. */
1696 if (is_gimple_reg_type (vr->type)
1697 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1698 && integer_zerop (gimple_call_arg (def_stmt, 1))
1699 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1700 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1701 {
1702 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1703 tree base2;
1704 HOST_WIDE_INT offset2, size2, maxsize2;
1705 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1706 size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1707 if ((unsigned HOST_WIDE_INT)size2 / 8
1708 == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1709 && maxsize2 != -1
1710 && operand_equal_p (base, base2, 0)
1711 && offset2 <= offset
1712 && offset2 + size2 >= offset + maxsize)
1713 {
1714 tree val = build_zero_cst (vr->type);
1715 return vn_reference_lookup_or_insert_for_pieces
1716 (vuse, vr->set, vr->type, vr->operands, val);
1717 }
1718 }
1719
1720 /* 2) Assignment from an empty CONSTRUCTOR. */
1721 else if (is_gimple_reg_type (vr->type)
1722 && gimple_assign_single_p (def_stmt)
1723 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1724 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1725 {
1726 tree base2;
1727 HOST_WIDE_INT offset2, size2, maxsize2;
1728 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1729 &offset2, &size2, &maxsize2);
1730 if (maxsize2 != -1
1731 && operand_equal_p (base, base2, 0)
1732 && offset2 <= offset
1733 && offset2 + size2 >= offset + maxsize)
1734 {
1735 tree val = build_zero_cst (vr->type);
1736 return vn_reference_lookup_or_insert_for_pieces
1737 (vuse, vr->set, vr->type, vr->operands, val);
1738 }
1739 }
1740
1741 /* 3) Assignment from a constant. We can use fold's native encode/interpret
1742 routines to extract the assigned bits. */
1743 else if (vn_walk_kind == VN_WALKREWRITE
1744 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1745 && ref->size == maxsize
1746 && maxsize % BITS_PER_UNIT == 0
1747 && offset % BITS_PER_UNIT == 0
1748 && is_gimple_reg_type (vr->type)
1749 && gimple_assign_single_p (def_stmt)
1750 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1751 {
1752 tree base2;
1753 HOST_WIDE_INT offset2, size2, maxsize2;
1754 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1755 &offset2, &size2, &maxsize2);
1756 if (maxsize2 != -1
1757 && maxsize2 == size2
1758 && size2 % BITS_PER_UNIT == 0
1759 && offset2 % BITS_PER_UNIT == 0
1760 && operand_equal_p (base, base2, 0)
1761 && offset2 <= offset
1762 && offset2 + size2 >= offset + maxsize)
1763 {
1764 /* We support up to 512-bit values (for V8DFmode). */
1765 unsigned char buffer[64];
1766 int len;
1767
1768 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1769 buffer, sizeof (buffer));
1770 if (len > 0)
1771 {
1772 tree val = native_interpret_expr (vr->type,
1773 buffer
1774 + ((offset - offset2)
1775 / BITS_PER_UNIT),
1776 ref->size / BITS_PER_UNIT);
1777 if (val)
1778 return vn_reference_lookup_or_insert_for_pieces
1779 (vuse, vr->set, vr->type, vr->operands, val);
1780 }
1781 }
1782 }
1783
1784 /* 4) Assignment from an SSA name which definition we may be able
1785 to access pieces from. */
1786 else if (ref->size == maxsize
1787 && is_gimple_reg_type (vr->type)
1788 && gimple_assign_single_p (def_stmt)
1789 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1790 {
1791 tree rhs1 = gimple_assign_rhs1 (def_stmt);
1792 gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1793 if (is_gimple_assign (def_stmt2)
1794 && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1795 || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1796 && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1797 {
1798 tree base2;
1799 HOST_WIDE_INT offset2, size2, maxsize2, off;
1800 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1801 &offset2, &size2, &maxsize2);
1802 off = offset - offset2;
1803 if (maxsize2 != -1
1804 && maxsize2 == size2
1805 && operand_equal_p (base, base2, 0)
1806 && offset2 <= offset
1807 && offset2 + size2 >= offset + maxsize)
1808 {
1809 tree val = NULL_TREE;
1810 HOST_WIDE_INT elsz
1811 = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1812 if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1813 {
1814 if (off == 0)
1815 val = gimple_assign_rhs1 (def_stmt2);
1816 else if (off == elsz)
1817 val = gimple_assign_rhs2 (def_stmt2);
1818 }
1819 else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1820 && off % elsz == 0)
1821 {
1822 tree ctor = gimple_assign_rhs1 (def_stmt2);
1823 unsigned i = off / elsz;
1824 if (i < CONSTRUCTOR_NELTS (ctor))
1825 {
1826 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1827 if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1828 {
1829 if (TREE_CODE (TREE_TYPE (elt->value))
1830 != VECTOR_TYPE)
1831 val = elt->value;
1832 }
1833 }
1834 }
1835 if (val)
1836 return vn_reference_lookup_or_insert_for_pieces
1837 (vuse, vr->set, vr->type, vr->operands, val);
1838 }
1839 }
1840 }
1841
1842 /* 5) For aggregate copies translate the reference through them if
1843 the copy kills ref. */
1844 else if (vn_walk_kind == VN_WALKREWRITE
1845 && gimple_assign_single_p (def_stmt)
1846 && (DECL_P (gimple_assign_rhs1 (def_stmt))
1847 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1848 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1849 {
1850 tree base2;
1851 HOST_WIDE_INT maxsize2;
1852 int i, j;
1853 auto_vec<vn_reference_op_s> rhs;
1854 vn_reference_op_t vro;
1855 ao_ref r;
1856
1857 if (!lhs_ref_ok)
1858 return (void *)-1;
1859
1860 /* See if the assignment kills REF. */
1861 base2 = ao_ref_base (&lhs_ref);
1862 maxsize2 = lhs_ref.max_size;
1863 if (maxsize2 == -1
1864 || (base != base2
1865 && (TREE_CODE (base) != MEM_REF
1866 || TREE_CODE (base2) != MEM_REF
1867 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1868 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1869 TREE_OPERAND (base2, 1))))
1870 || !stmt_kills_ref_p (def_stmt, ref))
1871 return (void *)-1;
1872
1873 /* Find the common base of ref and the lhs. lhs_ops already
1874 contains valueized operands for the lhs. */
1875 i = vr->operands.length () - 1;
1876 j = lhs_ops.length () - 1;
1877 while (j >= 0 && i >= 0
1878 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1879 {
1880 i--;
1881 j--;
1882 }
1883
1884 /* ??? The innermost op should always be a MEM_REF and we already
1885 checked that the assignment to the lhs kills vr. Thus for
1886 aggregate copies using char[] types the vn_reference_op_eq
1887 may fail when comparing types for compatibility. But we really
1888 don't care here - further lookups with the rewritten operands
1889 will simply fail if we messed up types too badly. */
1890 HOST_WIDE_INT extra_off = 0;
1891 if (j == 0 && i >= 0
1892 && lhs_ops[0].opcode == MEM_REF
1893 && lhs_ops[0].off != -1)
1894 {
1895 if (lhs_ops[0].off == vr->operands[i].off)
1896 i--, j--;
1897 else if (vr->operands[i].opcode == MEM_REF
1898 && vr->operands[i].off != -1)
1899 {
1900 extra_off = vr->operands[i].off - lhs_ops[0].off;
1901 i--, j--;
1902 }
1903 }
1904
1905 /* i now points to the first additional op.
1906 ??? LHS may not be completely contained in VR, one or more
1907 VIEW_CONVERT_EXPRs could be in its way. We could at least
1908 try handling outermost VIEW_CONVERT_EXPRs. */
1909 if (j != -1)
1910 return (void *)-1;
1911
1912 /* Now re-write REF to be based on the rhs of the assignment. */
1913 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1914
1915 /* Apply an extra offset to the inner MEM_REF of the RHS. */
1916 if (extra_off != 0)
1917 {
1918 if (rhs.length () < 2
1919 || rhs[0].opcode != MEM_REF
1920 || rhs[0].off == -1)
1921 return (void *)-1;
1922 rhs[0].off += extra_off;
1923 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
1924 build_int_cst (TREE_TYPE (rhs[0].op0),
1925 extra_off));
1926 }
1927
1928 /* We need to prepend vr->operands[0..i] to rhs. */
1929 vec<vn_reference_op_s> old = vr->operands;
1930 if (i + 1 + rhs.length () > vr->operands.length ())
1931 {
1932 vr->operands.safe_grow (i + 1 + rhs.length ());
1933 if (old == shared_lookup_references)
1934 shared_lookup_references = vr->operands;
1935 }
1936 else
1937 vr->operands.truncate (i + 1 + rhs.length ());
1938 FOR_EACH_VEC_ELT (rhs, j, vro)
1939 vr->operands[i + 1 + j] = *vro;
1940 vr->operands = valueize_refs (vr->operands);
1941 if (old == shared_lookup_references)
1942 shared_lookup_references = vr->operands;
1943 vr->hashcode = vn_reference_compute_hash (vr);
1944
1945 /* Try folding the new reference to a constant. */
1946 tree val = fully_constant_vn_reference_p (vr);
1947 if (val)
1948 return vn_reference_lookup_or_insert_for_pieces
1949 (vuse, vr->set, vr->type, vr->operands, val);
1950
1951 /* Adjust *ref from the new operands. */
1952 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1953 return (void *)-1;
1954 /* This can happen with bitfields. */
1955 if (ref->size != r.size)
1956 return (void *)-1;
1957 *ref = r;
1958
1959 /* Do not update last seen VUSE after translating. */
1960 last_vuse_ptr = NULL;
1961
1962 /* Keep looking for the adjusted *REF / VR pair. */
1963 return NULL;
1964 }
1965
1966 /* 6) For memcpy copies translate the reference through them if
1967 the copy kills ref. */
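     /* An illustrative sketch (identifiers are hypothetical): for
          memcpy (&a, &b, sizeof (a));
          ... = a.x;
        the lookup of a.x is rewritten as a MEM_REF based on &b at the
        corresponding offset and continues at the older memory state,
        provided the access is fully contained in the copied region.  */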
1968 else if (vn_walk_kind == VN_WALKREWRITE
1969 && is_gimple_reg_type (vr->type)
1970 /* ??? Handle BCOPY as well. */
1971 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1972 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1973 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1974 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1975 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1976 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1977 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1978 && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
1979 {
1980 tree lhs, rhs;
1981 ao_ref r;
1982 HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1983 vn_reference_op_s op;
1984 HOST_WIDE_INT at;
1985
1986
1987 /* Only handle non-variable, addressable refs. */
1988 if (ref->size != maxsize
1989 || offset % BITS_PER_UNIT != 0
1990 || ref->size % BITS_PER_UNIT != 0)
1991 return (void *)-1;
1992
1993 /* Extract a pointer base and an offset for the destination. */
1994 lhs = gimple_call_arg (def_stmt, 0);
1995 lhs_offset = 0;
1996 if (TREE_CODE (lhs) == SSA_NAME)
1997 {
1998 lhs = SSA_VAL (lhs);
1999 if (TREE_CODE (lhs) == SSA_NAME)
2000 {
2001 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2002 if (gimple_assign_single_p (def_stmt)
2003 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2004 lhs = gimple_assign_rhs1 (def_stmt);
2005 }
2006 }
2007 if (TREE_CODE (lhs) == ADDR_EXPR)
2008 {
2009 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2010 &lhs_offset);
2011 if (!tem)
2012 return (void *)-1;
2013 if (TREE_CODE (tem) == MEM_REF
2014 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2015 {
2016 lhs = TREE_OPERAND (tem, 0);
2017 if (TREE_CODE (lhs) == SSA_NAME)
2018 lhs = SSA_VAL (lhs);
2019 lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2020 }
2021 else if (DECL_P (tem))
2022 lhs = build_fold_addr_expr (tem);
2023 else
2024 return (void *)-1;
2025 }
2026 if (TREE_CODE (lhs) != SSA_NAME
2027 && TREE_CODE (lhs) != ADDR_EXPR)
2028 return (void *)-1;
2029
2030 /* Extract a pointer base and an offset for the source. */
2031 rhs = gimple_call_arg (def_stmt, 1);
2032 rhs_offset = 0;
2033 if (TREE_CODE (rhs) == SSA_NAME)
2034 rhs = SSA_VAL (rhs);
2035 if (TREE_CODE (rhs) == ADDR_EXPR)
2036 {
2037 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2038 &rhs_offset);
2039 if (!tem)
2040 return (void *)-1;
2041 if (TREE_CODE (tem) == MEM_REF
2042 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2043 {
2044 rhs = TREE_OPERAND (tem, 0);
2045 rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2046 }
2047 else if (DECL_P (tem))
2048 rhs = build_fold_addr_expr (tem);
2049 else
2050 return (void *)-1;
2051 }
2052 if (TREE_CODE (rhs) != SSA_NAME
2053 && TREE_CODE (rhs) != ADDR_EXPR)
2054 return (void *)-1;
2055
2056 copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2057
2058 /* The bases of the destination and the reference have to agree. */
2059 if ((TREE_CODE (base) != MEM_REF
2060 && !DECL_P (base))
2061 || (TREE_CODE (base) == MEM_REF
2062 && (TREE_OPERAND (base, 0) != lhs
2063 || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2064 || (DECL_P (base)
2065 && (TREE_CODE (lhs) != ADDR_EXPR
2066 || TREE_OPERAND (lhs, 0) != base)))
2067 return (void *)-1;
2068
2069 at = offset / BITS_PER_UNIT;
2070 if (TREE_CODE (base) == MEM_REF)
2071 at += tree_to_uhwi (TREE_OPERAND (base, 1));
2072 /* If the access is completely outside of the memcpy destination
2073 area there is no aliasing. */
2074 if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2075 || lhs_offset + copy_size <= at)
2076 return NULL;
2077 /* And the access has to be contained within the memcpy destination. */
2078 if (lhs_offset > at
2079 || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2080 return (void *)-1;
2081
2082 /* Make room for 2 operands in the new reference. */
2083 if (vr->operands.length () < 2)
2084 {
2085 vec<vn_reference_op_s> old = vr->operands;
2086 vr->operands.safe_grow_cleared (2);
2087 if (old == shared_lookup_references
2088 && vr->operands != old)
2089 shared_lookup_references = vr->operands;
2090 }
2091 else
2092 vr->operands.truncate (2);
2093
2094 /* The looked-through reference is a simple MEM_REF. */
2095 memset (&op, 0, sizeof (op));
2096 op.type = vr->type;
2097 op.opcode = MEM_REF;
2098 op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
2099 op.off = at - lhs_offset + rhs_offset;
2100 vr->operands[0] = op;
2101 op.type = TREE_TYPE (rhs);
2102 op.opcode = TREE_CODE (rhs);
2103 op.op0 = rhs;
2104 op.off = -1;
2105 vr->operands[1] = op;
2106 vr->hashcode = vn_reference_compute_hash (vr);
2107
2108 /* Adjust *ref from the new operands. */
2109 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2110 return (void *)-1;
2111 /* This can happen with bitfields. */
2112 if (ref->size != r.size)
2113 return (void *)-1;
2114 *ref = r;
2115
2116 /* Do not update last seen VUSE after translating. */
2117 last_vuse_ptr = NULL;
2118
2119 /* Keep looking for the adjusted *REF / VR pair. */
2120 return NULL;
2121 }
2122
2123 /* Bail out and stop walking. */
2124 return (void *)-1;
2125 }
2126
2127 /* Lookup a reference operation by its parts, in the current hash table.
2128 Returns the resulting value number if it exists in the hash table,
2129 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2130 vn_reference_t stored in the hashtable if something is found. */
2131
2132 tree
2133 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2134 vec<vn_reference_op_s> operands,
2135 vn_reference_t *vnresult, vn_lookup_kind kind)
2136 {
2137 struct vn_reference_s vr1;
2138 vn_reference_t tmp;
2139 tree cst;
2140
2141 if (!vnresult)
2142 vnresult = &tmp;
2143 *vnresult = NULL;
2144
2145 vr1.vuse = vuse_ssa_val (vuse);
2146 shared_lookup_references.truncate (0);
2147 shared_lookup_references.safe_grow (operands.length ());
2148 memcpy (shared_lookup_references.address (),
2149 operands.address (),
2150 sizeof (vn_reference_op_s)
2151 * operands.length ());
2152 vr1.operands = operands = shared_lookup_references
2153 = valueize_refs (shared_lookup_references);
2154 vr1.type = type;
2155 vr1.set = set;
2156 vr1.hashcode = vn_reference_compute_hash (&vr1);
2157 if ((cst = fully_constant_vn_reference_p (&vr1)))
2158 return cst;
2159
2160 vn_reference_lookup_1 (&vr1, vnresult);
2161 if (!*vnresult
2162 && kind != VN_NOWALK
2163 && vr1.vuse)
2164 {
2165 ao_ref r;
2166 vn_walk_kind = kind;
2167 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2168 *vnresult =
2169 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2170 vn_reference_lookup_2,
2171 vn_reference_lookup_3,
2172 vuse_ssa_val, &vr1);
2173 gcc_checking_assert (vr1.operands == shared_lookup_references);
2174 }
2175
2176 if (*vnresult)
2177 return (*vnresult)->result;
2178
2179 return NULL_TREE;
2180 }
2181
2182 /* Lookup OP in the current hash table, and return the resulting value
2183 number if it exists in the hash table. Return NULL_TREE if it does
2184 not exist in the hash table or if the result field of the structure
2185 was NULL. VNRESULT will be filled in with the vn_reference_t
2186 stored in the hashtable if one exists. */
2187
2188 tree
2189 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2190 vn_reference_t *vnresult)
2191 {
2192 vec<vn_reference_op_s> operands;
2193 struct vn_reference_s vr1;
2194 tree cst;
2195 bool valuezied_anything;
2196
2197 if (vnresult)
2198 *vnresult = NULL;
2199
2200 vr1.vuse = vuse_ssa_val (vuse);
2201 vr1.operands = operands
2202 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2203 vr1.type = TREE_TYPE (op);
2204 vr1.set = get_alias_set (op);
2205 vr1.hashcode = vn_reference_compute_hash (&vr1);
2206 if ((cst = fully_constant_vn_reference_p (&vr1)))
2207 return cst;
2208
2209 if (kind != VN_NOWALK
2210 && vr1.vuse)
2211 {
2212 vn_reference_t wvnresult;
2213 ao_ref r;
2214 /* Make sure to use a valueized reference if we valueized anything.
2215 Otherwise preserve the full reference for advanced TBAA. */
2216 if (!valuezied_anything
2217 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2218 vr1.operands))
2219 ao_ref_init (&r, op);
2220 vn_walk_kind = kind;
2221 wvnresult =
2222 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2223 vn_reference_lookup_2,
2224 vn_reference_lookup_3,
2225 vuse_ssa_val, &vr1);
2226 gcc_checking_assert (vr1.operands == shared_lookup_references);
2227 if (wvnresult)
2228 {
2229 if (vnresult)
2230 *vnresult = wvnresult;
2231 return wvnresult->result;
2232 }
2233
2234 return NULL_TREE;
2235 }
2236
2237 return vn_reference_lookup_1 (&vr1, vnresult);
2238 }
2239
2240 /* Lookup CALL in the current hash table and return the entry in
2241 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2242
2243 void
2244 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2245 vn_reference_t vr)
2246 {
2247 if (vnresult)
2248 *vnresult = NULL;
2249
2250 tree vuse = gimple_vuse (call);
2251
2252 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2253 vr->operands = valueize_shared_reference_ops_from_call (call);
2254 vr->type = gimple_expr_type (call);
2255 vr->set = 0;
2256 vr->hashcode = vn_reference_compute_hash (vr);
2257 vn_reference_lookup_1 (vr, vnresult);
2258 }
2259
2260 /* Insert OP into the current hash table with a value number of
2261 RESULT, and return the resulting reference structure we created. */
2262
2263 static vn_reference_t
2264 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2265 {
2266 vn_reference_s **slot;
2267 vn_reference_t vr1;
2268 bool tem;
2269
2270 vr1 = current_info->references_pool->allocate ();
2271 if (TREE_CODE (result) == SSA_NAME)
2272 vr1->value_id = VN_INFO (result)->value_id;
2273 else
2274 vr1->value_id = get_or_alloc_constant_value_id (result);
2275 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2276 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2277 vr1->type = TREE_TYPE (op);
2278 vr1->set = get_alias_set (op);
2279 vr1->hashcode = vn_reference_compute_hash (vr1);
2280 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2281 vr1->result_vdef = vdef;
2282
2283 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2284 INSERT);
2285
2286 /* Because we lookup stores using vuses, and value number failures
2287 using the vdefs (see visit_reference_op_store for how and why),
2288 it's possible that on failure we may try to insert an already
2289 inserted store. This is not wrong, there is no ssa name for a
2290 store that we could use as a differentiator anyway. Thus, unlike
2291 the other lookup functions, you cannot gcc_assert (!*slot)
2292 here. */
2293
2294 /* But free the old slot in case of a collision. */
2295 if (*slot)
2296 free_reference (*slot);
2297
2298 *slot = vr1;
2299 return vr1;
2300 }
2301
2302 /* Insert a reference by its pieces into the current hash table with
2303 a value number of RESULT. Return the resulting reference
2304 structure we created. */
2305
2306 vn_reference_t
2307 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2308 vec<vn_reference_op_s> operands,
2309 tree result, unsigned int value_id)
2310
2311 {
2312 vn_reference_s **slot;
2313 vn_reference_t vr1;
2314
2315 vr1 = current_info->references_pool->allocate ();
2316 vr1->value_id = value_id;
2317 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2318 vr1->operands = valueize_refs (operands);
2319 vr1->type = type;
2320 vr1->set = set;
2321 vr1->hashcode = vn_reference_compute_hash (vr1);
2322 if (result && TREE_CODE (result) == SSA_NAME)
2323 result = SSA_VAL (result);
2324 vr1->result = result;
2325
2326 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2327 INSERT);
2328
2329 /* At this point we should have all the things inserted that we have
2330 seen before, and we should never try inserting something that
2331 already exists. */
2332 gcc_assert (!*slot);
2333 if (*slot)
2334 free_reference (*slot);
2335
2336 *slot = vr1;
2337 return vr1;
2338 }
2339
2340 /* Compute and return the hash value for nary operation VNO1. */
2341
2342 static hashval_t
2343 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2344 {
2345 inchash::hash hstate;
2346 unsigned i;
2347
2348 for (i = 0; i < vno1->length; ++i)
2349 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2350 vno1->op[i] = SSA_VAL (vno1->op[i]);
2351
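  /* An illustrative note: the swaps below canonicalize operand order so
     that congruent expressions hash (and later compare) the same, e.g.
     b_2 + a_1 like a_1 + b_2, and b_2 > a_1 like a_1 < b_2.  */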
2352 if (((vno1->length == 2
2353 && commutative_tree_code (vno1->opcode))
2354 || (vno1->length == 3
2355 && commutative_ternary_tree_code (vno1->opcode)))
2356 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2357 std::swap (vno1->op[0], vno1->op[1]);
2358 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2359 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2360 {
2361 std::swap (vno1->op[0], vno1->op[1]);
2362 vno1->opcode = swap_tree_comparison (vno1->opcode);
2363 }
2364
2365 hstate.add_int (vno1->opcode);
2366 for (i = 0; i < vno1->length; ++i)
2367 inchash::add_expr (vno1->op[i], hstate);
2368
2369 return hstate.end ();
2370 }
2371
2372 /* Compare nary operations VNO1 and VNO2 and return true if they are
2373 equivalent. */
2374
2375 bool
2376 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2377 {
2378 unsigned i;
2379
2380 if (vno1->hashcode != vno2->hashcode)
2381 return false;
2382
2383 if (vno1->length != vno2->length)
2384 return false;
2385
2386 if (vno1->opcode != vno2->opcode
2387 || !types_compatible_p (vno1->type, vno2->type))
2388 return false;
2389
2390 for (i = 0; i < vno1->length; ++i)
2391 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2392 return false;
2393
2394 return true;
2395 }
2396
2397 /* Initialize VNO from the pieces provided. */
2398
2399 static void
2400 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2401 enum tree_code code, tree type, tree *ops)
2402 {
2403 vno->opcode = code;
2404 vno->length = length;
2405 vno->type = type;
2406 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2407 }
2408
2409 /* Initialize VNO from OP. */
2410
2411 static void
2412 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2413 {
2414 unsigned i;
2415
2416 vno->opcode = TREE_CODE (op);
2417 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2418 vno->type = TREE_TYPE (op);
2419 for (i = 0; i < vno->length; ++i)
2420 vno->op[i] = TREE_OPERAND (op, i);
2421 }
2422
2423 /* Return the number of operands for a vn_nary ops structure from STMT. */
2424
2425 static unsigned int
2426 vn_nary_length_from_stmt (gimple *stmt)
2427 {
2428 switch (gimple_assign_rhs_code (stmt))
2429 {
2430 case REALPART_EXPR:
2431 case IMAGPART_EXPR:
2432 case VIEW_CONVERT_EXPR:
2433 return 1;
2434
2435 case BIT_FIELD_REF:
2436 return 3;
2437
2438 case CONSTRUCTOR:
2439 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2440
2441 default:
2442 return gimple_num_ops (stmt) - 1;
2443 }
2444 }
2445
2446 /* Initialize VNO from STMT. */
2447
2448 static void
2449 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2450 {
2451 unsigned i;
2452
2453 vno->opcode = gimple_assign_rhs_code (stmt);
2454 vno->type = gimple_expr_type (stmt);
2455 switch (vno->opcode)
2456 {
2457 case REALPART_EXPR:
2458 case IMAGPART_EXPR:
2459 case VIEW_CONVERT_EXPR:
2460 vno->length = 1;
2461 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2462 break;
2463
2464 case BIT_FIELD_REF:
2465 vno->length = 3;
2466 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2467 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2468 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2469 break;
2470
2471 case CONSTRUCTOR:
2472 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2473 for (i = 0; i < vno->length; ++i)
2474 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2475 break;
2476
2477 default:
2478 gcc_checking_assert (!gimple_assign_single_p (stmt));
2479 vno->length = gimple_num_ops (stmt) - 1;
2480 for (i = 0; i < vno->length; ++i)
2481 vno->op[i] = gimple_op (stmt, i + 1);
2482 }
2483 }
2484
2485 /* Compute the hashcode for VNO and look for it in the hash table;
2486 return the resulting value number if it exists in the hash table.
2487 Return NULL_TREE if it does not exist in the hash table or if the
2488 result field of the operation is NULL. VNRESULT will contain the
2489 vn_nary_op_t from the hashtable if it exists. */
2490
2491 static tree
2492 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2493 {
2494 vn_nary_op_s **slot;
2495
2496 if (vnresult)
2497 *vnresult = NULL;
2498
2499 vno->hashcode = vn_nary_op_compute_hash (vno);
2500 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2501 NO_INSERT);
2502 if (!slot && current_info == optimistic_info)
2503 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2504 NO_INSERT);
2505 if (!slot)
2506 return NULL_TREE;
2507 if (vnresult)
2508 *vnresult = *slot;
2509 return (*slot)->result;
2510 }
2511
2512 /* Lookup an n-ary operation by its pieces and return the resulting value
2513 number if it exists in the hash table. Return NULL_TREE if it does
2514 not exist in the hash table or if the result field of the operation
2515 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2516 if it exists. */
2517
2518 tree
2519 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2520 tree type, tree *ops, vn_nary_op_t *vnresult)
2521 {
2522 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2523 sizeof_vn_nary_op (length));
2524 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2525 return vn_nary_op_lookup_1 (vno1, vnresult);
2526 }
2527
2528 /* Lookup OP in the current hash table, and return the resulting value
2529 number if it exists in the hash table. Return NULL_TREE if it does
2530 not exist in the hash table or if the result field of the operation
2531 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2532 if it exists. */
2533
2534 tree
2535 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2536 {
2537 vn_nary_op_t vno1
2538 = XALLOCAVAR (struct vn_nary_op_s,
2539 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2540 init_vn_nary_op_from_op (vno1, op);
2541 return vn_nary_op_lookup_1 (vno1, vnresult);
2542 }
2543
2544 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2545 value number if it exists in the hash table. Return NULL_TREE if
2546 it does not exist in the hash table. VNRESULT will contain the
2547 vn_nary_op_t from the hashtable if it exists. */
2548
2549 tree
2550 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2551 {
2552 vn_nary_op_t vno1
2553 = XALLOCAVAR (struct vn_nary_op_s,
2554 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2555 init_vn_nary_op_from_stmt (vno1, stmt);
2556 return vn_nary_op_lookup_1 (vno1, vnresult);
2557 }
2558
2559 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
2560
2561 static tree
2562 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2563 {
2564 if (!rcode.is_tree_code ())
2565 return NULL_TREE;
2566 vn_nary_op_t vnresult = NULL;
2567 return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2568 (tree_code) rcode, type, ops, &vnresult);
2569 }
2570
2571 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2572
2573 static vn_nary_op_t
2574 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2575 {
2576 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2577 }
2578
2579 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2580 obstack. */
2581
2582 static vn_nary_op_t
2583 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2584 {
2585 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2586 &current_info->nary_obstack);
2587
2588 vno1->value_id = value_id;
2589 vno1->length = length;
2590 vno1->result = result;
2591
2592 return vno1;
2593 }
2594
2595 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2596 VNO->HASHCODE first. */
2597
2598 static vn_nary_op_t
2599 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2600 bool compute_hash)
2601 {
2602 vn_nary_op_s **slot;
2603
2604 if (compute_hash)
2605 vno->hashcode = vn_nary_op_compute_hash (vno);
2606
2607 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2608 gcc_assert (!*slot);
2609
2610 *slot = vno;
2611 return vno;
2612 }
2613
2614 /* Insert an n-ary operation into the current hash table using its
2615 pieces. Return the vn_nary_op_t structure we created and put in
2616 the hashtable. */
2617
2618 vn_nary_op_t
2619 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2620 tree type, tree *ops,
2621 tree result, unsigned int value_id)
2622 {
2623 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2624 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2625 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2626 }
2627
2628 /* Insert OP into the current hash table with a value number of
2629 RESULT. Return the vn_nary_op_t structure we created and put in
2630 the hashtable. */
2631
2632 vn_nary_op_t
2633 vn_nary_op_insert (tree op, tree result)
2634 {
2635 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2636 vn_nary_op_t vno1;
2637
2638 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2639 init_vn_nary_op_from_op (vno1, op);
2640 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2641 }
2642
2643 /* Insert the rhs of STMT into the current hash table with a value number of
2644 RESULT. */
2645
2646 static vn_nary_op_t
2647 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2648 {
2649 vn_nary_op_t vno1
2650 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2651 result, VN_INFO (result)->value_id);
2652 init_vn_nary_op_from_stmt (vno1, stmt);
2653 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2654 }
2655
2656 /* Compute a hashcode for PHI operation VP1 and return it. */
2657
2658 static inline hashval_t
2659 vn_phi_compute_hash (vn_phi_t vp1)
2660 {
2661 inchash::hash hstate (vp1->phiargs.length () > 2
2662 ? vp1->block->index : vp1->phiargs.length ());
2663 tree phi1op;
2664 tree type;
2665 edge e;
2666 edge_iterator ei;
2667
2668 /* If all PHI arguments are constants we need to distinguish
2669 the PHI node via its type. */
2670 type = vp1->type;
2671 hstate.merge_hash (vn_hash_type (type));
2672
2673 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2674 {
2675 /* Don't hash backedge values, they need to be handled as VN_TOP
2676 for optimistic value-numbering. */
2677 if (e->flags & EDGE_DFS_BACK)
2678 continue;
2679
2680 phi1op = vp1->phiargs[e->dest_idx];
2681 if (phi1op == VN_TOP)
2682 continue;
2683 inchash::add_expr (phi1op, hstate);
2684 }
2685
2686 return hstate.end ();
2687 }
2688
2689
2690 /* Return true if COND1 and COND2 represent the same condition, set
2691 *INVERTED_P if one needs to be inverted to make it the same as
2692 the other. */
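/* E.g. (an illustrative sketch) a_1 < b_2 and b_2 > a_1 represent the
   same condition, while a_1 < b_2 and a_1 >= b_2 are the same with
   *INVERTED_P set, assuming NaNs need not be honored.  */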
2693
2694 static bool
2695 cond_stmts_equal_p (gcond *cond1, gcond *cond2, bool *inverted_p)
2696 {
2697 enum tree_code code1 = gimple_cond_code (cond1);
2698 enum tree_code code2 = gimple_cond_code (cond2);
2699 tree lhs1 = gimple_cond_lhs (cond1);
2700 tree lhs2 = gimple_cond_lhs (cond2);
2701 tree rhs1 = gimple_cond_rhs (cond1);
2702 tree rhs2 = gimple_cond_rhs (cond2);
2703
2704 *inverted_p = false;
2705 if (code1 == code2)
2706 ;
2707 else if (code1 == swap_tree_comparison (code2))
2708 std::swap (lhs2, rhs2);
2709 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2710 *inverted_p = true;
2711 else if (code1 == invert_tree_comparison
2712 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2713 {
2714 std::swap (lhs2, rhs2);
2715 *inverted_p = true;
2716 }
2717 else
2718 return false;
2719
2720 if (! expressions_equal_p (vn_valueize (lhs1), vn_valueize (lhs2))
2721 || ! expressions_equal_p (vn_valueize (rhs1), vn_valueize (rhs2)))
2722 return false;
2723
2724 return true;
2725 }
2726
2727 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2728
2729 static int
2730 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2731 {
2732 if (vp1->hashcode != vp2->hashcode)
2733 return false;
2734
2735 if (vp1->block != vp2->block)
2736 {
2737 if (vp1->phiargs.length () != vp2->phiargs.length ())
2738 return false;
2739
2740 switch (vp1->phiargs.length ())
2741 {
2742 case 1:
2743 /* Single-arg PHIs are just copies. */
2744 break;
2745
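       /* A rough sketch of the case handled below (SSA names are
          hypothetical): two-argument PHIs in different blocks can get
          the same value number when both blocks are controlled by
          equivalent conditions, e.g.
            if (a_1 < b_2) ... x_3 = PHI <c_4, d_5>
            if (b_2 > a_1) ... y_6 = PHI <c_4, d_5>
          provided c_4 flows in on the true path and d_5 on the false
          path (or consistently inverted) in both blocks.  */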
2746 case 2:
2747 {
2748 /* Rule out backedges into the PHI. */
2749 if (vp1->block->loop_father->header == vp1->block
2750 || vp2->block->loop_father->header == vp2->block)
2751 return false;
2752
2753 /* If the PHI nodes do not have compatible types
2754 they are not the same. */
2755 if (!types_compatible_p (vp1->type, vp2->type))
2756 return false;
2757
2758 basic_block idom1
2759 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2760 basic_block idom2
2761 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2762 /* If the immediate dominators end in switch stmts, multiple
2763 values may end up in the same PHI arg via intermediate
2764 CFG merges. */
2765 if (EDGE_COUNT (idom1->succs) != 2
2766 || EDGE_COUNT (idom2->succs) != 2)
2767 return false;
2768
2769 /* Verify the controlling stmt is the same. */
2770 gimple *last1 = last_stmt (idom1);
2771 gimple *last2 = last_stmt (idom2);
2772 if (gimple_code (last1) != GIMPLE_COND
2773 || gimple_code (last2) != GIMPLE_COND)
2774 return false;
2775 bool inverted_p;
2776 if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2777 as_a <gcond *> (last2), &inverted_p))
2778 return false;
2779
2780 /* Get at true/false controlled edges into the PHI. */
2781 edge te1, te2, fe1, fe2;
2782 if (! extract_true_false_controlled_edges (idom1, vp1->block,
2783 &te1, &fe1)
2784 || ! extract_true_false_controlled_edges (idom2, vp2->block,
2785 &te2, &fe2))
2786 return false;
2787
2788 /* Swap edges if the second condition is the inversion of the
2789 first. */
2790 if (inverted_p)
2791 std::swap (te2, fe2);
2792
2793 /* ??? Handle VN_TOP specially. */
2794 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2795 vp2->phiargs[te2->dest_idx])
2796 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2797 vp2->phiargs[fe2->dest_idx]))
2798 return false;
2799
2800 return true;
2801 }
2802
2803 default:
2804 return false;
2805 }
2806 }
2807
2808 /* If the PHI nodes do not have compatible types
2809 they are not the same. */
2810 if (!types_compatible_p (vp1->type, vp2->type))
2811 return false;
2812
2813 /* Any phi in the same block will have its arguments in the
2814 same edge order, because of how we store phi nodes. */
2815 int i;
2816 tree phi1op;
2817 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2818 {
2819 tree phi2op = vp2->phiargs[i];
2820 if (phi1op == VN_TOP || phi2op == VN_TOP)
2821 continue;
2822 if (!expressions_equal_p (phi1op, phi2op))
2823 return false;
2824 }
2825
2826 return true;
2827 }
2828
2829 static vec<tree> shared_lookup_phiargs;
2830
2831 /* Lookup PHI in the current hash table, and return the resulting
2832 value number if it exists in the hash table. Return NULL_TREE if
2833 it does not exist in the hash table. */
2834
2835 static tree
2836 vn_phi_lookup (gimple *phi)
2837 {
2838 vn_phi_s **slot;
2839 struct vn_phi_s vp1;
2840 edge e;
2841 edge_iterator ei;
2842
2843 shared_lookup_phiargs.truncate (0);
2844 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2845
2846 /* Canonicalize the SSA_NAMEs to their value number. */
2847 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2848 {
2849 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2850 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2851 shared_lookup_phiargs[e->dest_idx] = def;
2852 }
2853 vp1.type = TREE_TYPE (gimple_phi_result (phi));
2854 vp1.phiargs = shared_lookup_phiargs;
2855 vp1.block = gimple_bb (phi);
2856 vp1.hashcode = vn_phi_compute_hash (&vp1);
2857 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2858 NO_INSERT);
2859 if (!slot && current_info == optimistic_info)
2860 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2861 NO_INSERT);
2862 if (!slot)
2863 return NULL_TREE;
2864 return (*slot)->result;
2865 }
2866
2867 /* Insert PHI into the current hash table with a value number of
2868 RESULT. */
2869
2870 static vn_phi_t
2871 vn_phi_insert (gimple *phi, tree result)
2872 {
2873 vn_phi_s **slot;
2874 vn_phi_t vp1 = current_info->phis_pool->allocate ();
2875 vec<tree> args = vNULL;
2876 edge e;
2877 edge_iterator ei;
2878
2879 args.safe_grow (gimple_phi_num_args (phi));
2880
2881 /* Canonicalize the SSA_NAMEs to their value number. */
2882 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2883 {
2884 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2885 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2886 args[e->dest_idx] = def;
2887 }
2888 vp1->value_id = VN_INFO (result)->value_id;
2889 vp1->type = TREE_TYPE (gimple_phi_result (phi));
2890 vp1->phiargs = args;
2891 vp1->block = gimple_bb (phi);
2892 vp1->result = result;
2893 vp1->hashcode = vn_phi_compute_hash (vp1);
2894
2895 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2896
2897 /* Because we iterate over phi operations more than once, it's
2898 possible the slot might already exist here, hence no assert.  */
2899 *slot = vp1;
2900 return vp1;
2901 }
2902
2903
2904 /* Print set of components in strongly connected component SCC to OUT. */
2905
2906 static void
2907 print_scc (FILE *out, vec<tree> scc)
2908 {
2909 tree var;
2910 unsigned int i;
2911
2912 fprintf (out, "SCC consists of:");
2913 FOR_EACH_VEC_ELT (scc, i, var)
2914 {
2915 fprintf (out, " ");
2916 print_generic_expr (out, var, 0);
2917 }
2918 fprintf (out, "\n");
2919 }
2920
2921 /* Set the value number of FROM to TO, return true if it has changed
2922 as a result. */
2923
2924 static inline bool
2925 set_ssa_val_to (tree from, tree to)
2926 {
2927 tree currval = SSA_VAL (from);
2928 HOST_WIDE_INT toff, coff;
2929
2930 /* The only things we allow as value numbers are ssa_names
2931 and invariants. So assert that here. We don't allow VN_TOP
2932 as visiting a stmt should produce a value-number other than
2933 that.
2934 ??? Still VN_TOP can happen for unreachable code, so force
2935 it to varying in that case. Not all code is prepared to
2936 get VN_TOP on valueization. */
2937 if (to == VN_TOP)
2938 {
2939 if (dump_file && (dump_flags & TDF_DETAILS))
2940 fprintf (dump_file, "Forcing value number to varying on "
2941 "receiving VN_TOP\n");
2942 to = from;
2943 }
2944
2945 gcc_assert (to != NULL_TREE
2946 && ((TREE_CODE (to) == SSA_NAME
2947 && (to == from || SSA_VAL (to) == to))
2948 || is_gimple_min_invariant (to)));
2949
2950 if (from != to)
2951 {
2952 if (currval == from)
2953 {
2954 if (dump_file && (dump_flags & TDF_DETAILS))
2955 {
2956 fprintf (dump_file, "Not changing value number of ");
2957 print_generic_expr (dump_file, from, 0);
2958 fprintf (dump_file, " from VARYING to ");
2959 print_generic_expr (dump_file, to, 0);
2960 fprintf (dump_file, "\n");
2961 }
2962 return false;
2963 }
2964 else if (TREE_CODE (to) == SSA_NAME
2965 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2966 to = from;
2967 }
2968
2969 if (dump_file && (dump_flags & TDF_DETAILS))
2970 {
2971 fprintf (dump_file, "Setting value number of ");
2972 print_generic_expr (dump_file, from, 0);
2973 fprintf (dump_file, " to ");
2974 print_generic_expr (dump_file, to, 0);
2975 }
2976
2977 if (currval != to
2978 && !operand_equal_p (currval, to, 0)
2979 /* ??? For addresses involving volatile objects or types operand_equal_p
2980 does not reliably detect ADDR_EXPRs as equal. We know we are only
2981 getting invariant gimple addresses here, so we can use
2982 get_addr_base_and_unit_offset to do this comparison. */
2983 && !(TREE_CODE (currval) == ADDR_EXPR
2984 && TREE_CODE (to) == ADDR_EXPR
2985 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2986 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2987 && coff == toff))
2988 {
2989 VN_INFO (from)->valnum = to;
2990 if (dump_file && (dump_flags & TDF_DETAILS))
2991 fprintf (dump_file, " (changed)\n");
2992 return true;
2993 }
2994 if (dump_file && (dump_flags & TDF_DETAILS))
2995 fprintf (dump_file, "\n");
2996 return false;
2997 }
2998
2999 /* Mark as processed all the definitions in the defining stmt of USE, or
3000 the USE itself. */
3001
3002 static void
3003 mark_use_processed (tree use)
3004 {
3005 ssa_op_iter iter;
3006 def_operand_p defp;
3007 gimple *stmt = SSA_NAME_DEF_STMT (use);
3008
3009 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3010 {
3011 VN_INFO (use)->use_processed = true;
3012 return;
3013 }
3014
3015 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3016 {
3017 tree def = DEF_FROM_PTR (defp);
3018
3019 VN_INFO (def)->use_processed = true;
3020 }
3021 }
3022
3023 /* Value number all definitions in STMT to themselves.
3024 Return true if a value number changed. */
3025
3026 static bool
3027 defs_to_varying (gimple *stmt)
3028 {
3029 bool changed = false;
3030 ssa_op_iter iter;
3031 def_operand_p defp;
3032
3033 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3034 {
3035 tree def = DEF_FROM_PTR (defp);
3036 changed |= set_ssa_val_to (def, def);
3037 }
3038 return changed;
3039 }
3040
3041 /* Visit a copy between LHS and RHS, return true if the value number
3042 changed. */
3043
3044 static bool
3045 visit_copy (tree lhs, tree rhs)
3046 {
3047 /* Valueize. */
3048 rhs = SSA_VAL (rhs);
3049
3050 return set_ssa_val_to (lhs, rhs);
3051 }
3052
3053 /* Visit a nary operator RHS, value number it, and return true if the
3054 value number of LHS has changed as a result. */
3055
3056 static bool
3057 visit_nary_op (tree lhs, gimple *stmt)
3058 {
3059 bool changed = false;
3060 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3061
3062 if (result)
3063 changed = set_ssa_val_to (lhs, result);
3064 else
3065 {
3066 changed = set_ssa_val_to (lhs, lhs);
3067 vn_nary_op_insert_stmt (stmt, lhs);
3068 }
3069
3070 return changed;
3071 }
3072
3073 /* Visit a call STMT storing into LHS. Return true if the value number
3074 of the LHS has changed as a result. */
3075
3076 static bool
3077 visit_reference_op_call (tree lhs, gcall *stmt)
3078 {
3079 bool changed = false;
3080 struct vn_reference_s vr1;
3081 vn_reference_t vnresult = NULL;
3082 tree vdef = gimple_vdef (stmt);
3083
3084 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3085 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3086 lhs = NULL_TREE;
3087
3088 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3089 if (vnresult)
3090 {
3091 if (vnresult->result_vdef && vdef)
3092 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3093
3094 if (!vnresult->result && lhs)
3095 vnresult->result = lhs;
3096
3097 if (vnresult->result && lhs)
3098 changed |= set_ssa_val_to (lhs, vnresult->result);
3099 }
3100 else
3101 {
3102 vn_reference_t vr2;
3103 vn_reference_s **slot;
3104 if (vdef)
3105 changed |= set_ssa_val_to (vdef, vdef);
3106 if (lhs)
3107 changed |= set_ssa_val_to (lhs, lhs);
3108 vr2 = current_info->references_pool->allocate ();
3109 vr2->vuse = vr1.vuse;
3110 /* As we are not walking the virtual operand chain we know the
3111 shared_lookup_references are still original so we can re-use
3112 them here. */
3113 vr2->operands = vr1.operands.copy ();
3114 vr2->type = vr1.type;
3115 vr2->set = vr1.set;
3116 vr2->hashcode = vr1.hashcode;
3117 vr2->result = lhs;
3118 vr2->result_vdef = vdef;
3119 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3120 INSERT);
3121 gcc_assert (!*slot);
3122 *slot = vr2;
3123 }
3124
3125 return changed;
3126 }
3127
3128 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3129 and return true if the value number of the LHS has changed as a result. */
3130
3131 static bool
3132 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3133 {
3134 bool changed = false;
3135 tree last_vuse;
3136 tree result;
3137
3138 last_vuse = gimple_vuse (stmt);
3139 last_vuse_ptr = &last_vuse;
3140 result = vn_reference_lookup (op, gimple_vuse (stmt),
3141 default_vn_walk_kind, NULL);
3142 last_vuse_ptr = NULL;
3143
3144 /* We handle type-punning through unions by value-numbering based
3145 on offset and size of the access. Be prepared to handle a
3146 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
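  /* E.g. (illustrative) if a union's int member was stored and its
     float member is loaded at the same offset and size, the load is
     value numbered as VIEW_CONVERT_EXPR <float> of the stored value.  */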
3147 if (result
3148 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3149 {
3150 /* We will be setting the value number of lhs to the value number
3151 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3152 So first simplify and lookup this expression to see if it
3153 is already available. */
3154 mprts_hook = vn_lookup_simplify_result;
3155 code_helper rcode = VIEW_CONVERT_EXPR;
3156 tree ops[3] = { result };
3157 bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3158 vn_valueize);
3159 mprts_hook = NULL;
3160 gimple *new_stmt = NULL;
3161 if (res
3162 && gimple_simplified_result_is_gimple_val (rcode, ops))
3163 /* The expression is already available. */
3164 result = ops[0];
3165 else
3166 {
3167 tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3168 if (!val)
3169 {
3170 gimple_seq stmts = NULL;
3171 result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3172 &stmts);
3173 gcc_assert (result && gimple_seq_singleton_p (stmts));
3174 new_stmt = gimple_seq_first_stmt (stmts);
3175 }
3176 else
3177 /* The expression is already available. */
3178 result = val;
3179 }
3180 if (new_stmt)
3181 {
3182 /* The expression is not yet available, value-number lhs to
3183 the new SSA_NAME we created. */
3184 /* Initialize value-number information properly. */
3185 VN_INFO_GET (result)->valnum = result;
3186 VN_INFO (result)->value_id = get_next_value_id ();
3187 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3188 new_stmt);
3189 VN_INFO (result)->needs_insertion = true;
3190 /* As all "inserted" statements are singleton SCCs, insert
3191 to the valid table. This is strictly needed to
3192 avoid re-generating new value SSA_NAMEs for the same
3193 expression during SCC iteration over and over (the
3194 optimistic table gets cleared after each iteration).
3195 We do not need to insert into the optimistic table, as
3196 lookups there will fall back to the valid table. */
3197 if (current_info == optimistic_info)
3198 {
3199 current_info = valid_info;
3200 vn_nary_op_insert_stmt (new_stmt, result);
3201 current_info = optimistic_info;
3202 }
3203 else
3204 vn_nary_op_insert_stmt (new_stmt, result);
3205 if (dump_file && (dump_flags & TDF_DETAILS))
3206 {
3207 fprintf (dump_file, "Inserting name ");
3208 print_generic_expr (dump_file, result, 0);
3209 fprintf (dump_file, " for expression ");
3210 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3211 fprintf (dump_file, "\n");
3212 }
3213 }
3214 }
3215
3216 if (result)
3217 changed = set_ssa_val_to (lhs, result);
3218 else
3219 {
3220 changed = set_ssa_val_to (lhs, lhs);
3221 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3222 }
3223
3224 return changed;
3225 }
3226
3227
3228 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3229 and return true if the value number of the LHS has changed as a result. */
3230
3231 static bool
3232 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3233 {
3234 bool changed = false;
3235 vn_reference_t vnresult = NULL;
3236 tree result, assign;
3237 bool resultsame = false;
3238 tree vuse = gimple_vuse (stmt);
3239 tree vdef = gimple_vdef (stmt);
3240
3241 if (TREE_CODE (op) == SSA_NAME)
3242 op = SSA_VAL (op);
3243
3244 /* First we want to lookup using the *vuses* from the store and see
3245 if the last store to this location with the same address
3246 had the same value.
3247
3248 The vuses represent the memory state before the store. If the
3249 memory state, address, and value of the store are the same as the
3250 last store to this location, then this store will produce the
3251 same memory state as that store.
3252
3253 In this case the vdef versions for this store are value numbered to those
3254 vuse versions, since they represent the same memory state after
3255 this store.
3256
3257 Otherwise, the vdefs for the store are used when inserting into
3258 the table, since the store generates a new memory state. */
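  /* An illustrative sketch (SSA names are hypothetical):
       # .MEM_2 = VDEF <.MEM_1>
       a = x_3;
       # .MEM_4 = VDEF <.MEM_2>
       a = x_3;
     The second store writes the value the location already holds, so
     .MEM_4 is value numbered the same as its vuse .MEM_2 and a later
     pass may remove the redundant store.  */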
3259
3260 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
3261
3262 if (result)
3263 {
3264 if (TREE_CODE (result) == SSA_NAME)
3265 result = SSA_VAL (result);
3266 resultsame = expressions_equal_p (result, op);
3267 }
3268
3269 if ((!result || !resultsame)
3270 /* Only perform the following when being called from PRE
3271 which embeds tail merging. */
3272 && default_vn_walk_kind == VN_WALK)
3273 {
3274 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3275 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
3276 if (vnresult)
3277 {
3278 VN_INFO (vdef)->use_processed = true;
3279 return set_ssa_val_to (vdef, vnresult->result_vdef);
3280 }
3281 }
3282
3283 if (!result || !resultsame)
3284 {
3285 if (dump_file && (dump_flags & TDF_DETAILS))
3286 {
3287 fprintf (dump_file, "No store match\n");
3288 fprintf (dump_file, "Value numbering store ");
3289 print_generic_expr (dump_file, lhs, 0);
3290 fprintf (dump_file, " to ");
3291 print_generic_expr (dump_file, op, 0);
3292 fprintf (dump_file, "\n");
3293 }
3294 /* Have to set value numbers before insert, since insert is
3295 going to valueize the references in-place. */
3296 if (vdef)
3297 {
3298 changed |= set_ssa_val_to (vdef, vdef);
3299 }
3300
3301 /* Do not insert structure copies into the tables. */
3302 if (is_gimple_min_invariant (op)
3303 || is_gimple_reg (op))
3304 vn_reference_insert (lhs, op, vdef, NULL);
3305
3306 /* Only perform the following when being called from PRE
3307 which embeds tail merging. */
3308 if (default_vn_walk_kind == VN_WALK)
3309 {
3310 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3311 vn_reference_insert (assign, lhs, vuse, vdef);
3312 }
3313 }
3314 else
3315 {
3316 /* We had a match, so value number the vdef to have the value
3317 number of the vuse it came from. */
3318
3319 if (dump_file && (dump_flags & TDF_DETAILS))
3320 fprintf (dump_file, "Store matched earlier value, "
3321 "value numbering store vdefs to matching vuses.\n");
3322
3323 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3324 }
3325
3326 return changed;
3327 }
3328
3329 /* Visit and value number PHI, return true if the value number
3330 changed. */
3331
3332 static bool
3333 visit_phi (gimple *phi)
3334 {
3335 bool changed = false;
3336 tree result;
3337 tree sameval = VN_TOP;
3338 bool allsame = true;
3339
3340 /* TODO: We could check for this in init_sccvn, and replace this
3341 with a gcc_assert. */
3342 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3343 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3344
3345 /* See if all non-TOP arguments have the same value. TOP is
3346 equivalent to everything, so we can ignore it. */
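  /* E.g. (illustrative) for x_3 = PHI <a_1(2), b_4(3)> where b_4 still
     valueizes to VN_TOP (not yet visited), the PHI can be optimistically
     value numbered to a_1, unless an equivalent PHI was already
     recorded.  */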
3347 edge_iterator ei;
3348 edge e;
3349 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3350 if (e->flags & EDGE_EXECUTABLE)
3351 {
3352 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3353
3354 if (TREE_CODE (def) == SSA_NAME)
3355 def = SSA_VAL (def);
3356 if (def == VN_TOP)
3357 continue;
3358 if (sameval == VN_TOP)
3359 sameval = def;
3360 else if (!expressions_equal_p (def, sameval))
3361 {
3362 allsame = false;
3363 break;
3364 }
3365 }
3366
3367 /* If none of the edges was executable or all incoming values are
3368 undefined keep the value-number at VN_TOP. */
3369 if (sameval == VN_TOP)
3370 return set_ssa_val_to (PHI_RESULT (phi), VN_TOP);
3371
3372 /* First see if it is equivalent to a phi node in this block. We prefer
3373 this as it allows IV elimination - see PRs 66502 and 67167. */
3374 result = vn_phi_lookup (phi);
3375 if (result)
3376 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3377 /* Otherwise all value numbered to the same value, the phi node has that
3378 value. */
3379 else if (allsame)
3380 changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3381 else
3382 {
3383 vn_phi_insert (phi, PHI_RESULT (phi));
3384 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3385 }
3386
3387 return changed;
3388 }
3389
3390 /* Try to simplify RHS using equivalences and constant folding. */
3391
3392 static tree
3393 try_to_simplify (gassign *stmt)
3394 {
3395 enum tree_code code = gimple_assign_rhs_code (stmt);
3396 tree tem;
3397
3398 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3399 in this case, there is no point in doing extra work. */
3400 if (code == SSA_NAME)
3401 return NULL_TREE;
3402
3403 /* First try constant folding based on our current lattice. */
3404 mprts_hook = vn_lookup_simplify_result;
3405 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3406 mprts_hook = NULL;
3407 if (tem
3408 && (TREE_CODE (tem) == SSA_NAME
3409 || is_gimple_min_invariant (tem)))
3410 return tem;
3411
3412 return NULL_TREE;
3413 }
3414
3415 /* Visit and value number USE, return true if the value number
3416 changed. */
3417
3418 static bool
3419 visit_use (tree use)
3420 {
3421 bool changed = false;
3422 gimple *stmt = SSA_NAME_DEF_STMT (use);
3423
3424 mark_use_processed (use);
3425
3426 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3427 if (dump_file && (dump_flags & TDF_DETAILS)
3428 && !SSA_NAME_IS_DEFAULT_DEF (use))
3429 {
3430 fprintf (dump_file, "Value numbering ");
3431 print_generic_expr (dump_file, use, 0);
3432 fprintf (dump_file, " stmt = ");
3433 print_gimple_stmt (dump_file, stmt, 0, 0);
3434 }
3435
3436 /* Handle uninitialized uses. */
3437 if (SSA_NAME_IS_DEFAULT_DEF (use))
3438 changed = set_ssa_val_to (use, use);
3439 else if (gimple_code (stmt) == GIMPLE_PHI)
3440 changed = visit_phi (stmt);
3441 else if (gimple_has_volatile_ops (stmt))
3442 changed = defs_to_varying (stmt);
3443 else if (gassign *ass = dyn_cast <gassign *> (stmt))
3444 {
3445 enum tree_code code = gimple_assign_rhs_code (ass);
3446 tree lhs = gimple_assign_lhs (ass);
3447 tree rhs1 = gimple_assign_rhs1 (ass);
3448 tree simplified;
3449
3450 /* Shortcut for copies. Simplifying copies is pointless,
3451 since we copy the expression and value they represent. */
3452 if (code == SSA_NAME
3453 && TREE_CODE (lhs) == SSA_NAME)
3454 {
3455 changed = visit_copy (lhs, rhs1);
3456 goto done;
3457 }
3458 simplified = try_to_simplify (ass);
3459 if (simplified)
3460 {
3461 if (dump_file && (dump_flags & TDF_DETAILS))
3462 {
3463 fprintf (dump_file, "RHS ");
3464 print_gimple_expr (dump_file, ass, 0, 0);
3465 fprintf (dump_file, " simplified to ");
3466 print_generic_expr (dump_file, simplified, 0);
3467 fprintf (dump_file, "\n");
3468 }
3469 }
3470 /* Setting value numbers to constants will occasionally
3471 screw up phi congruence because constants are not
3472 uniquely associated with a single ssa name that can be
3473 looked up. */
3474 if (simplified
3475 && is_gimple_min_invariant (simplified)
3476 && TREE_CODE (lhs) == SSA_NAME)
3477 {
3478 changed = set_ssa_val_to (lhs, simplified);
3479 goto done;
3480 }
3481 else if (simplified
3482 && TREE_CODE (simplified) == SSA_NAME
3483 && TREE_CODE (lhs) == SSA_NAME)
3484 {
3485 changed = visit_copy (lhs, simplified);
3486 goto done;
3487 }
3488
3489 if ((TREE_CODE (lhs) == SSA_NAME
3490 /* We can substitute SSA_NAMEs that are live over
3491 abnormal edges with their constant value. */
3492 && !(gimple_assign_copy_p (ass)
3493 && is_gimple_min_invariant (rhs1))
3494 && !(simplified
3495 && is_gimple_min_invariant (simplified))
3496 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3497 /* Stores or copies from SSA_NAMEs that are live over
3498 abnormal edges are a problem. */
3499 || (code == SSA_NAME
3500 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3501 changed = defs_to_varying (ass);
3502 else if (REFERENCE_CLASS_P (lhs)
3503 || DECL_P (lhs))
3504 changed = visit_reference_op_store (lhs, rhs1, ass);
3505 else if (TREE_CODE (lhs) == SSA_NAME)
3506 {
3507 if ((gimple_assign_copy_p (ass)
3508 && is_gimple_min_invariant (rhs1))
3509 || (simplified
3510 && is_gimple_min_invariant (simplified)))
3511 {
3512 if (simplified)
3513 changed = set_ssa_val_to (lhs, simplified);
3514 else
3515 changed = set_ssa_val_to (lhs, rhs1);
3516 }
3517 else
3518 {
3519 /* Visit the original statement. */
3520 switch (vn_get_stmt_kind (ass))
3521 {
3522 case VN_NARY:
3523 changed = visit_nary_op (lhs, ass);
3524 break;
3525 case VN_REFERENCE:
3526 changed = visit_reference_op_load (lhs, rhs1, ass);
3527 break;
3528 default:
3529 changed = defs_to_varying (ass);
3530 break;
3531 }
3532 }
3533 }
3534 else
3535 changed = defs_to_varying (ass);
3536 }
3537 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3538 {
3539 tree lhs = gimple_call_lhs (call_stmt);
3540 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3541 {
3542 /* Try constant folding based on our current lattice. */
3543 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3544 vn_valueize);
3545 if (simplified)
3546 {
3547 if (dump_file && (dump_flags & TDF_DETAILS))
3548 {
3549 fprintf (dump_file, "call ");
3550 print_gimple_expr (dump_file, call_stmt, 0, 0);
3551 fprintf (dump_file, " simplified to ");
3552 print_generic_expr (dump_file, simplified, 0);
3553 fprintf (dump_file, "\n");
3554 }
3555 }
3556 /* Setting value numbers to constants will occasionally
3557 screw up phi congruence because constants are not
3558 uniquely associated with a single ssa name that can be
3559 looked up. */
3560 if (simplified
3561 && is_gimple_min_invariant (simplified))
3562 {
3563 changed = set_ssa_val_to (lhs, simplified);
3564 if (gimple_vdef (call_stmt))
3565 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3566 SSA_VAL (gimple_vuse (call_stmt)));
3567 goto done;
3568 }
3569 else if (simplified
3570 && TREE_CODE (simplified) == SSA_NAME)
3571 {
3572 changed = visit_copy (lhs, simplified);
3573 if (gimple_vdef (call_stmt))
3574 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3575 SSA_VAL (gimple_vuse (call_stmt)));
3576 goto done;
3577 }
3578 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3579 {
3580 changed = defs_to_varying (call_stmt);
3581 goto done;
3582 }
3583 }
3584
3585 if (!gimple_call_internal_p (call_stmt)
3586 && (/* Calls to the same function with the same vuse
3587 and the same operands do not necessarily return the same
3588 value, unless they're pure or const. */
3589 gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3590 /* If calls have a vdef, subsequent calls won't have
3591 the same incoming vuse. So, if 2 calls with vdef have the
3592 same vuse, we know they're not subsequent.
3593 We can value number 2 calls to the same function with the
3594 same vuse and the same operands, which are thus not subsequent,
3595 as the same, because there is no code in the program that can
3596 compare the 2 values... */
3597 || (gimple_vdef (call_stmt)
3598 /* ... unless the call returns a pointer which does
3599 not alias with anything else. In which case the
3600 information that the values are distinct is encoded
3601 in the IL. */
3602 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3603 /* Only perform the following when being called from PRE
3604 which embeds tail merging. */
3605 && default_vn_walk_kind == VN_WALK)))
3606 changed = visit_reference_op_call (lhs, call_stmt);
3607 else
3608 changed = defs_to_varying (call_stmt);
3609 }
3610 else
3611 changed = defs_to_varying (stmt);
3612 done:
3613 return changed;
3614 }
3615
3616 /* Compare two operands by reverse postorder index. */
3617
3618 static int
3619 compare_ops (const void *pa, const void *pb)
3620 {
3621 const tree opa = *((const tree *)pa);
3622 const tree opb = *((const tree *)pb);
3623 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3624 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3625 basic_block bba;
3626 basic_block bbb;
3627
3628 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3629 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3630 else if (gimple_nop_p (opstmta))
3631 return -1;
3632 else if (gimple_nop_p (opstmtb))
3633 return 1;
3634
3635 bba = gimple_bb (opstmta);
3636 bbb = gimple_bb (opstmtb);
3637
3638 if (!bba && !bbb)
3639 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3640 else if (!bba)
3641 return -1;
3642 else if (!bbb)
3643 return 1;
3644
3645 if (bba == bbb)
3646 {
3647 if (gimple_code (opstmta) == GIMPLE_PHI
3648 && gimple_code (opstmtb) == GIMPLE_PHI)
3649 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3650 else if (gimple_code (opstmta) == GIMPLE_PHI)
3651 return -1;
3652 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3653 return 1;
3654 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3655 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3656 else
3657 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3658 }
3659 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3660 }
3661
3662 /* Sort an array containing members of a strongly connected component
3663 SCC so that the members are ordered by RPO number.
3664 This means that when the sort is complete, iterating through the
3665 array will give you the members in RPO order. */
3666
3667 static void
3668 sort_scc (vec<tree> scc)
3669 {
3670 scc.qsort (compare_ops);
3671 }
3672
3673 /* Insert the no longer used nary ONARY to the hash INFO. */
3674
3675 static void
3676 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3677 {
3678 size_t size = sizeof_vn_nary_op (onary->length);
3679 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3680 &info->nary_obstack);
3681 memcpy (nary, onary, size);
3682 vn_nary_op_insert_into (nary, info->nary, false);
3683 }
3684
3685 /* Insert the no longer used phi OPHI to the hash INFO. */
3686
3687 static void
3688 copy_phi (vn_phi_t ophi, vn_tables_t info)
3689 {
3690 vn_phi_t phi = info->phis_pool->allocate ();
3691 vn_phi_s **slot;
3692 memcpy (phi, ophi, sizeof (*phi));
3693 ophi->phiargs.create (0);
3694 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3695 gcc_assert (!*slot);
3696 *slot = phi;
3697 }
3698
3699 /* Insert the no longer used reference OREF to the hash INFO. */
3700
3701 static void
3702 copy_reference (vn_reference_t oref, vn_tables_t info)
3703 {
3704 vn_reference_t ref;
3705 vn_reference_s **slot;
3706 ref = info->references_pool->allocate ();
3707 memcpy (ref, oref, sizeof (*ref));
3708 oref->operands.create (0);
3709 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
3710 if (*slot)
3711 free_reference (*slot);
3712 *slot = ref;
3713 }
3714
3715 /* Process a strongly connected component in the SSA graph. */
3716
3717 static void
3718 process_scc (vec<tree> scc)
3719 {
3720 tree var;
3721 unsigned int i;
3722 unsigned int iterations = 0;
3723 bool changed = true;
3724 vn_nary_op_iterator_type hin;
3725 vn_phi_iterator_type hip;
3726 vn_reference_iterator_type hir;
3727 vn_nary_op_t nary;
3728 vn_phi_t phi;
3729 vn_reference_t ref;
3730
3731 /* If the SCC has a single member, just visit it. */
3732 if (scc.length () == 1)
3733 {
3734 tree use = scc[0];
3735 if (VN_INFO (use)->use_processed)
3736 return;
3737 /* We need to make sure it doesn't form a cycle itself, which can
3738 happen for self-referential PHI nodes. In that case we would
3739 end up inserting an expression with VN_TOP operands into the
3740 valid table which makes us derive bogus equivalences later.
3741 The cheapest way to check this is to assume it for all PHI nodes. */
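/* For instance, a hypothetical x_1 = PHI <x_1(3), x_2(5)> refers to
   itself; visiting it only once would record a PHI expression with a
   VN_TOP operand.  */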
3742 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3743 /* Fallthru to iteration. */ ;
3744 else
3745 {
3746 visit_use (use);
3747 return;
3748 }
3749 }
3750
3751 if (dump_file && (dump_flags & TDF_DETAILS))
3752 print_scc (dump_file, scc);
3753
3754 /* Iterate over the SCC with the optimistic table until it stops
3755 changing. */
3756 current_info = optimistic_info;
3757 while (changed)
3758 {
3759 changed = false;
3760 iterations++;
3761 if (dump_file && (dump_flags & TDF_DETAILS))
3762 fprintf (dump_file, "Starting iteration %d\n", iterations);
3763 /* As we are value-numbering optimistically, we have to
3764 clear the expression tables and the simplified expressions
3765 in each iteration until we converge. */
3766 optimistic_info->nary->empty ();
3767 optimistic_info->phis->empty ();
3768 optimistic_info->references->empty ();
3769 obstack_free (&optimistic_info->nary_obstack, NULL);
3770 gcc_obstack_init (&optimistic_info->nary_obstack);
3771 optimistic_info->phis_pool->release ();
3772 optimistic_info->references_pool->release ();
3773 FOR_EACH_VEC_ELT (scc, i, var)
3774 gcc_assert (!VN_INFO (var)->needs_insertion
3775 && VN_INFO (var)->expr == NULL);
3776 FOR_EACH_VEC_ELT (scc, i, var)
3777 changed |= visit_use (var);
3778 }
3779
3780 if (dump_file && (dump_flags & TDF_DETAILS))
3781 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
3782 statistics_histogram_event (cfun, "SCC iterations", iterations);
3783
3784 /* Finally, copy the contents of the no longer used optimistic
3785 table to the valid table. */
3786 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
3787 copy_nary (nary, valid_info);
3788 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
3789 copy_phi (phi, valid_info);
3790 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
3791 ref, vn_reference_t, hir)
3792 copy_reference (ref, valid_info);
3793
3794 current_info = valid_info;
3795 }
3796
3797
3798 /* Pop the components of the found SCC for NAME off the SCC stack
3799 and process them. Returns true if all went well, false if
3800 we run into resource limits. */
3801
3802 static bool
3803 extract_and_process_scc_for_name (tree name)
3804 {
3805 auto_vec<tree> scc;
3806 tree x;
3807
3808 /* Found an SCC, pop the components off the SCC stack and
3809 process them. */
3810 do
3811 {
3812 x = sccstack.pop ();
3813
3814 VN_INFO (x)->on_sccstack = false;
3815 scc.safe_push (x);
3816 } while (x != name);
3817
3818 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
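/* The limit is PARAM_SCCVN_MAX_SCC_SIZE, i.e. the sccvn-max-scc-size
   --param.  */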
3819 if (scc.length ()
3820 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3821 {
3822 if (dump_file)
3823 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3824 "SCC size %u exceeding %u\n", scc.length (),
3825 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3826
3827 return false;
3828 }
3829
3830 if (scc.length () > 1)
3831 sort_scc (scc);
3832
3833 process_scc (scc);
3834
3835 return true;
3836 }
3837
3838 /* Depth first search on NAME to discover and process SCC's in the SSA
3839 graph.
3840 Execution of this algorithm relies on the fact that the SCC's are
3841 popped off the stack in topological order.
3842 Returns true if successful, false if we stopped processing SCC's due
3843 to resource constraints. */
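/* This is essentially an iterative formulation of Tarjan's SCC
   algorithm: dfsnum and low play the role of the DFS index and
   low-link values, sccstack holds the candidate SCC members, and
   namevec/itervec emulate the recursion stack.  */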
3844
3845 static bool
3846 DFS (tree name)
3847 {
3848 vec<ssa_op_iter> itervec = vNULL;
3849 vec<tree> namevec = vNULL;
3850 use_operand_p usep = NULL;
3851 gimple *defstmt;
3852 tree use;
3853 ssa_op_iter iter;
3854
3855 start_over:
3856 /* SCC info */
3857 VN_INFO (name)->dfsnum = next_dfs_num++;
3858 VN_INFO (name)->visited = true;
3859 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3860
3861 sccstack.safe_push (name);
3862 VN_INFO (name)->on_sccstack = true;
3863 defstmt = SSA_NAME_DEF_STMT (name);
3864
3865 /* Recursively DFS on our operands, looking for SCC's. */
3866 if (!gimple_nop_p (defstmt))
3867 {
3868 /* Push a new iterator. */
3869 if (gphi *phi = dyn_cast <gphi *> (defstmt))
3870 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
3871 else
3872 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3873 }
3874 else
3875 clear_and_done_ssa_iter (&iter);
3876
3877 while (1)
3878 {
3879 /* If we are done processing uses of a name, go up the stack
3880 of iterators and process SCCs as we found them. */
3881 if (op_iter_done (&iter))
3882 {
3883 /* See if we found an SCC. */
3884 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3885 if (!extract_and_process_scc_for_name (name))
3886 {
3887 namevec.release ();
3888 itervec.release ();
3889 return false;
3890 }
3891
3892 /* Check if we are done. */
3893 if (namevec.is_empty ())
3894 {
3895 namevec.release ();
3896 itervec.release ();
3897 return true;
3898 }
3899
3900 /* Restore the last use walker and continue walking there. */
3901 use = name;
3902 name = namevec.pop ();
3903 memcpy (&iter, &itervec.last (),
3904 sizeof (ssa_op_iter));
3905 itervec.pop ();
3906 goto continue_walking;
3907 }
3908
3909 use = USE_FROM_PTR (usep);
3910
3911 /* Since we handle phi nodes, we will sometimes get
3912 invariants in the use expression. */
3913 if (TREE_CODE (use) == SSA_NAME)
3914 {
3915 if (! (VN_INFO (use)->visited))
3916 {
3917 /* Recurse by pushing the current use walking state on
3918 the stack and starting over. */
3919 itervec.safe_push (iter);
3920 namevec.safe_push (name);
3921 name = use;
3922 goto start_over;
3923
3924 continue_walking:
3925 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3926 VN_INFO (use)->low);
3927 }
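/* An edge to a USE with a smaller DFS number that is still on the
   SCC stack points back into the SCC currently being collected;
   update NAME's low value with USE's DFS number (the Tarjan
   low-link rule).  */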
3928 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3929 && VN_INFO (use)->on_sccstack)
3930 {
3931 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3932 VN_INFO (name)->low);
3933 }
3934 }
3935
3936 usep = op_iter_next_use (&iter);
3937 }
3938 }
3939
3940 /* Allocate a value number table. */
3941
3942 static void
3943 allocate_vn_table (vn_tables_t table)
3944 {
3945 table->phis = new vn_phi_table_type (23);
3946 table->nary = new vn_nary_op_table_type (23);
3947 table->references = new vn_reference_table_type (23);
3948
3949 gcc_obstack_init (&table->nary_obstack);
3950 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
3951 table->references_pool = new object_allocator<vn_reference_s>
3952 ("VN references");
3953 }
3954
3955 /* Free a value number table. */
3956
3957 static void
3958 free_vn_table (vn_tables_t table)
3959 {
3960 delete table->phis;
3961 table->phis = NULL;
3962 delete table->nary;
3963 table->nary = NULL;
3964 delete table->references;
3965 table->references = NULL;
3966 obstack_free (&table->nary_obstack, NULL);
3967 delete table->phis_pool;
3968 delete table->references_pool;
3969 }
3970
3971 static void
3972 init_scc_vn (void)
3973 {
3974 size_t i;
3975 int j;
3976 int *rpo_numbers_temp;
3977
3978 calculate_dominance_info (CDI_DOMINATORS);
3979 mark_dfs_back_edges ();
3980
3981 sccstack.create (0);
3982 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
3983
3984 constant_value_ids = BITMAP_ALLOC (NULL);
3985
3986 next_dfs_num = 1;
3987 next_value_id = 1;
3988
3989 vn_ssa_aux_table.create (num_ssa_names + 1);
3990 /* The create above doesn't actually grow the vector to the right
3991 size; it just preallocates the space to do so. */
3992 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3993 gcc_obstack_init (&vn_ssa_aux_obstack);
3994
3995 shared_lookup_phiargs.create (0);
3996 shared_lookup_references.create (0);
3997 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
3998 rpo_numbers_temp =
3999 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4000 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4001
4002 /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] is the
4003 index of the i'th block in RPO order. We want to map block indices
4004 to RPO numbers, so we need to invert this array. */
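/* For instance, if rpo_numbers_temp were {2, 5, 3}, block 2 would get
   RPO number 0, block 5 number 1 and block 3 number 2.  */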
4005 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4006 rpo_numbers[rpo_numbers_temp[j]] = j;
4007
4008 XDELETE (rpo_numbers_temp);
4009
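/* VN_TOP is the lattice top, used as the initial value number of
   names whose value is not yet known.  */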
4010 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4011
4012 renumber_gimple_stmt_uids ();
4013
4014 /* Create the valid and optimistic value numbering tables. */
4015 valid_info = XCNEW (struct vn_tables_s);
4016 allocate_vn_table (valid_info);
4017 optimistic_info = XCNEW (struct vn_tables_s);
4018 allocate_vn_table (optimistic_info);
4019 current_info = valid_info;
4020
4021 /* Create the VN_INFO structures, and initialize value numbers to
4022 TOP or VARYING for parameters. */
4023 for (i = 1; i < num_ssa_names; i++)
4024 {
4025 tree name = ssa_name (i);
4026 if (!name)
4027 continue;
4028
4029 VN_INFO_GET (name)->valnum = VN_TOP;
4030 VN_INFO (name)->needs_insertion = false;
4031 VN_INFO (name)->expr = NULL;
4032 VN_INFO (name)->value_id = 0;
4033
4034 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4035 continue;
4036
4037 switch (TREE_CODE (SSA_NAME_VAR (name)))
4038 {
4039 case VAR_DECL:
4040 /* Undefined vars keep TOP. */
4041 break;
4042
4043 case PARM_DECL:
4044 /* Parameters are VARYING but we can record a condition
4045 if we know it is a non-NULL pointer. */
4046 VN_INFO (name)->visited = true;
4047 VN_INFO (name)->valnum = name;
4048 if (POINTER_TYPE_P (TREE_TYPE (name))
4049 && nonnull_arg_p (SSA_NAME_VAR (name)))
4050 {
4051 tree ops[2];
4052 ops[0] = name;
4053 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4054 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4055 boolean_true_node, 0);
4056 if (dump_file && (dump_flags & TDF_DETAILS))
4057 {
4058 fprintf (dump_file, "Recording ");
4059 print_generic_expr (dump_file, name, TDF_SLIM);
4060 fprintf (dump_file, " != 0\n");
4061 }
4062 }
4063 break;
4064
4065 case RESULT_DECL:
4066 /* If the result is passed by invisible reference, the default
4067 def is initialized; otherwise it's uninitialized. */
4068 if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4069 {
4070 VN_INFO (name)->visited = true;
4071 VN_INFO (name)->valnum = name;
4072 }
4073 break;
4074
4075 default:
4076 gcc_unreachable ();
4077 }
4078 }
4079 }
4080
4081 void
4082 free_scc_vn (void)
4083 {
4084 size_t i;
4085
4086 delete constant_to_value_id;
4087 constant_to_value_id = NULL;
4088 BITMAP_FREE (constant_value_ids);
4089 shared_lookup_phiargs.release ();
4090 shared_lookup_references.release ();
4091 XDELETEVEC (rpo_numbers);
4092
4093 for (i = 0; i < num_ssa_names; i++)
4094 {
4095 tree name = ssa_name (i);
4096 if (name
4097 && has_VN_INFO (name)
4098 && VN_INFO (name)->needs_insertion)
4099 release_ssa_name (name);
4100 }
4101 obstack_free (&vn_ssa_aux_obstack, NULL);
4102 vn_ssa_aux_table.release ();
4103
4104 sccstack.release ();
4105 free_vn_table (valid_info);
4106 XDELETE (valid_info);
4107 free_vn_table (optimistic_info);
4108 XDELETE (optimistic_info);
4109
4110 BITMAP_FREE (const_parms);
4111 }
4112
4113 /* Set *ID according to RESULT. */
4114
4115 static void
4116 set_value_id_for_result (tree result, unsigned int *id)
4117 {
4118 if (result && TREE_CODE (result) == SSA_NAME)
4119 *id = VN_INFO (result)->value_id;
4120 else if (result && is_gimple_min_invariant (result))
4121 *id = get_or_alloc_constant_value_id (result);
4122 else
4123 *id = get_next_value_id ();
4124 }
4125
4126 /* Set the value ids in the valid hash tables. */
4127
4128 static void
4129 set_hashtable_value_ids (void)
4130 {
4131 vn_nary_op_iterator_type hin;
4132 vn_phi_iterator_type hip;
4133 vn_reference_iterator_type hir;
4134 vn_nary_op_t vno;
4135 vn_reference_t vr;
4136 vn_phi_t vp;
4137
4138 /* Now set the value ids of the things we put in the hash
4139 tables. */
4140
4141 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4142 set_value_id_for_result (vno->result, &vno->value_id);
4143
4144 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4145 set_value_id_for_result (vp->result, &vp->value_id);
4146
4147 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4148 hir)
4149 set_value_id_for_result (vr->result, &vr->value_id);
4150 }
4151
4152 class sccvn_dom_walker : public dom_walker
4153 {
4154 public:
4155 sccvn_dom_walker ()
4156 : dom_walker (CDI_DOMINATORS), fail (false), cond_stack (vNULL) {}
4157
4158 virtual void before_dom_children (basic_block);
4159 virtual void after_dom_children (basic_block);
4160
4161 void record_cond (basic_block,
4162 enum tree_code code, tree lhs, tree rhs, bool value);
4163 void record_conds (basic_block,
4164 enum tree_code code, tree lhs, tree rhs, bool value);
4165
4166 bool fail;
4167 vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4168 cond_stack;
4169 };
4170
4171 /* Record a temporary condition for the BB and its dominated blocks. */
4172
4173 void
4174 sccvn_dom_walker::record_cond (basic_block bb,
4175 enum tree_code code, tree lhs, tree rhs,
4176 bool value)
4177 {
4178 tree ops[2] = { lhs, rhs };
4179 vn_nary_op_t old = NULL;
4180 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4181 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4182 vn_nary_op_t cond
4183 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4184 value
4185 ? boolean_true_node
4186 : boolean_false_node, 0);
4187 if (dump_file && (dump_flags & TDF_DETAILS))
4188 {
4189 fprintf (dump_file, "Recording temporarily ");
4190 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4191 fprintf (dump_file, " %s ", get_tree_code_name (code));
4192 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4193 fprintf (dump_file, " == %s%s\n",
4194 value ? "true" : "false",
4195 old ? " (old entry saved)" : "");
4196 }
4197 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4198 }
4199
4200 /* Record the temporary condition LHS CODE RHS == VALUE, together with
4201 the conditions it dominates, for BB and its dominated blocks. */
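/* For example, recording a_1 < b_2 == true also records
   a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false,
   while recording a_1 == b_2 == true also records <= and >= as true
   and < and > as false.  */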
4202
4203 void
4204 sccvn_dom_walker::record_conds (basic_block bb,
4205 enum tree_code code, tree lhs, tree rhs,
4206 bool value)
4207 {
4208 /* Record the original condition. */
4209 record_cond (bb, code, lhs, rhs, value);
4210
4211 if (!value)
4212 return;
4213
4214 /* Record dominated conditions if the condition is true. Note that
4215 the inversion is already recorded. */
4216 switch (code)
4217 {
4218 case LT_EXPR:
4219 case GT_EXPR:
4220 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4221 record_cond (bb, NE_EXPR, lhs, rhs, true);
4222 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4223 break;
4224
4225 case EQ_EXPR:
4226 record_cond (bb, LE_EXPR, lhs, rhs, true);
4227 record_cond (bb, GE_EXPR, lhs, rhs, true);
4228 record_cond (bb, LT_EXPR, lhs, rhs, false);
4229 record_cond (bb, GT_EXPR, lhs, rhs, false);
4230 break;
4231
4232 default:
4233 break;
4234 }
4235 }
4236
4237 /* Restore expressions and values derived from conditionals. */
4238
4239 void
4240 sccvn_dom_walker::after_dom_children (basic_block bb)
4241 {
4242 while (!cond_stack.is_empty ()
4243 && cond_stack.last ().first == bb)
4244 {
4245 vn_nary_op_t cond = cond_stack.last ().second.first;
4246 vn_nary_op_t old = cond_stack.last ().second.second;
4247 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4248 if (old)
4249 vn_nary_op_insert_into (old, current_info->nary, false);
4250 cond_stack.pop ();
4251 }
4252 }
4253
4254 /* Value number all statements in BB. */
4255
4256 void
4257 sccvn_dom_walker::before_dom_children (basic_block bb)
4258 {
4259 edge e;
4260 edge_iterator ei;
4261
4262 if (fail)
4263 return;
4264
4265 /* If any of the predecessor edges that do not come from blocks dominated
4266 by us are still marked as possibly executable, consider this block
4267 reachable. */
4268 bool reachable = bb == ENTRY_BLOCK_PTR_FOR_FN (cfun);
4269 FOR_EACH_EDGE (e, ei, bb->preds)
4270 if (!dominated_by_p (CDI_DOMINATORS, e->src, bb))
4271 reachable |= (e->flags & EDGE_EXECUTABLE);
4272
4273 /* If the block is not reachable, all outgoing edges are not
4274 executable. Neither are incoming edges whose src is dominated by us. */
4275 if (!reachable)
4276 {
4277 if (dump_file && (dump_flags & TDF_DETAILS))
4278 fprintf (dump_file, "Marking all outgoing edges of unreachable "
4279 "BB %d as not executable\n", bb->index);
4280
4281 FOR_EACH_EDGE (e, ei, bb->succs)
4282 e->flags &= ~EDGE_EXECUTABLE;
4283
4284 FOR_EACH_EDGE (e, ei, bb->preds)
4285 {
4286 if (dominated_by_p (CDI_DOMINATORS, e->src, bb))
4287 {
4288 if (dump_file && (dump_flags & TDF_DETAILS))
4289 fprintf (dump_file, "Marking backedge from BB %d into "
4290 "unreachable BB %d as not executable\n",
4291 e->src->index, bb->index);
4292 e->flags &= ~EDGE_EXECUTABLE;
4293 }
4294 }
4295 return;
4296 }
4297
4298 if (dump_file && (dump_flags & TDF_DETAILS))
4299 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4300
4301 /* If we have a single predecessor, record the equivalence from a
4302 possible condition on the predecessor edge. */
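/* For example, if the single predecessor ends in if (a_1 < b_2) and we
   are reached over its true edge, this records a_1 < b_2 == true and,
   via the inverted comparison, a_1 >= b_2 == false for this block and
   the blocks it dominates.  */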
4303 if (single_pred_p (bb))
4304 {
4305 edge e = single_pred_edge (bb);
4306 /* Check if there are multiple executable successor edges in
4307 the source block. Otherwise there is no additional info
4308 to be recorded. */
4309 edge e2;
4310 FOR_EACH_EDGE (e2, ei, e->src->succs)
4311 if (e2 != e
4312 && e2->flags & EDGE_EXECUTABLE)
4313 break;
4314 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4315 {
4316 gimple *stmt = last_stmt (e->src);
4317 if (stmt
4318 && gimple_code (stmt) == GIMPLE_COND)
4319 {
4320 enum tree_code code = gimple_cond_code (stmt);
4321 tree lhs = gimple_cond_lhs (stmt);
4322 tree rhs = gimple_cond_rhs (stmt);
4323 record_conds (bb, code, lhs, rhs,
4324 (e->flags & EDGE_TRUE_VALUE) != 0);
4325 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4326 if (code != ERROR_MARK)
4327 record_conds (bb, code, lhs, rhs,
4328 (e->flags & EDGE_TRUE_VALUE) == 0);
4329 }
4330 }
4331 }
4332
4333 /* Value-number all defs in the basic-block. */
4334 for (gphi_iterator gsi = gsi_start_phis (bb);
4335 !gsi_end_p (gsi); gsi_next (&gsi))
4336 {
4337 gphi *phi = gsi.phi ();
4338 tree res = PHI_RESULT (phi);
4339 if (!VN_INFO (res)->visited
4340 && !DFS (res))
4341 {
4342 fail = true;
4343 return;
4344 }
4345 }
4346 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4347 !gsi_end_p (gsi); gsi_next (&gsi))
4348 {
4349 ssa_op_iter i;
4350 tree op;
4351 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4352 if (!VN_INFO (op)->visited
4353 && !DFS (op))
4354 {
4355 fail = true;
4356 return;
4357 }
4358 }
4359
4360 /* Finally look at the last stmt. */
4361 gimple *stmt = last_stmt (bb);
4362 if (!stmt)
4363 return;
4364
4365 enum gimple_code code = gimple_code (stmt);
4366 if (code != GIMPLE_COND
4367 && code != GIMPLE_SWITCH
4368 && code != GIMPLE_GOTO)
4369 return;
4370
4371 if (dump_file && (dump_flags & TDF_DETAILS))
4372 {
4373 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4374 print_gimple_stmt (dump_file, stmt, 0, 0);
4375 }
4376
4377 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4378 if value-numbering can prove they are not reachable. Handling
4379 computed gotos is also possible. */
4380 tree val;
4381 switch (code)
4382 {
4383 case GIMPLE_COND:
4384 {
4385 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4386 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4387 val = gimple_simplify (gimple_cond_code (stmt),
4388 boolean_type_node, lhs, rhs,
4389 NULL, vn_valueize);
4390 /* If that didn't simplify to a constant, see if we have recorded
4391 temporary expressions from taken edges. */
4392 if (!val || TREE_CODE (val) != INTEGER_CST)
4393 {
4394 tree ops[2];
4395 ops[0] = lhs;
4396 ops[1] = rhs;
4397 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4398 boolean_type_node, ops, NULL);
4399 }
4400 break;
4401 }
4402 case GIMPLE_SWITCH:
4403 val = gimple_switch_index (as_a <gswitch *> (stmt));
4404 break;
4405 case GIMPLE_GOTO:
4406 val = gimple_goto_dest (stmt);
4407 break;
4408 default:
4409 gcc_unreachable ();
4410 }
4411 if (!val)
4412 return;
4413
4414 edge taken = find_taken_edge (bb, vn_valueize (val));
4415 if (!taken)
4416 return;
4417
4418 if (dump_file && (dump_flags & TDF_DETAILS))
4419 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4420 "not executable\n", bb->index, bb->index, taken->dest->index);
4421
4422 FOR_EACH_EDGE (e, ei, bb->succs)
4423 if (e != taken)
4424 e->flags &= ~EDGE_EXECUTABLE;
4425 }
4426
4427 /* Do SCCVN. Returns true if it finished, false if we bailed out
4428 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
4429 the kind of alias oracle walking done during the VN process. */
4430
4431 bool
4432 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4433 {
4434 basic_block bb;
4435 size_t i;
4436
4437 default_vn_walk_kind = default_vn_walk_kind_;
4438
4439 init_scc_vn ();
4440
4441 /* Collect pointers we know point to readonly memory. */
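/* In the "fn spec" attribute string, character 0 describes the return
   value and character I the I'th parameter; 'R' and 'r' mean the
   memory the parameter points to is only read, so its default def can
   be treated as pointing to readonly memory.  */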
4442 const_parms = BITMAP_ALLOC (NULL);
4443 tree fnspec = lookup_attribute ("fn spec",
4444 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4445 if (fnspec)
4446 {
4447 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4448 i = 1;
4449 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4450 arg; arg = DECL_CHAIN (arg), ++i)
4451 {
4452 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4453 break;
4454 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4455 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4456 {
4457 tree name = ssa_default_def (cfun, arg);
4458 if (name)
4459 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4460 }
4461 }
4462 }
4463
4464 /* Mark all edges as possibly executable. */
4465 FOR_ALL_BB_FN (bb, cfun)
4466 {
4467 edge_iterator ei;
4468 edge e;
4469 FOR_EACH_EDGE (e, ei, bb->succs)
4470 e->flags |= EDGE_EXECUTABLE;
4471 }
4472
4473 /* Walk all blocks in dominator order, value-numbering the SSA defs
4474 of stmts and deciding whether outgoing edges are not executable. */
4475 sccvn_dom_walker walker;
4476 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4477 if (walker.fail)
4478 {
4479 free_scc_vn ();
4480 return false;
4481 }
4482
4483 /* Initialize the value ids and prune out remaining VN_TOPs
4484 from dead code. */
4485 for (i = 1; i < num_ssa_names; ++i)
4486 {
4487 tree name = ssa_name (i);
4488 vn_ssa_aux_t info;
4489 if (!name)
4490 continue;
4491 info = VN_INFO (name);
4492 if (!info->visited)
4493 info->valnum = name;
4494 if (info->valnum == name
4495 || info->valnum == VN_TOP)
4496 info->value_id = get_next_value_id ();
4497 else if (is_gimple_min_invariant (info->valnum))
4498 info->value_id = get_or_alloc_constant_value_id (info->valnum);
4499 }
4500
4501 /* Propagate value ids: each name gets the value id of its value. */
4502 for (i = 1; i < num_ssa_names; ++i)
4503 {
4504 tree name = ssa_name (i);
4505 vn_ssa_aux_t info;
4506 if (!name)
4507 continue;
4508 info = VN_INFO (name);
4509 if (TREE_CODE (info->valnum) == SSA_NAME
4510 && info->valnum != name
4511 && info->value_id != VN_INFO (info->valnum)->value_id)
4512 info->value_id = VN_INFO (info->valnum)->value_id;
4513 }
4514
4515 set_hashtable_value_ids ();
4516
4517 if (dump_file && (dump_flags & TDF_DETAILS))
4518 {
4519 fprintf (dump_file, "Value numbers:\n");
4520 for (i = 0; i < num_ssa_names; i++)
4521 {
4522 tree name = ssa_name (i);
4523 if (name
4524 && VN_INFO (name)->visited
4525 && SSA_VAL (name) != name)
4526 {
4527 print_generic_expr (dump_file, name, 0);
4528 fprintf (dump_file, " = ");
4529 print_generic_expr (dump_file, SSA_VAL (name), 0);
4530 fprintf (dump_file, "\n");
4531 }
4532 }
4533 }
4534
4535 return true;
4536 }
4537
4538 /* Return the maximum value id we have ever seen. */
4539
4540 unsigned int
4541 get_max_value_id (void)
4542 {
4543 return next_value_id;
4544 }
4545
4546 /* Return the next unique value id. */
4547
4548 unsigned int
4549 get_next_value_id (void)
4550 {
4551 return next_value_id++;
4552 }
4553
4554
4555 /* Compare two expressions E1 and E2 and return true if they are equal. */
4556
4557 bool
4558 expressions_equal_p (tree e1, tree e2)
4559 {
4560 /* The obvious case. */
4561 if (e1 == e2)
4562 return true;
4563
4564 /* If only one of them is null, they cannot be equal. */
4565 if (!e1 || !e2)
4566 return false;
4567
4568 /* Now perform the actual comparison. */
4569 if (TREE_CODE (e1) == TREE_CODE (e2)
4570 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4571 return true;
4572
4573 return false;
4574 }
4575
4576
4577 /* Return true if the nary operation NARY may trap. This is a copy
4578 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4579
4580 bool
4581 vn_nary_may_trap (vn_nary_op_t nary)
4582 {
4583 tree type;
4584 tree rhs2 = NULL_TREE;
4585 bool honor_nans = false;
4586 bool honor_snans = false;
4587 bool fp_operation = false;
4588 bool honor_trapv = false;
4589 bool handled, ret;
4590 unsigned i;
4591
4592 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4593 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4594 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4595 {
4596 type = nary->type;
4597 fp_operation = FLOAT_TYPE_P (type);
4598 if (fp_operation)
4599 {
4600 honor_nans = flag_trapping_math && !flag_finite_math_only;
4601 honor_snans = flag_signaling_nans != 0;
4602 }
4603 else if (INTEGRAL_TYPE_P (type)
4604 && TYPE_OVERFLOW_TRAPS (type))
4605 honor_trapv = true;
4606 }
4607 if (nary->length >= 2)
4608 rhs2 = nary->op[1];
4609 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4610 honor_trapv,
4611 honor_nans, honor_snans, rhs2,
4612 &handled);
4613 if (handled
4614 && ret)
4615 return true;
4616
4617 for (i = 0; i < nary->length; ++i)
4618 if (tree_could_trap_p (nary->op[i]))
4619 return true;
4620
4621 return false;
4622 }