1 /* SCC value numbering for trees
2 Copyright (C) 2006-2017 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
73
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
79
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
83
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
88
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
93
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
97
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
102
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
106
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
109
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
115
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
121
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
129 */
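
/* Illustrative sketch, not part of the original sources: a simple loop
   counter shows the kind of SSA cycle the optimistic iteration above is
   about.  In hypothetical GIMPLE

     # i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 1;

   i_1 and i_2 form an SCC in the SSA graph and are therefore iterated
   together using the optimistic table until their value numbers stop
   changing, while operands defined outside the cycle (the constants 0
   and 1 here) already have final value numbers when the SCC is popped
   off the stack.  */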
130
131
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
136
137 /* vn_nary_op hashtable helpers. */
138
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 {
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144 };
145
146 /* Return the computed hashcode for nary operation P1. */
147
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 {
151 return vno1->hashcode;
152 }
153
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
156
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 {
160 return vn_nary_op_eq (vno1, vno2);
161 }
162
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165
166
167 /* vn_phi hashtable helpers. */
168
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
173 {
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 static inline void remove (vn_phi_s *);
177 };
178
179 /* Return the computed hashcode for phi operation P1. */
180
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 {
184 return vp1->hashcode;
185 }
186
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 {
192 return vn_phi_eq (vp1, vp2);
193 }
194
195 /* Free a phi operation structure PHI. */
196
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
199 {
200 phi->phiargs.release ();
201 }
202
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
205
206
207 /* Compare two reference operands P1 and P2 for equality. Return true if
208 they are equal, and false otherwise. */
209
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
212 {
213 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
215
216 return (vro1->opcode == vro2->opcode
217 /* We do not care for differences in type qualification. */
218 && (vro1->type == vro2->type
219 || (vro1->type && vro2->type
220 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 TYPE_MAIN_VARIANT (vro2->type))))
222 && expressions_equal_p (vro1->op0, vro2->op0)
223 && expressions_equal_p (vro1->op1, vro2->op1)
224 && expressions_equal_p (vro1->op2, vro2->op2));
225 }
226
227 /* Free a reference operation structure VR. */
228
229 static inline void
230 free_reference (vn_reference_s *vr)
231 {
232 vr->operands.release ();
233 }
234
235
236 /* vn_reference hashtable helpers. */
237
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
239 {
240 static inline hashval_t hash (const vn_reference_s *);
241 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242 static inline void remove (vn_reference_s *);
243 };
244
245 /* Return the hashcode for a given reference operation P1. */
246
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
249 {
250 return vr1->hashcode;
251 }
252
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
255 {
256 return vn_reference_eq (v, c);
257 }
258
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
261 {
262 free_reference (v);
263 }
264
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
267
268
269 /* The set of hashtables and alloc_pool's for their items. */
270
271 typedef struct vn_tables_s
272 {
273 vn_nary_op_table_type *nary;
274 vn_phi_table_type *phis;
275 vn_reference_table_type *references;
276 struct obstack nary_obstack;
277 object_allocator<vn_phi_s> *phis_pool;
278 object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
280
281
282 /* vn_constant hashtable helpers. */
283
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
285 {
286 static inline hashval_t hash (const vn_constant_s *);
287 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
288 };
289
290 /* Hash table hash function for vn_constant_t. */
291
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
294 {
295 return vc1->hashcode;
296 }
297
298 /* Hash table equality function for vn_constant_t. */
299
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
302 {
303 if (vc1->hashcode != vc2->hashcode)
304 return false;
305
306 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
307 }
308
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
311
312
313 /* Valid hashtables storing information we have proven to be
314 correct. */
315
316 static vn_tables_t valid_info;
317
318 /* Optimistic hashtables storing information we are making assumptions about
319 during iterations. */
320
321 static vn_tables_t optimistic_info;
322
323 /* Pointer to the set of hashtables that is currently being used.
324 Should always point to either the optimistic_info, or the
325 valid_info. */
326
327 static vn_tables_t current_info;
328
329
330 /* Reverse post order index for each basic block. */
331
332 static int *rpo_numbers;
333
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
335
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337 during elimination which will value-number the VDEF to the
338 associated VUSE (but not substitute in the whole lattice). */
339
340 static inline tree
341 vuse_ssa_val (tree x)
342 {
343 if (!x)
344 return NULL_TREE;
345
346 do
347 {
348 tree tem = SSA_VAL (x);
349 /* stmt walking can walk over a backedge and reach code we didn't
350 value-number yet. */
351 if (tem == VN_TOP)
352 return x;
353 x = tem;
354 }
355 while (SSA_NAME_IN_FREE_LIST (x));
356
357 return x;
358 }
359
360 /* This represents the top of the VN lattice, which is the universal
361 value. */
362
363 tree VN_TOP;
364
365 /* Unique counter for our value ids. */
366
367 static unsigned int next_value_id;
368
369 /* Next DFS number and the stack for strongly connected component
370 detection. */
371
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
374
375
376
377 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
378 are allocated on an obstack for locality reasons, and to free them
379 without looping over the vec. */
380
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
383
384 /* Return whether there is value numbering information for a given SSA name. */
385
386 bool
387 has_VN_INFO (tree name)
388 {
389 if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390 return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391 return false;
392 }
393
394 /* Return the value numbering information for a given SSA name. */
395
396 vn_ssa_aux_t
397 VN_INFO (tree name)
398 {
399 vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400 gcc_checking_assert (res);
401 return res;
402 }
403
404 /* Set the value numbering info for a given SSA name to a given
405 value. */
406
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
409 {
410 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
411 }
412
413 /* Initialize the value numbering info for a given SSA name.
414 This should be called just once for every SSA name. */
415
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
418 {
419 vn_ssa_aux_t newinfo;
420
421 gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425 if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426 vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427 vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428 return newinfo;
429 }
430
431
432 /* Return the vn_kind the expression computed by the stmt should be
433 associated with. */
434
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
437 {
438 switch (gimple_code (stmt))
439 {
440 case GIMPLE_CALL:
441 return VN_REFERENCE;
442 case GIMPLE_PHI:
443 return VN_PHI;
444 case GIMPLE_ASSIGN:
445 {
446 enum tree_code code = gimple_assign_rhs_code (stmt);
447 tree rhs1 = gimple_assign_rhs1 (stmt);
448 switch (get_gimple_rhs_class (code))
449 {
450 case GIMPLE_UNARY_RHS:
451 case GIMPLE_BINARY_RHS:
452 case GIMPLE_TERNARY_RHS:
453 return VN_NARY;
454 case GIMPLE_SINGLE_RHS:
455 switch (TREE_CODE_CLASS (code))
456 {
457 case tcc_reference:
458 /* VOP-less references can go through the unary case. */
459 if ((code == REALPART_EXPR
460 || code == IMAGPART_EXPR
461 || code == VIEW_CONVERT_EXPR
462 || code == BIT_FIELD_REF)
463 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 return VN_NARY;
465
466 /* Fallthrough. */
467 case tcc_declaration:
468 return VN_REFERENCE;
469
470 case tcc_constant:
471 return VN_CONSTANT;
472
473 default:
474 if (code == ADDR_EXPR)
475 return (is_gimple_min_invariant (rhs1)
476 ? VN_CONSTANT : VN_REFERENCE);
477 else if (code == CONSTRUCTOR)
478 return VN_NARY;
479 return VN_NONE;
480 }
481 default:
482 return VN_NONE;
483 }
484 }
485 default:
486 return VN_NONE;
487 }
488 }
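
/* A few concrete cases for the mapping above (hypothetical GIMPLE, not
   taken from this file):

     x_1 = y_2 + z_3;         -> VN_NARY       (binary RHS)
     x_1 = *p_2;              -> VN_REFERENCE  (memory reference)
     x_1 = PHI <a_2, b_3>;    -> VN_PHI
     x_1 = foo (y_2);         -> VN_REFERENCE  (calls use the reference
                                                table)  */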
489
490 /* Lookup a value id for CONSTANT and return it. If it does not
491 exist returns 0. */
492
493 unsigned int
494 get_constant_value_id (tree constant)
495 {
496 vn_constant_s **slot;
497 struct vn_constant_s vc;
498
499 vc.hashcode = vn_hash_constant_with_type (constant);
500 vc.constant = constant;
501 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502 if (slot)
503 return (*slot)->value_id;
504 return 0;
505 }
506
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508 new one and return it. If it does exist, return it. */
509
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
512 {
513 vn_constant_s **slot;
514 struct vn_constant_s vc;
515 vn_constant_t vcp;
516
517 vc.hashcode = vn_hash_constant_with_type (constant);
518 vc.constant = constant;
519 slot = constant_to_value_id->find_slot (&vc, INSERT);
520 if (*slot)
521 return (*slot)->value_id;
522
523 vcp = XNEW (struct vn_constant_s);
524 vcp->hashcode = vc.hashcode;
525 vcp->constant = constant;
526 vcp->value_id = get_next_value_id ();
527 *slot = vcp;
528 bitmap_set_bit (constant_value_ids, vcp->value_id);
529 return vcp->value_id;
530 }
531
532 /* Return true if V is a value id for a constant. */
533
534 bool
535 value_id_constant_p (unsigned int v)
536 {
537 return bitmap_bit_p (constant_value_ids, v);
538 }
539
540 /* Compute the hash for a reference operand VRO1. */
541
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
544 {
545 hstate.add_int (vro1->opcode);
546 if (vro1->op0)
547 inchash::add_expr (vro1->op0, hstate);
548 if (vro1->op1)
549 inchash::add_expr (vro1->op1, hstate);
550 if (vro1->op2)
551 inchash::add_expr (vro1->op2, hstate);
552 }
553
554 /* Compute a hash for the reference operation VR1 and return it. */
555
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
558 {
559 inchash::hash hstate;
560 hashval_t result;
561 int i;
562 vn_reference_op_t vro;
563 poly_int64 off = -1;
564 bool deref = false;
565
566 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
567 {
568 if (vro->opcode == MEM_REF)
569 deref = true;
570 else if (vro->opcode != ADDR_EXPR)
571 deref = false;
572 if (maybe_ne (vro->off, -1))
573 {
574 if (known_eq (off, -1))
575 off = 0;
576 off += vro->off;
577 }
578 else
579 {
580 if (maybe_ne (off, -1)
581 && maybe_ne (off, 0))
582 hstate.add_poly_int (off);
583 off = -1;
584 if (deref
585 && vro->opcode == ADDR_EXPR)
586 {
587 if (vro->op0)
588 {
589 tree op = TREE_OPERAND (vro->op0, 0);
590 hstate.add_int (TREE_CODE (op));
591 inchash::add_expr (op, hstate);
592 }
593 }
594 else
595 vn_reference_op_compute_hash (vro, hstate);
596 }
597 }
598 result = hstate.end ();
599 /* ??? We would ICE later if we hash instead of adding that in. */
600 if (vr1->vuse)
601 result += SSA_NAME_VERSION (vr1->vuse);
602
603 return result;
604 }
605
606 /* Return true if reference operations VR1 and VR2 are equivalent. This
607 means they have the same set of operands and vuses. */
608
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
611 {
612 unsigned i, j;
613
614 /* Early out if this is not a hash collision. */
615 if (vr1->hashcode != vr2->hashcode)
616 return false;
617
618 /* The VOP needs to be the same. */
619 if (vr1->vuse != vr2->vuse)
620 return false;
621
622 /* If the operands are the same we are done. */
623 if (vr1->operands == vr2->operands)
624 return true;
625
626 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627 return false;
628
629 if (INTEGRAL_TYPE_P (vr1->type)
630 && INTEGRAL_TYPE_P (vr2->type))
631 {
632 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 return false;
634 }
635 else if (INTEGRAL_TYPE_P (vr1->type)
636 && (TYPE_PRECISION (vr1->type)
637 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638 return false;
639 else if (INTEGRAL_TYPE_P (vr2->type)
640 && (TYPE_PRECISION (vr2->type)
641 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642 return false;
643
644 i = 0;
645 j = 0;
646 do
647 {
648 poly_int64 off1 = 0, off2 = 0;
649 vn_reference_op_t vro1, vro2;
650 vn_reference_op_s tem1, tem2;
651 bool deref1 = false, deref2 = false;
652 for (; vr1->operands.iterate (i, &vro1); i++)
653 {
654 if (vro1->opcode == MEM_REF)
655 deref1 = true;
656 /* Do not look through a storage order barrier. */
657 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 return false;
659 if (known_eq (vro1->off, -1))
660 break;
661 off1 += vro1->off;
662 }
663 for (; vr2->operands.iterate (j, &vro2); j++)
664 {
665 if (vro2->opcode == MEM_REF)
666 deref2 = true;
667 /* Do not look through a storage order barrier. */
668 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 return false;
670 if (known_eq (vro2->off, -1))
671 break;
672 off2 += vro2->off;
673 }
674 if (maybe_ne (off1, off2))
675 return false;
676 if (deref1 && vro1->opcode == ADDR_EXPR)
677 {
678 memset (&tem1, 0, sizeof (tem1));
679 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 tem1.type = TREE_TYPE (tem1.op0);
681 tem1.opcode = TREE_CODE (tem1.op0);
682 vro1 = &tem1;
683 deref1 = false;
684 }
685 if (deref2 && vro2->opcode == ADDR_EXPR)
686 {
687 memset (&tem2, 0, sizeof (tem2));
688 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 tem2.type = TREE_TYPE (tem2.op0);
690 tem2.opcode = TREE_CODE (tem2.op0);
691 vro2 = &tem2;
692 deref2 = false;
693 }
694 if (deref1 != deref2)
695 return false;
696 if (!vn_reference_op_eq (vro1, vro2))
697 return false;
698 ++j;
699 ++i;
700 }
701 while (vr1->operands.length () != i
702 || vr2->operands.length () != j);
703
704 return true;
705 }
706
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708 vn_reference_op_s's. */
709
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
712 {
713 if (TREE_CODE (ref) == TARGET_MEM_REF)
714 {
715 vn_reference_op_s temp;
716
717 result->reserve (3);
718
719 memset (&temp, 0, sizeof (temp));
720 temp.type = TREE_TYPE (ref);
721 temp.opcode = TREE_CODE (ref);
722 temp.op0 = TMR_INDEX (ref);
723 temp.op1 = TMR_STEP (ref);
724 temp.op2 = TMR_OFFSET (ref);
725 temp.off = -1;
726 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727 temp.base = MR_DEPENDENCE_BASE (ref);
728 result->quick_push (temp);
729
730 memset (&temp, 0, sizeof (temp));
731 temp.type = NULL_TREE;
732 temp.opcode = ERROR_MARK;
733 temp.op0 = TMR_INDEX2 (ref);
734 temp.off = -1;
735 result->quick_push (temp);
736
737 memset (&temp, 0, sizeof (temp));
738 temp.type = NULL_TREE;
739 temp.opcode = TREE_CODE (TMR_BASE (ref));
740 temp.op0 = TMR_BASE (ref);
741 temp.off = -1;
742 result->quick_push (temp);
743 return;
744 }
745
746 /* For non-calls, store the information that makes up the address. */
747 tree orig = ref;
748 while (ref)
749 {
750 vn_reference_op_s temp;
751
752 memset (&temp, 0, sizeof (temp));
753 temp.type = TREE_TYPE (ref);
754 temp.opcode = TREE_CODE (ref);
755 temp.off = -1;
756
757 switch (temp.opcode)
758 {
759 case MODIFY_EXPR:
760 temp.op0 = TREE_OPERAND (ref, 1);
761 break;
762 case WITH_SIZE_EXPR:
763 temp.op0 = TREE_OPERAND (ref, 1);
764 temp.off = 0;
765 break;
766 case MEM_REF:
767 /* The base address gets its own vn_reference_op_s structure. */
768 temp.op0 = TREE_OPERAND (ref, 1);
769 {
770 offset_int off = mem_ref_offset (ref);
771 if (wi::fits_shwi_p (off))
772 temp.off = off.to_shwi ();
773 }
774 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
775 temp.base = MR_DEPENDENCE_BASE (ref);
776 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
777 break;
778 case BIT_FIELD_REF:
779 /* Record bits, position and storage order. */
780 temp.op0 = TREE_OPERAND (ref, 1);
781 temp.op1 = TREE_OPERAND (ref, 2);
782 if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
783 {
784 HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
785 if (off % BITS_PER_UNIT == 0)
786 temp.off = off / BITS_PER_UNIT;
787 }
788 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
789 break;
790 case COMPONENT_REF:
791 /* The field decl is enough to unambiguously specify the field;
792 a matching type is not necessary and a mismatching type
793 is always a spurious difference. */
794 temp.type = NULL_TREE;
795 temp.op0 = TREE_OPERAND (ref, 1);
796 temp.op1 = TREE_OPERAND (ref, 2);
797 {
798 tree this_offset = component_ref_field_offset (ref);
799 if (this_offset
800 && poly_int_tree_p (this_offset))
801 {
802 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
803 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
804 {
805 poly_offset_int off
806 = (wi::to_poly_offset (this_offset)
807 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
808 /* Prohibit value-numbering zero offset components
809 of addresses the same before the pass folding
810 __builtin_object_size had a chance to run
811 (checking cfun->after_inlining does the
812 trick here). */
813 if (TREE_CODE (orig) != ADDR_EXPR
814 || maybe_ne (off, 0)
815 || cfun->after_inlining)
816 off.to_shwi (&temp.off);
817 }
818 }
819 }
820 break;
821 case ARRAY_RANGE_REF:
822 case ARRAY_REF:
823 {
824 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
825 /* Record index as operand. */
826 temp.op0 = TREE_OPERAND (ref, 1);
827 /* Always record lower bounds and element size. */
828 temp.op1 = array_ref_low_bound (ref);
829 /* But record element size in units of the type alignment. */
830 temp.op2 = TREE_OPERAND (ref, 3);
831 temp.align = eltype->type_common.align;
832 if (! temp.op2)
833 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
834 size_int (TYPE_ALIGN_UNIT (eltype)));
835 if (poly_int_tree_p (temp.op0)
836 && poly_int_tree_p (temp.op1)
837 && TREE_CODE (temp.op2) == INTEGER_CST)
838 {
839 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
840 - wi::to_poly_offset (temp.op1))
841 * wi::to_offset (temp.op2)
842 * vn_ref_op_align_unit (&temp));
843 off.to_shwi (&temp.off);
844 }
845 }
846 break;
847 case VAR_DECL:
848 if (DECL_HARD_REGISTER (ref))
849 {
850 temp.op0 = ref;
851 break;
852 }
853 /* Fallthru. */
854 case PARM_DECL:
855 case CONST_DECL:
856 case RESULT_DECL:
857 /* Canonicalize decls to MEM[&decl] which is what we end up with
858 when valueizing MEM[ptr] with ptr = &decl. */
859 temp.opcode = MEM_REF;
860 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
861 temp.off = 0;
862 result->safe_push (temp);
863 temp.opcode = ADDR_EXPR;
864 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
865 temp.type = TREE_TYPE (temp.op0);
866 temp.off = -1;
867 break;
868 case STRING_CST:
869 case INTEGER_CST:
870 case COMPLEX_CST:
871 case VECTOR_CST:
872 case REAL_CST:
873 case FIXED_CST:
874 case CONSTRUCTOR:
875 case SSA_NAME:
876 temp.op0 = ref;
877 break;
878 case ADDR_EXPR:
879 if (is_gimple_min_invariant (ref))
880 {
881 temp.op0 = ref;
882 break;
883 }
884 break;
885 /* These are only interesting for their operands, their
886 existence, and their type. They will never be the last
887 ref in the chain of references (i.e. they require an
888 operand), so we don't have to put anything
889 for op* as it will be handled by the iteration. */
890 case REALPART_EXPR:
891 temp.off = 0;
892 break;
893 case VIEW_CONVERT_EXPR:
894 temp.off = 0;
895 temp.reverse = storage_order_barrier_p (ref);
896 break;
897 case IMAGPART_EXPR:
898 /* This is only interesting for its constant offset. */
899 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
900 break;
901 default:
902 gcc_unreachable ();
903 }
904 result->safe_push (temp);
905
906 if (REFERENCE_CLASS_P (ref)
907 || TREE_CODE (ref) == MODIFY_EXPR
908 || TREE_CODE (ref) == WITH_SIZE_EXPR
909 || (TREE_CODE (ref) == ADDR_EXPR
910 && !is_gimple_min_invariant (ref)))
911 ref = TREE_OPERAND (ref, 0);
912 else
913 ref = NULL_TREE;
914 }
915 }
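
/* As a sketch of the decomposition above (hypothetical example, not from
   the original sources), a load from p_1->f, i.e. the tree
   COMPONENT_REF <MEM_REF <p_1, 0>, f>, is recorded outermost-first as

     { COMPONENT_REF, op0 = FIELD_DECL f, off = byte offset of f }
     { MEM_REF,       op0 = 0,            off = 0 }
     { SSA_NAME,      op0 = p_1,          off = -1 }

   assuming f sits at a byte-aligned offset, while a direct access to a
   decl x is canonicalized to a MEM_REF plus ADDR_EXPR pair so that it
   matches the result of valueizing MEM[ptr] with ptr = &x.  */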
916
917 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
918 operands in *OPS, the reference alias set SET and the reference type TYPE.
919 Return true if something useful was produced. */
920
921 bool
922 ao_ref_init_from_vn_reference (ao_ref *ref,
923 alias_set_type set, tree type,
924 vec<vn_reference_op_s> ops)
925 {
926 vn_reference_op_t op;
927 unsigned i;
928 tree base = NULL_TREE;
929 tree *op0_p = &base;
930 poly_offset_int offset = 0;
931 poly_offset_int max_size;
932 poly_offset_int size = -1;
933 tree size_tree = NULL_TREE;
934 alias_set_type base_alias_set = -1;
935
936 /* First get the final access size from just the outermost expression. */
937 op = &ops[0];
938 if (op->opcode == COMPONENT_REF)
939 size_tree = DECL_SIZE (op->op0);
940 else if (op->opcode == BIT_FIELD_REF)
941 size_tree = op->op0;
942 else
943 {
944 machine_mode mode = TYPE_MODE (type);
945 if (mode == BLKmode)
946 size_tree = TYPE_SIZE (type);
947 else
948 size = GET_MODE_BITSIZE (mode);
949 }
950 if (size_tree != NULL_TREE
951 && poly_int_tree_p (size_tree))
952 size = wi::to_poly_offset (size_tree);
953
954 /* Initially, maxsize is the same as the accessed element size.
955 In the following it will only grow (or become -1). */
956 max_size = size;
957
958 /* Compute cumulative bit-offset for nested component-refs and array-refs,
959 and find the ultimate containing object. */
960 FOR_EACH_VEC_ELT (ops, i, op)
961 {
962 switch (op->opcode)
963 {
964 /* These may be in the reference ops, but we cannot do anything
965 sensible with them here. */
966 case ADDR_EXPR:
967 /* Apart from ADDR_EXPR arguments to MEM_REF. */
968 if (base != NULL_TREE
969 && TREE_CODE (base) == MEM_REF
970 && op->op0
971 && DECL_P (TREE_OPERAND (op->op0, 0)))
972 {
973 vn_reference_op_t pop = &ops[i-1];
974 base = TREE_OPERAND (op->op0, 0);
975 if (known_eq (pop->off, -1))
976 {
977 max_size = -1;
978 offset = 0;
979 }
980 else
981 offset += pop->off * BITS_PER_UNIT;
982 op0_p = NULL;
983 break;
984 }
985 /* Fallthru. */
986 case CALL_EXPR:
987 return false;
988
989 /* Record the base objects. */
990 case MEM_REF:
991 base_alias_set = get_deref_alias_set (op->op0);
992 *op0_p = build2 (MEM_REF, op->type,
993 NULL_TREE, op->op0);
994 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
995 MR_DEPENDENCE_BASE (*op0_p) = op->base;
996 op0_p = &TREE_OPERAND (*op0_p, 0);
997 break;
998
999 case VAR_DECL:
1000 case PARM_DECL:
1001 case RESULT_DECL:
1002 case SSA_NAME:
1003 *op0_p = op->op0;
1004 op0_p = NULL;
1005 break;
1006
1007 /* And now the usual component-reference style ops. */
1008 case BIT_FIELD_REF:
1009 offset += wi::to_offset (op->op1);
1010 break;
1011
1012 case COMPONENT_REF:
1013 {
1014 tree field = op->op0;
1015 /* We do not have a complete COMPONENT_REF tree here so we
1016 cannot use component_ref_field_offset. Do the interesting
1017 parts manually. */
1018 tree this_offset = DECL_FIELD_OFFSET (field);
1019
1020 if (op->op1 || !poly_int_tree_p (this_offset))
1021 max_size = -1;
1022 else
1023 {
1024 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1025 << LOG2_BITS_PER_UNIT);
1026 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1027 offset += woffset;
1028 }
1029 break;
1030 }
1031
1032 case ARRAY_RANGE_REF:
1033 case ARRAY_REF:
1034 /* We recorded the lower bound and the element size. */
1035 if (!poly_int_tree_p (op->op0)
1036 || !poly_int_tree_p (op->op1)
1037 || TREE_CODE (op->op2) != INTEGER_CST)
1038 max_size = -1;
1039 else
1040 {
1041 poly_offset_int woffset
1042 = wi::sext (wi::to_poly_offset (op->op0)
1043 - wi::to_poly_offset (op->op1),
1044 TYPE_PRECISION (TREE_TYPE (op->op0)));
1045 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1046 woffset <<= LOG2_BITS_PER_UNIT;
1047 offset += woffset;
1048 }
1049 break;
1050
1051 case REALPART_EXPR:
1052 break;
1053
1054 case IMAGPART_EXPR:
1055 offset += size;
1056 break;
1057
1058 case VIEW_CONVERT_EXPR:
1059 break;
1060
1061 case STRING_CST:
1062 case INTEGER_CST:
1063 case COMPLEX_CST:
1064 case VECTOR_CST:
1065 case REAL_CST:
1066 case CONSTRUCTOR:
1067 case CONST_DECL:
1068 return false;
1069
1070 default:
1071 return false;
1072 }
1073 }
1074
1075 if (base == NULL_TREE)
1076 return false;
1077
1078 ref->ref = NULL_TREE;
1079 ref->base = base;
1080 ref->ref_alias_set = set;
1081 if (base_alias_set != -1)
1082 ref->base_alias_set = base_alias_set;
1083 else
1084 ref->base_alias_set = get_alias_set (base);
1085 /* We discount volatiles from value-numbering elsewhere. */
1086 ref->volatile_p = false;
1087
1088 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1089 {
1090 ref->offset = 0;
1091 ref->size = -1;
1092 ref->max_size = -1;
1093 return true;
1094 }
1095
1096 if (!offset.to_shwi (&ref->offset))
1097 {
1098 ref->offset = 0;
1099 ref->max_size = -1;
1100 return true;
1101 }
1102
1103 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1104 ref->max_size = -1;
1105
1106 return true;
1107 }
1108
1109 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1110 vn_reference_op_s's. */
1111
1112 static void
1113 copy_reference_ops_from_call (gcall *call,
1114 vec<vn_reference_op_s> *result)
1115 {
1116 vn_reference_op_s temp;
1117 unsigned i;
1118 tree lhs = gimple_call_lhs (call);
1119 int lr;
1120
1121 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1122 different. By adding the lhs here in the vector, we ensure that the
1123 hashcode is different, guaranteeing a different value number. */
1124 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1125 {
1126 memset (&temp, 0, sizeof (temp));
1127 temp.opcode = MODIFY_EXPR;
1128 temp.type = TREE_TYPE (lhs);
1129 temp.op0 = lhs;
1130 temp.off = -1;
1131 result->safe_push (temp);
1132 }
1133
1134 /* Copy the type, opcode, function, static chain and EH region, if any. */
1135 memset (&temp, 0, sizeof (temp));
1136 temp.type = gimple_call_return_type (call);
1137 temp.opcode = CALL_EXPR;
1138 temp.op0 = gimple_call_fn (call);
1139 temp.op1 = gimple_call_chain (call);
1140 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1141 temp.op2 = size_int (lr);
1142 temp.off = -1;
1143 if (gimple_call_with_bounds_p (call))
1144 temp.with_bounds = 1;
1145 result->safe_push (temp);
1146
1147 /* Copy the call arguments. As they can be references as well,
1148 just chain them together. */
1149 for (i = 0; i < gimple_call_num_args (call); ++i)
1150 {
1151 tree callarg = gimple_call_arg (call, i);
1152 copy_reference_ops_from_ref (callarg, result);
1153 }
1154 }
1155
1156 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1157 *I_P to point to the last element of the replacement. */
1158 static bool
1159 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1160 unsigned int *i_p)
1161 {
1162 unsigned int i = *i_p;
1163 vn_reference_op_t op = &(*ops)[i];
1164 vn_reference_op_t mem_op = &(*ops)[i - 1];
1165 tree addr_base;
1166 HOST_WIDE_INT addr_offset = 0;
1167
1168 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1169 from .foo.bar to the preceding MEM_REF offset and replace the
1170 address with &OBJ. */
1171 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1172 &addr_offset);
1173 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1174 if (addr_base != TREE_OPERAND (op->op0, 0))
1175 {
1176 offset_int off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1177 off += addr_offset;
1178 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1179 op->op0 = build_fold_addr_expr (addr_base);
1180 if (tree_fits_shwi_p (mem_op->op0))
1181 mem_op->off = tree_to_shwi (mem_op->op0);
1182 else
1183 mem_op->off = -1;
1184 return true;
1185 }
1186 return false;
1187 }
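
/* A hedged example of the folding above (hypothetical operands, not from
   the original sources): for an access MEM[&s.f, 8] the two ops

     { MEM_REF,   op0 = 8 }
     { ADDR_EXPR, op0 = &s.f }

   become

     { MEM_REF,   op0 = 8 + byte offset of f within s }
     { ADDR_EXPR, op0 = &s }

   so that equivalent references written against different bases hash and
   compare the same.  */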
1188
1189 /* Fold *& or forward-propagate an address definition at position *I_P in a
1190 vn_reference_op_s vector *OPS. Updates *I_P to the last replacement element. */
1191 static bool
1192 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1193 unsigned int *i_p)
1194 {
1195 unsigned int i = *i_p;
1196 vn_reference_op_t op = &(*ops)[i];
1197 vn_reference_op_t mem_op = &(*ops)[i - 1];
1198 gimple *def_stmt;
1199 enum tree_code code;
1200 offset_int off;
1201
1202 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1203 if (!is_gimple_assign (def_stmt))
1204 return false;
1205
1206 code = gimple_assign_rhs_code (def_stmt);
1207 if (code != ADDR_EXPR
1208 && code != POINTER_PLUS_EXPR)
1209 return false;
1210
1211 off = offset_int::from (wi::to_wide (mem_op->op0), SIGNED);
1212
1213 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1214 from .foo.bar to the preceding MEM_REF offset and replace the
1215 address with &OBJ. */
1216 if (code == ADDR_EXPR)
1217 {
1218 tree addr, addr_base;
1219 HOST_WIDE_INT addr_offset;
1220
1221 addr = gimple_assign_rhs1 (def_stmt);
1222 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1223 &addr_offset);
1224 /* If that didn't work because the address isn't invariant, propagate
1225 the reference tree from the address operation in case the current
1226 dereference has no offset. */
1227 if (!addr_base
1228 && *i_p == ops->length () - 1
1229 && off == 0
1230 /* This makes us disable this transform for PRE where the
1231 reference ops might be also used for code insertion which
1232 is invalid. */
1233 && default_vn_walk_kind == VN_WALKREWRITE)
1234 {
1235 auto_vec<vn_reference_op_s, 32> tem;
1236 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1237 /* Make sure to preserve TBAA info. The only objects not
1238 wrapped in MEM_REFs that can have their address taken are
1239 STRING_CSTs. */
1240 if (tem.length () >= 2
1241 && tem[tem.length () - 2].opcode == MEM_REF)
1242 {
1243 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1244 new_mem_op->op0
1245 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1246 wi::to_wide (new_mem_op->op0));
1247 }
1248 else
1249 gcc_assert (tem.last ().opcode == STRING_CST);
1250 ops->pop ();
1251 ops->pop ();
1252 ops->safe_splice (tem);
1253 --*i_p;
1254 return true;
1255 }
1256 if (!addr_base
1257 || TREE_CODE (addr_base) != MEM_REF)
1258 return false;
1259
1260 off += addr_offset;
1261 off += mem_ref_offset (addr_base);
1262 op->op0 = TREE_OPERAND (addr_base, 0);
1263 }
1264 else
1265 {
1266 tree ptr, ptroff;
1267 ptr = gimple_assign_rhs1 (def_stmt);
1268 ptroff = gimple_assign_rhs2 (def_stmt);
1269 if (TREE_CODE (ptr) != SSA_NAME
1270 || TREE_CODE (ptroff) != INTEGER_CST)
1271 return false;
1272
1273 off += wi::to_offset (ptroff);
1274 op->op0 = ptr;
1275 }
1276
1277 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1278 if (tree_fits_shwi_p (mem_op->op0))
1279 mem_op->off = tree_to_shwi (mem_op->op0);
1280 else
1281 mem_op->off = -1;
1282 if (TREE_CODE (op->op0) == SSA_NAME)
1283 op->op0 = SSA_VAL (op->op0);
1284 if (TREE_CODE (op->op0) != SSA_NAME)
1285 op->opcode = TREE_CODE (op->op0);
1286
1287 /* And recurse. */
1288 if (TREE_CODE (op->op0) == SSA_NAME)
1289 vn_reference_maybe_forwprop_address (ops, i_p);
1290 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1291 vn_reference_fold_indirect (ops, i_p);
1292 return true;
1293 }
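
/* Likewise a sketch for the address forward-propagation above
   (hypothetical SSA names): given the defining statement

     p_2 = p_1 + 16;    <-- POINTER_PLUS_EXPR

   a reference MEM[p_2, 4] has its ops rewritten to MEM[p_1, 20], and the
   routine recurses in case p_1 is itself defined by an ADDR_EXPR or
   another POINTER_PLUS_EXPR.  */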
1294
1295 /* Optimize the reference REF to a constant if possible or return
1296 NULL_TREE if not. */
1297
1298 tree
1299 fully_constant_vn_reference_p (vn_reference_t ref)
1300 {
1301 vec<vn_reference_op_s> operands = ref->operands;
1302 vn_reference_op_t op;
1303
1304 /* Try to simplify the translated expression if it is
1305 a call to a builtin function with at most two arguments. */
1306 op = &operands[0];
1307 if (op->opcode == CALL_EXPR
1308 && TREE_CODE (op->op0) == ADDR_EXPR
1309 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1310 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1311 && operands.length () >= 2
1312 && operands.length () <= 3)
1313 {
1314 vn_reference_op_t arg0, arg1 = NULL;
1315 bool anyconst = false;
1316 arg0 = &operands[1];
1317 if (operands.length () > 2)
1318 arg1 = &operands[2];
1319 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1320 || (arg0->opcode == ADDR_EXPR
1321 && is_gimple_min_invariant (arg0->op0)))
1322 anyconst = true;
1323 if (arg1
1324 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1325 || (arg1->opcode == ADDR_EXPR
1326 && is_gimple_min_invariant (arg1->op0))))
1327 anyconst = true;
1328 if (anyconst)
1329 {
1330 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1331 arg1 ? 2 : 1,
1332 arg0->op0,
1333 arg1 ? arg1->op0 : NULL);
1334 if (folded
1335 && TREE_CODE (folded) == NOP_EXPR)
1336 folded = TREE_OPERAND (folded, 0);
1337 if (folded
1338 && is_gimple_min_invariant (folded))
1339 return folded;
1340 }
1341 }
1342
1343 /* Simplify reads from constants or constant initializers. */
1344 else if (BITS_PER_UNIT == 8
1345 && is_gimple_reg_type (ref->type)
1346 && (!INTEGRAL_TYPE_P (ref->type)
1347 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1348 {
1349 poly_int64 off = 0;
1350 HOST_WIDE_INT size;
1351 if (INTEGRAL_TYPE_P (ref->type))
1352 size = TYPE_PRECISION (ref->type);
1353 else
1354 size = tree_to_shwi (TYPE_SIZE (ref->type));
1355 if (size % BITS_PER_UNIT != 0
1356 || size > MAX_BITSIZE_MODE_ANY_MODE)
1357 return NULL_TREE;
1358 size /= BITS_PER_UNIT;
1359 unsigned i;
1360 for (i = 0; i < operands.length (); ++i)
1361 {
1362 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1363 {
1364 ++i;
1365 break;
1366 }
1367 if (known_eq (operands[i].off, -1))
1368 return NULL_TREE;
1369 off += operands[i].off;
1370 if (operands[i].opcode == MEM_REF)
1371 {
1372 ++i;
1373 break;
1374 }
1375 }
1376 vn_reference_op_t base = &operands[--i];
1377 tree ctor = error_mark_node;
1378 tree decl = NULL_TREE;
1379 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1380 ctor = base->op0;
1381 else if (base->opcode == MEM_REF
1382 && base[1].opcode == ADDR_EXPR
1383 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1384 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1385 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1386 {
1387 decl = TREE_OPERAND (base[1].op0, 0);
1388 if (TREE_CODE (decl) == STRING_CST)
1389 ctor = decl;
1390 else
1391 ctor = ctor_for_folding (decl);
1392 }
1393 if (ctor == NULL_TREE)
1394 return build_zero_cst (ref->type);
1395 else if (ctor != error_mark_node)
1396 {
1397 HOST_WIDE_INT const_off;
1398 if (decl)
1399 {
1400 tree res = fold_ctor_reference (ref->type, ctor,
1401 off * BITS_PER_UNIT,
1402 size * BITS_PER_UNIT, decl);
1403 if (res)
1404 {
1405 STRIP_USELESS_TYPE_CONVERSION (res);
1406 if (is_gimple_min_invariant (res))
1407 return res;
1408 }
1409 }
1410 else if (off.is_constant (&const_off))
1411 {
1412 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1413 int len = native_encode_expr (ctor, buf, size, const_off);
1414 if (len > 0)
1415 return native_interpret_expr (ref->type, buf, len);
1416 }
1417 }
1418 }
1419
1420 return NULL_TREE;
1421 }
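
/* An illustrative case for the constant-initializer path above (not from
   the original sources): for

     static const int tab[4] = { 1, 2, 3, 4 };
     ...
     x_1 = tab[2];

   the accumulated byte offset is 8 and the access size 4 (assuming a
   32-bit int), ctor_for_folding yields the initializer of tab, and
   fold_ctor_reference recovers the constant 3.  */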
1422
1423 /* Return true if OPS contain a storage order barrier. */
1424
1425 static bool
1426 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1427 {
1428 vn_reference_op_t op;
1429 unsigned i;
1430
1431 FOR_EACH_VEC_ELT (ops, i, op)
1432 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1433 return true;
1434
1435 return false;
1436 }
1437
1438 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1439 structures into their value numbers. This is done in-place, and
1440 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1441 whether any operands were valueized. */
1442
1443 static vec<vn_reference_op_s>
1444 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1445 {
1446 vn_reference_op_t vro;
1447 unsigned int i;
1448
1449 *valueized_anything = false;
1450
1451 FOR_EACH_VEC_ELT (orig, i, vro)
1452 {
1453 if (vro->opcode == SSA_NAME
1454 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1455 {
1456 tree tem = SSA_VAL (vro->op0);
1457 if (tem != vro->op0)
1458 {
1459 *valueized_anything = true;
1460 vro->op0 = tem;
1461 }
1462 /* If it transforms from an SSA_NAME to a constant, update
1463 the opcode. */
1464 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1465 vro->opcode = TREE_CODE (vro->op0);
1466 }
1467 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1468 {
1469 tree tem = SSA_VAL (vro->op1);
1470 if (tem != vro->op1)
1471 {
1472 *valueized_anything = true;
1473 vro->op1 = tem;
1474 }
1475 }
1476 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1477 {
1478 tree tem = SSA_VAL (vro->op2);
1479 if (tem != vro->op2)
1480 {
1481 *valueized_anything = true;
1482 vro->op2 = tem;
1483 }
1484 }
1485 /* If it transforms from an SSA_NAME to an address, fold with
1486 a preceding indirect reference. */
1487 if (i > 0
1488 && vro->op0
1489 && TREE_CODE (vro->op0) == ADDR_EXPR
1490 && orig[i - 1].opcode == MEM_REF)
1491 {
1492 if (vn_reference_fold_indirect (&orig, &i))
1493 *valueized_anything = true;
1494 }
1495 else if (i > 0
1496 && vro->opcode == SSA_NAME
1497 && orig[i - 1].opcode == MEM_REF)
1498 {
1499 if (vn_reference_maybe_forwprop_address (&orig, &i))
1500 *valueized_anything = true;
1501 }
1502 /* If it transforms a non-constant ARRAY_REF into a constant
1503 one, adjust the constant offset. */
1504 else if (vro->opcode == ARRAY_REF
1505 && known_eq (vro->off, -1)
1506 && poly_int_tree_p (vro->op0)
1507 && poly_int_tree_p (vro->op1)
1508 && TREE_CODE (vro->op2) == INTEGER_CST)
1509 {
1510 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1511 - wi::to_poly_offset (vro->op1))
1512 * wi::to_offset (vro->op2)
1513 * vn_ref_op_align_unit (vro));
1514 off.to_shwi (&vro->off);
1515 }
1516 }
1517
1518 return orig;
1519 }
1520
1521 static vec<vn_reference_op_s>
1522 valueize_refs (vec<vn_reference_op_s> orig)
1523 {
1524 bool tem;
1525 return valueize_refs_1 (orig, &tem);
1526 }
1527
1528 static vec<vn_reference_op_s> shared_lookup_references;
1529
1530 /* Create a vector of vn_reference_op_s structures from REF, a
1531 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1532 this function. *VALUEIZED_ANYTHING will specify whether any
1533 operands were valueized. */
1534
1535 static vec<vn_reference_op_s>
1536 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1537 {
1538 if (!ref)
1539 return vNULL;
1540 shared_lookup_references.truncate (0);
1541 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1542 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1543 valueized_anything);
1544 return shared_lookup_references;
1545 }
1546
1547 /* Create a vector of vn_reference_op_s structures from CALL, a
1548 call statement. The vector is shared among all callers of
1549 this function. */
1550
1551 static vec<vn_reference_op_s>
1552 valueize_shared_reference_ops_from_call (gcall *call)
1553 {
1554 if (!call)
1555 return vNULL;
1556 shared_lookup_references.truncate (0);
1557 copy_reference_ops_from_call (call, &shared_lookup_references);
1558 shared_lookup_references = valueize_refs (shared_lookup_references);
1559 return shared_lookup_references;
1560 }
1561
1562 /* Lookup a SCCVN reference operation VR in the current hash table.
1563 Returns the resulting value number if it exists in the hash table,
1564 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1565 vn_reference_t stored in the hashtable if something is found. */
1566
1567 static tree
1568 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1569 {
1570 vn_reference_s **slot;
1571 hashval_t hash;
1572
1573 hash = vr->hashcode;
1574 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575 if (!slot && current_info == optimistic_info)
1576 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1577 if (slot)
1578 {
1579 if (vnresult)
1580 *vnresult = (vn_reference_t)*slot;
1581 return ((vn_reference_t)*slot)->result;
1582 }
1583
1584 return NULL_TREE;
1585 }
1586
1587 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1588 with the current VUSE and performs the expression lookup. */
1589
1590 static void *
1591 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1592 unsigned int cnt, void *vr_)
1593 {
1594 vn_reference_t vr = (vn_reference_t)vr_;
1595 vn_reference_s **slot;
1596 hashval_t hash;
1597
1598 /* This bounds the stmt walks we perform on reference lookups
1599 to O(1) instead of O(N) where N is the number of dominating
1600 stores. */
1601 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1602 return (void *)-1;
1603
1604 if (last_vuse_ptr)
1605 *last_vuse_ptr = vuse;
1606
1607 /* Fixup vuse and hash. */
1608 if (vr->vuse)
1609 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1610 vr->vuse = vuse_ssa_val (vuse);
1611 if (vr->vuse)
1612 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1613
1614 hash = vr->hashcode;
1615 slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616 if (!slot && current_info == optimistic_info)
1617 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1618 if (slot)
1619 return *slot;
1620
1621 return NULL;
1622 }
1623
1624 /* Lookup an existing or insert a new vn_reference entry into the
1625 value table for the VUSE, SET, TYPE, OPERANDS reference which
1626 has the value VALUE which is either a constant or an SSA name. */
1627
1628 static vn_reference_t
1629 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1630 alias_set_type set,
1631 tree type,
1632 vec<vn_reference_op_s,
1633 va_heap> operands,
1634 tree value)
1635 {
1636 vn_reference_s vr1;
1637 vn_reference_t result;
1638 unsigned value_id;
1639 vr1.vuse = vuse;
1640 vr1.operands = operands;
1641 vr1.type = type;
1642 vr1.set = set;
1643 vr1.hashcode = vn_reference_compute_hash (&vr1);
1644 if (vn_reference_lookup_1 (&vr1, &result))
1645 return result;
1646 if (TREE_CODE (value) == SSA_NAME)
1647 value_id = VN_INFO (value)->value_id;
1648 else
1649 value_id = get_or_alloc_constant_value_id (value);
1650 return vn_reference_insert_pieces (vuse, set, type,
1651 operands.copy (), value, value_id);
1652 }
1653
1654 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1655 static unsigned mprts_hook_cnt;
1656
1657 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
1658
1659 static tree
1660 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1661 {
1662 if (!rcode.is_tree_code ())
1663 return NULL_TREE;
1664 tree *ops = ops_;
1665 unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1666 if (rcode == CONSTRUCTOR
1667 /* ??? We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
1668 while GIMPLE / match-and-simplify use CONSTRUCTOR as a GENERIC tree. */
1669 && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1670 {
1671 length = CONSTRUCTOR_NELTS (ops_[0]);
1672 ops = XALLOCAVEC (tree, length);
1673 for (unsigned i = 0; i < length; ++i)
1674 ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1675 }
1676 vn_nary_op_t vnresult = NULL;
1677 tree res = vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1678 type, ops, &vnresult);
1679 /* We can end up endlessly recursing simplifications if the lookup above
1680 presents us with a def-use chain that mirrors the original simplification.
1681 See PR80887 for an example. Limit successful lookup artificially
1682 to 10 times if we are called as mprts_hook. */
1683 if (res
1684 && mprts_hook
1685 && --mprts_hook_cnt == 0)
1686 {
1687 if (dump_file && (dump_flags & TDF_DETAILS))
1688 fprintf (dump_file, "Resetting mprts_hook after too many "
1689 "invocations.\n");
1690 mprts_hook = NULL;
1691 }
1692 return res;
1693 }
1694
1695 /* Return a value-number for RCODE OPS... either by looking up an existing
1696 value-number for the simplified result or by inserting the operation if
1697 INSERT is true. */
1698
1699 static tree
1700 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1701 bool insert)
1702 {
1703 tree result = NULL_TREE;
1704 /* We will be creating a value number for
1705 RCODE (OPS...).
1706 So first simplify and lookup this expression to see if it
1707 is already available. */
1708 mprts_hook = vn_lookup_simplify_result;
1709 mprts_hook_cnt = 9;
1710 bool res = false;
1711 switch (TREE_CODE_LENGTH ((tree_code) rcode))
1712 {
1713 case 1:
1714 res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1715 break;
1716 case 2:
1717 res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1718 break;
1719 case 3:
1720 res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1721 break;
1722 }
1723 mprts_hook = NULL;
1724 gimple *new_stmt = NULL;
1725 if (res
1726 && gimple_simplified_result_is_gimple_val (rcode, ops))
1727 /* The expression is already available. */
1728 result = ops[0];
1729 else
1730 {
1731 tree val = vn_lookup_simplify_result (rcode, type, ops);
1732 if (!val && insert)
1733 {
1734 gimple_seq stmts = NULL;
1735 result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1736 if (result)
1737 {
1738 gcc_assert (gimple_seq_singleton_p (stmts));
1739 new_stmt = gimple_seq_first_stmt (stmts);
1740 }
1741 }
1742 else
1743 /* The expression is already available. */
1744 result = val;
1745 }
1746 if (new_stmt)
1747 {
1748 /* The expression is not yet available, value-number lhs to
1749 the new SSA_NAME we created. */
1750 /* Initialize value-number information properly. */
1751 VN_INFO_GET (result)->valnum = result;
1752 VN_INFO (result)->value_id = get_next_value_id ();
1753 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1754 new_stmt);
1755 VN_INFO (result)->needs_insertion = true;
1756 /* ??? PRE phi-translation inserts NARYs without corresponding
1757 SSA name result. Re-use those but set their result according
1758 to the stmt we just built. */
1759 vn_nary_op_t nary = NULL;
1760 vn_nary_op_lookup_stmt (new_stmt, &nary);
1761 if (nary)
1762 {
1763 gcc_assert (nary->result == NULL_TREE);
1764 nary->result = gimple_assign_lhs (new_stmt);
1765 }
1766 /* As all "inserted" statements are singleton SCCs, insert
1767 to the valid table. This is strictly needed to
1768 avoid re-generating new value SSA_NAMEs for the same
1769 expression during SCC iteration over and over (the
1770 optimistic table gets cleared after each iteration).
1771 We do not need to insert into the optimistic table, as
1772 lookups there will fall back to the valid table. */
1773 else if (current_info == optimistic_info)
1774 {
1775 current_info = valid_info;
1776 vn_nary_op_insert_stmt (new_stmt, result);
1777 current_info = optimistic_info;
1778 }
1779 else
1780 vn_nary_op_insert_stmt (new_stmt, result);
1781 if (dump_file && (dump_flags & TDF_DETAILS))
1782 {
1783 fprintf (dump_file, "Inserting name ");
1784 print_generic_expr (dump_file, result);
1785 fprintf (dump_file, " for expression ");
1786 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1787 fprintf (dump_file, "\n");
1788 }
1789 }
1790 return result;
1791 }
1792
1793 /* Return a value-number for RCODE OPS... either by looking up an existing
1794 value-number for the simplified result or by inserting the operation. */
1795
1796 static tree
1797 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1798 {
1799 return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1800 }
1801
1802 /* Try to simplify the nary operation NARY and return its value if
1803 present, without inserting anything new. */
1804
1805 tree
1806 vn_nary_simplify (vn_nary_op_t nary)
1807 {
1808 if (nary->length > 3)
1809 return NULL_TREE;
1810 tree ops[3];
1811 memcpy (ops, nary->op, sizeof (tree) * nary->length);
1812 return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1813 }
1814
1815
1816 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1817 from the statement defining VUSE and if not successful tries to
1818 translate *REFP and VR_ through an aggregate copy at the definition
1819 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1820 of *REF and *VR. If only disambiguation was performed then
1821 *DISAMBIGUATE_ONLY is set to true. */
1822
1823 static void *
1824 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1825 bool *disambiguate_only)
1826 {
1827 vn_reference_t vr = (vn_reference_t)vr_;
1828 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1829 tree base = ao_ref_base (ref);
1830 HOST_WIDE_INT offseti, maxsizei;
1831 static vec<vn_reference_op_s> lhs_ops;
1832 ao_ref lhs_ref;
1833 bool lhs_ref_ok = false;
1834 poly_int64 copy_size;
1835
1836 /* If the reference is based on a parameter that was determined as
1837 pointing to readonly memory, it doesn't change. */
1838 if (TREE_CODE (base) == MEM_REF
1839 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1840 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1841 && bitmap_bit_p (const_parms,
1842 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1843 {
1844 *disambiguate_only = true;
1845 return NULL;
1846 }
1847
1848 /* First try to disambiguate after value-replacing in the definition's LHS. */
1849 if (is_gimple_assign (def_stmt))
1850 {
1851 tree lhs = gimple_assign_lhs (def_stmt);
1852 bool valueized_anything = false;
1853 /* Avoid re-allocation overhead. */
1854 lhs_ops.truncate (0);
1855 copy_reference_ops_from_ref (lhs, &lhs_ops);
1856 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1857 if (valueized_anything)
1858 {
1859 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1860 get_alias_set (lhs),
1861 TREE_TYPE (lhs), lhs_ops);
1862 if (lhs_ref_ok
1863 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1864 {
1865 *disambiguate_only = true;
1866 return NULL;
1867 }
1868 }
1869 else
1870 {
1871 ao_ref_init (&lhs_ref, lhs);
1872 lhs_ref_ok = true;
1873 }
1874
1875 /* If we reach a clobbering statement try to skip it and see if
1876 we find a VN result with exactly the same value as the
1877 possible clobber. In this case we can ignore the clobber
1878 and return the found value.
1879 Note that we don't need to worry about partial overlapping
1880 accesses as we then can use TBAA to disambiguate against the
1881 clobbering statement when looking up a load (thus the
1882 VN_WALKREWRITE guard). */
1883 if (vn_walk_kind == VN_WALKREWRITE
1884 && is_gimple_reg_type (TREE_TYPE (lhs))
1885 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1886 {
1887 tree *saved_last_vuse_ptr = last_vuse_ptr;
1888 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1889 last_vuse_ptr = NULL;
1890 tree saved_vuse = vr->vuse;
1891 hashval_t saved_hashcode = vr->hashcode;
1892 void *res = vn_reference_lookup_2 (ref,
1893 gimple_vuse (def_stmt), 0, vr);
1894 /* Need to restore vr->vuse and vr->hashcode. */
1895 vr->vuse = saved_vuse;
1896 vr->hashcode = saved_hashcode;
1897 last_vuse_ptr = saved_last_vuse_ptr;
1898 if (res && res != (void *)-1)
1899 {
1900 vn_reference_t vnresult = (vn_reference_t) res;
1901 if (vnresult->result
1902 && operand_equal_p (vnresult->result,
1903 gimple_assign_rhs1 (def_stmt), 0))
1904 return res;
1905 }
1906 }
1907 }
1908 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1909 && gimple_call_num_args (def_stmt) <= 4)
1910 {
1911 /* For builtin calls valueize their arguments and call the
1912 alias oracle again. Valueization may improve points-to
1913 info of pointers and constify size and position arguments.
1914 Originally this was motivated by PR61034 which has
1915 conditional calls to free falsely clobbering ref because
1916 of imprecise points-to info of the argument. */
1917 tree oldargs[4];
1918 bool valueized_anything = false;
1919 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1920 {
1921 oldargs[i] = gimple_call_arg (def_stmt, i);
1922 tree val = vn_valueize (oldargs[i]);
1923 if (val != oldargs[i])
1924 {
1925 gimple_call_set_arg (def_stmt, i, val);
1926 valueized_anything = true;
1927 }
1928 }
1929 if (valueized_anything)
1930 {
1931 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1932 ref);
1933 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1934 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1935 if (!res)
1936 {
1937 *disambiguate_only = true;
1938 return NULL;
1939 }
1940 }
1941 }
1942
1943 if (*disambiguate_only)
1944 return (void *)-1;
1945
1946 /* If we cannot constrain the size of the reference we cannot
1947 test if anything kills it. */
1948 if (!ref->max_size_known_p ())
1949 return (void *)-1;
1950
1951 poly_int64 offset = ref->offset;
1952 poly_int64 maxsize = ref->max_size;
1953
1954 /* We can't deduce anything useful from clobbers. */
1955 if (gimple_clobber_p (def_stmt))
1956 return (void *)-1;
1957
1958 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1959 from that definition.
1960 1) Memset. */
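/* E.g. a read of a.x after
     memset (&a, 0, sizeof (a));
   that is fully covered by the memset is valueized to a zero constant
   of the read's type.  */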
1961 if (is_gimple_reg_type (vr->type)
1962 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1963 && integer_zerop (gimple_call_arg (def_stmt, 1))
1964 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1965 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1966 {
1967 tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1968 tree base2;
1969 poly_int64 offset2, size2, maxsize2;
1970 bool reverse;
1971 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1972 &reverse);
1973 tree len = gimple_call_arg (def_stmt, 2);
1974 if (known_size_p (maxsize2)
1975 && operand_equal_p (base, base2, 0)
1976 && known_subrange_p (offset, maxsize, offset2,
1977 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1978 {
1979 tree val = build_zero_cst (vr->type);
1980 return vn_reference_lookup_or_insert_for_pieces
1981 (vuse, vr->set, vr->type, vr->operands, val);
1982 }
1983 }
1984
1985 /* 2) Assignment from an empty CONSTRUCTOR. */
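/* E.g. a read of a.x after the aggregate store
     a = {};
   likewise yields a zero constant.  */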
1986 else if (is_gimple_reg_type (vr->type)
1987 && gimple_assign_single_p (def_stmt)
1988 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1989 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1990 {
1991 tree base2;
1992 poly_int64 offset2, size2, maxsize2;
1993 bool reverse;
1994 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1995 &offset2, &size2, &maxsize2, &reverse);
1996 if (known_size_p (maxsize2)
1997 && operand_equal_p (base, base2, 0)
1998 && known_subrange_p (offset, maxsize, offset2, size2))
1999 {
2000 tree val = build_zero_cst (vr->type);
2001 return vn_reference_lookup_or_insert_for_pieces
2002 (vuse, vr->set, vr->type, vr->operands, val);
2003 }
2004 }
2005
2006 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2007 routines to extract the assigned bits. */
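/* E.g. a byte-sized read out of x after
     x = 0x01020304;
   is computed by encoding the constant into its byte representation
   and re-interpreting the accessed bytes in the type of the read.  */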
2008 else if (known_eq (ref->size, maxsize)
2009 && is_gimple_reg_type (vr->type)
2010 && !contains_storage_order_barrier_p (vr->operands)
2011 && gimple_assign_single_p (def_stmt)
2012 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2013 /* native_encode and native_interpret operate on arrays of bytes
2014 and so fundamentally need a compile-time size and offset. */
2015 && maxsize.is_constant (&maxsizei)
2016 && maxsizei % BITS_PER_UNIT == 0
2017 && offset.is_constant (&offseti)
2018 && offseti % BITS_PER_UNIT == 0
2019 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2020 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2021 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2022 {
2023 tree base2;
2024 HOST_WIDE_INT offset2, size2;
2025 bool reverse;
2026 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2027 &offset2, &size2, &reverse);
2028 if (base2
2029 && !reverse
2030 && size2 % BITS_PER_UNIT == 0
2031 && offset2 % BITS_PER_UNIT == 0
2032 && operand_equal_p (base, base2, 0)
2033 && known_subrange_p (offseti, maxsizei, offset2, size2))
2034 {
2035 /* We support up to 512-bit values (for V8DFmode). */
2036 unsigned char buffer[64];
2037 int len;
2038
2039 tree rhs = gimple_assign_rhs1 (def_stmt);
2040 if (TREE_CODE (rhs) == SSA_NAME)
2041 rhs = SSA_VAL (rhs);
2042 len = native_encode_expr (rhs,
2043 buffer, sizeof (buffer));
2044 if (len > 0)
2045 {
2046 tree type = vr->type;
2047 /* Make sure to interpret in a type that has a range
2048 covering the whole access size. */
2049 if (INTEGRAL_TYPE_P (vr->type)
2050 && maxsizei != TYPE_PRECISION (vr->type))
2051 type = build_nonstandard_integer_type (maxsizei,
2052 TYPE_UNSIGNED (type));
2053 tree val = native_interpret_expr (type,
2054 buffer
2055 + ((offseti - offset2)
2056 / BITS_PER_UNIT),
2057 maxsizei / BITS_PER_UNIT);
2058 /* If we chop off bits because the type's precision doesn't
2059 match the memory access size this is ok when optimizing
2060 reads but not when called from the DSE code during
2061 elimination. */
2062 if (val
2063 && type != vr->type)
2064 {
2065 if (! int_fits_type_p (val, vr->type))
2066 val = NULL_TREE;
2067 else
2068 val = fold_convert (vr->type, val);
2069 }
2070
2071 if (val)
2072 return vn_reference_lookup_or_insert_for_pieces
2073 (vuse, vr->set, vr->type, vr->operands, val);
2074 }
2075 }
2076 }
2077
2078 /* 4) Assignment from an SSA name which definition we may be able
2079 to access pieces from. */
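/* E.g. a read of part of x after the store
     x = y_1;
   is valueized as BIT_FIELD_REF <y_1, size, offset>, extracting the
   accessed piece from the stored SSA name.  */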
2080 else if (known_eq (ref->size, maxsize)
2081 && is_gimple_reg_type (vr->type)
2082 && !contains_storage_order_barrier_p (vr->operands)
2083 && gimple_assign_single_p (def_stmt)
2084 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2085 {
2086 tree base2;
2087 poly_int64 offset2, size2, maxsize2;
2088 bool reverse;
2089 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2090 &offset2, &size2, &maxsize2,
2091 &reverse);
2092 if (!reverse
2093 && known_size_p (maxsize2)
2094 && known_eq (maxsize2, size2)
2095 && operand_equal_p (base, base2, 0)
2096 && known_subrange_p (offset, maxsize, offset2, size2)
2097 /* ??? We can't handle bitfield precision extracts without
2098 either using an alternate type for the BIT_FIELD_REF and
2099 then doing a conversion or possibly adjusting the offset
2100 according to endianness. */
2101 && (! INTEGRAL_TYPE_P (vr->type)
2102 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2103 && multiple_p (ref->size, BITS_PER_UNIT))
2104 {
2105 code_helper rcode = BIT_FIELD_REF;
2106 tree ops[3];
2107 ops[0] = SSA_VAL (gimple_assign_rhs1 (def_stmt));
2108 ops[1] = bitsize_int (ref->size);
2109 ops[2] = bitsize_int (offset - offset2);
2110 tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2111 if (val
2112 && (TREE_CODE (val) != SSA_NAME
2113 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2114 {
2115 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2116 (vuse, vr->set, vr->type, vr->operands, val);
2117 return res;
2118 }
2119 }
2120 }
2121
2122 /* 5) For aggregate copies translate the reference through them if
2123 the copy kills ref. */
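/* E.g. a read of b.x after the aggregate copy
     b = a;
   is rewritten to a read of a.x and the walk continues with the
   translated reference.  */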
2124 else if (vn_walk_kind == VN_WALKREWRITE
2125 && gimple_assign_single_p (def_stmt)
2126 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2127 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2128 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2129 {
2130 tree base2;
2131 int i, j, k;
2132 auto_vec<vn_reference_op_s> rhs;
2133 vn_reference_op_t vro;
2134 ao_ref r;
2135
2136 if (!lhs_ref_ok)
2137 return (void *)-1;
2138
2139 /* See if the assignment kills REF. */
2140 base2 = ao_ref_base (&lhs_ref);
2141 if (!lhs_ref.max_size_known_p ()
2142 || (base != base2
2143 && (TREE_CODE (base) != MEM_REF
2144 || TREE_CODE (base2) != MEM_REF
2145 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2146 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2147 TREE_OPERAND (base2, 1))))
2148 || !stmt_kills_ref_p (def_stmt, ref))
2149 return (void *)-1;
2150
2151 /* Find the common base of ref and the lhs. lhs_ops already
2152 contains valueized operands for the lhs. */
2153 i = vr->operands.length () - 1;
2154 j = lhs_ops.length () - 1;
2155 while (j >= 0 && i >= 0
2156 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2157 {
2158 i--;
2159 j--;
2160 }
2161
2162 /* ??? The innermost op should always be a MEM_REF and we already
2163 checked that the assignment to the lhs kills vr. Thus for
2164 aggregate copies using char[] types the vn_reference_op_eq
2165 may fail when comparing types for compatibility. But we really
2166 don't care here - further lookups with the rewritten operands
2167 will simply fail if we messed up types too badly. */
2168 poly_int64 extra_off = 0;
2169 if (j == 0 && i >= 0
2170 && lhs_ops[0].opcode == MEM_REF
2171 && maybe_ne (lhs_ops[0].off, -1))
2172 {
2173 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2174 i--, j--;
2175 else if (vr->operands[i].opcode == MEM_REF
2176 && maybe_ne (vr->operands[i].off, -1))
2177 {
2178 extra_off = vr->operands[i].off - lhs_ops[0].off;
2179 i--, j--;
2180 }
2181 }
2182
2183 /* i now points to the first additional op.
2184 ??? LHS may not be completely contained in VR, one or more
2185 VIEW_CONVERT_EXPRs could be in its way. We could at least
2186 try handling outermost VIEW_CONVERT_EXPRs. */
2187 if (j != -1)
2188 return (void *)-1;
2189
2190 /* Punt if the additional ops contain a storage order barrier. */
2191 for (k = i; k >= 0; k--)
2192 {
2193 vro = &vr->operands[k];
2194 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2195 return (void *)-1;
2196 }
2197
2198 /* Now re-write REF to be based on the rhs of the assignment. */
2199 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2200
2201 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2202 if (maybe_ne (extra_off, 0))
2203 {
2204 if (rhs.length () < 2
2205 || rhs[0].opcode != MEM_REF
2206 || known_eq (rhs[0].off, -1))
2207 return (void *)-1;
2208 rhs[0].off += extra_off;
2209 rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2210 build_int_cst (TREE_TYPE (rhs[0].op0),
2211 extra_off));
2212 }
2213
2214 /* We need to pre-pend vr->operands[0..i] to rhs. */
2215 vec<vn_reference_op_s> old = vr->operands;
2216 if (i + 1 + rhs.length () > vr->operands.length ())
2217 vr->operands.safe_grow (i + 1 + rhs.length ());
2218 else
2219 vr->operands.truncate (i + 1 + rhs.length ());
2220 FOR_EACH_VEC_ELT (rhs, j, vro)
2221 vr->operands[i + 1 + j] = *vro;
2222 vr->operands = valueize_refs (vr->operands);
2223 if (old == shared_lookup_references)
2224 shared_lookup_references = vr->operands;
2225 vr->hashcode = vn_reference_compute_hash (vr);
2226
2227 /* Try folding the new reference to a constant. */
2228 tree val = fully_constant_vn_reference_p (vr);
2229 if (val)
2230 return vn_reference_lookup_or_insert_for_pieces
2231 (vuse, vr->set, vr->type, vr->operands, val);
2232
2233 /* Adjust *ref from the new operands. */
2234 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2235 return (void *)-1;
2236 /* This can happen with bitfields. */
2237 if (maybe_ne (ref->size, r.size))
2238 return (void *)-1;
2239 *ref = r;
2240
2241 /* Do not update last seen VUSE after translating. */
2242 last_vuse_ptr = NULL;
2243
2244 /* Keep looking for the adjusted *REF / VR pair. */
2245 return NULL;
2246 }
2247
2248 /* 6) For memcpy copies translate the reference through them if
2249 the copy kills ref. */
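/* Similar to the aggregate copy case, a read of *(p + o) after
     memcpy (p, q, n);
   that is fully covered by the copy is rewritten to a read of the
   corresponding bytes at *(q + o) and the walk continues.  */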
2250 else if (vn_walk_kind == VN_WALKREWRITE
2251 && is_gimple_reg_type (vr->type)
2252 /* ??? Handle BCOPY as well. */
2253 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2254 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2255 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2256 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2257 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2258 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2259 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2260 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2261 {
2262 tree lhs, rhs;
2263 ao_ref r;
2264 poly_int64 rhs_offset, lhs_offset;
2265 vn_reference_op_s op;
2266 poly_uint64 mem_offset;
2267 poly_int64 at, byte_maxsize;
2268
2269 /* Only handle non-variable, addressable refs. */
2270 if (maybe_ne (ref->size, maxsize)
2271 || !multiple_p (offset, BITS_PER_UNIT, &at)
2272 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2273 return (void *)-1;
2274
2275 /* Extract a pointer base and an offset for the destination. */
2276 lhs = gimple_call_arg (def_stmt, 0);
2277 lhs_offset = 0;
2278 if (TREE_CODE (lhs) == SSA_NAME)
2279 {
2280 lhs = SSA_VAL (lhs);
2281 if (TREE_CODE (lhs) == SSA_NAME)
2282 {
2283 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2284 if (gimple_assign_single_p (def_stmt)
2285 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2286 lhs = gimple_assign_rhs1 (def_stmt);
2287 }
2288 }
2289 if (TREE_CODE (lhs) == ADDR_EXPR)
2290 {
2291 HOST_WIDE_INT tmp_lhs_offset;
2292 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2293 &tmp_lhs_offset);
2294 lhs_offset = tmp_lhs_offset;
2295 if (!tem)
2296 return (void *)-1;
2297 if (TREE_CODE (tem) == MEM_REF
2298 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2299 {
2300 lhs = TREE_OPERAND (tem, 0);
2301 if (TREE_CODE (lhs) == SSA_NAME)
2302 lhs = SSA_VAL (lhs);
2303 lhs_offset += mem_offset;
2304 }
2305 else if (DECL_P (tem))
2306 lhs = build_fold_addr_expr (tem);
2307 else
2308 return (void *)-1;
2309 }
2310 if (TREE_CODE (lhs) != SSA_NAME
2311 && TREE_CODE (lhs) != ADDR_EXPR)
2312 return (void *)-1;
2313
2314 /* Extract a pointer base and an offset for the source. */
2315 rhs = gimple_call_arg (def_stmt, 1);
2316 rhs_offset = 0;
2317 if (TREE_CODE (rhs) == SSA_NAME)
2318 rhs = SSA_VAL (rhs);
2319 if (TREE_CODE (rhs) == ADDR_EXPR)
2320 {
2321 HOST_WIDE_INT tmp_rhs_offset;
2322 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2323 &tmp_rhs_offset);
2324 rhs_offset = tmp_rhs_offset;
2325 if (!tem)
2326 return (void *)-1;
2327 if (TREE_CODE (tem) == MEM_REF
2328 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2329 {
2330 rhs = TREE_OPERAND (tem, 0);
2331 rhs_offset += mem_offset;
2332 }
2333 else if (DECL_P (tem)
2334 || TREE_CODE (tem) == STRING_CST)
2335 rhs = build_fold_addr_expr (tem);
2336 else
2337 return (void *)-1;
2338 }
2339 if (TREE_CODE (rhs) != SSA_NAME
2340 && TREE_CODE (rhs) != ADDR_EXPR)
2341 return (void *)-1;
2342
2343 /* The bases of the destination and the reference have to agree. */
2344 if (TREE_CODE (base) == MEM_REF)
2345 {
2346 if (TREE_OPERAND (base, 0) != lhs
2347 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2348 return (void *) -1;
2349 at += mem_offset;
2350 }
2351 else if (!DECL_P (base)
2352 || TREE_CODE (lhs) != ADDR_EXPR
2353 || TREE_OPERAND (lhs, 0) != base)
2354 return (void *)-1;
2355
2356 /* If the access is completely outside of the memcpy destination
2357 area there is no aliasing. */
2358 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2359 return NULL;
2360 /* And the access has to be contained within the memcpy destination. */
2361 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2362 return (void *)-1;
2363
2364 /* Make room for 2 operands in the new reference. */
2365 if (vr->operands.length () < 2)
2366 {
2367 vec<vn_reference_op_s> old = vr->operands;
2368 vr->operands.safe_grow_cleared (2);
2369 if (old == shared_lookup_references)
2370 shared_lookup_references = vr->operands;
2371 }
2372 else
2373 vr->operands.truncate (2);
2374
2375 /* The looked-through reference is a simple MEM_REF. */
2376 memset (&op, 0, sizeof (op));
2377 op.type = vr->type;
2378 op.opcode = MEM_REF;
2379 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2380 op.off = at - lhs_offset + rhs_offset;
2381 vr->operands[0] = op;
2382 op.type = TREE_TYPE (rhs);
2383 op.opcode = TREE_CODE (rhs);
2384 op.op0 = rhs;
2385 op.off = -1;
2386 vr->operands[1] = op;
2387 vr->hashcode = vn_reference_compute_hash (vr);
2388
2389 /* Try folding the new reference to a constant. */
2390 tree val = fully_constant_vn_reference_p (vr);
2391 if (val)
2392 return vn_reference_lookup_or_insert_for_pieces
2393 (vuse, vr->set, vr->type, vr->operands, val);
2394
2395 /* Adjust *ref from the new operands. */
2396 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2397 return (void *)-1;
2398 /* This can happen with bitfields. */
2399 if (maybe_ne (ref->size, r.size))
2400 return (void *)-1;
2401 *ref = r;
2402
2403 /* Do not update last seen VUSE after translating. */
2404 last_vuse_ptr = NULL;
2405
2406 /* Keep looking for the adjusted *REF / VR pair. */
2407 return NULL;
2408 }
2409
2410 /* Bail out and stop walking. */
2411 return (void *)-1;
2412 }
2413
2414 /* Return a reference op vector from OP that can be used for
2415 vn_reference_lookup_pieces. The caller is responsible for releasing
2416 the vector. */
2417
2418 vec<vn_reference_op_s>
2419 vn_reference_operands_for_lookup (tree op)
2420 {
2421 bool valueized;
2422 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2423 }
2424
2425 /* Lookup a reference operation by its parts in the current hash table.
2426 Returns the resulting value number if it exists in the hash table,
2427 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2428 vn_reference_t stored in the hashtable if something is found. */
2429
2430 tree
2431 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2432 vec<vn_reference_op_s> operands,
2433 vn_reference_t *vnresult, vn_lookup_kind kind)
2434 {
2435 struct vn_reference_s vr1;
2436 vn_reference_t tmp;
2437 tree cst;
2438
2439 if (!vnresult)
2440 vnresult = &tmp;
2441 *vnresult = NULL;
2442
2443 vr1.vuse = vuse_ssa_val (vuse);
2444 shared_lookup_references.truncate (0);
2445 shared_lookup_references.safe_grow (operands.length ());
2446 memcpy (shared_lookup_references.address (),
2447 operands.address (),
2448 sizeof (vn_reference_op_s)
2449 * operands.length ());
2450 vr1.operands = operands = shared_lookup_references
2451 = valueize_refs (shared_lookup_references);
2452 vr1.type = type;
2453 vr1.set = set;
2454 vr1.hashcode = vn_reference_compute_hash (&vr1);
2455 if ((cst = fully_constant_vn_reference_p (&vr1)))
2456 return cst;
2457
2458 vn_reference_lookup_1 (&vr1, vnresult);
2459 if (!*vnresult
2460 && kind != VN_NOWALK
2461 && vr1.vuse)
2462 {
2463 ao_ref r;
2464 vn_walk_kind = kind;
2465 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2466 *vnresult =
2467 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2468 vn_reference_lookup_2,
2469 vn_reference_lookup_3,
2470 vuse_ssa_val, &vr1);
2471 gcc_checking_assert (vr1.operands == shared_lookup_references);
2472 }
2473
2474 if (*vnresult)
2475 return (*vnresult)->result;
2476
2477 return NULL_TREE;
2478 }
2479
2480 /* Lookup OP in the current hash table, and return the resulting value
2481 number if it exists in the hash table. Return NULL_TREE if it does
2482 not exist in the hash table or if the result field of the structure
2483 was NULL. VNRESULT will be filled in with the vn_reference_t
2484 stored in the hashtable if one exists. When TBAA_P is false assume
2485 we are looking up a store and treat it as having alias-set zero. */
2486
2487 tree
2488 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2489 vn_reference_t *vnresult, bool tbaa_p)
2490 {
2491 vec<vn_reference_op_s> operands;
2492 struct vn_reference_s vr1;
2493 tree cst;
2494 bool valueized_anything;
2495
2496 if (vnresult)
2497 *vnresult = NULL;
2498
2499 vr1.vuse = vuse_ssa_val (vuse);
2500 vr1.operands = operands
2501 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2502 vr1.type = TREE_TYPE (op);
2503 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2504 vr1.hashcode = vn_reference_compute_hash (&vr1);
2505 if ((cst = fully_constant_vn_reference_p (&vr1)))
2506 return cst;
2507
2508 if (kind != VN_NOWALK
2509 && vr1.vuse)
2510 {
2511 vn_reference_t wvnresult;
2512 ao_ref r;
2513 /* Make sure to use a valueized reference if we valueized anything.
2514 Otherwise preserve the full reference for advanced TBAA. */
2515 if (!valueized_anything
2516 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2517 vr1.operands))
2518 ao_ref_init (&r, op);
2519 if (! tbaa_p)
2520 r.ref_alias_set = r.base_alias_set = 0;
2521 vn_walk_kind = kind;
2522 wvnresult =
2523 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2524 vn_reference_lookup_2,
2525 vn_reference_lookup_3,
2526 vuse_ssa_val, &vr1);
2527 gcc_checking_assert (vr1.operands == shared_lookup_references);
2528 if (wvnresult)
2529 {
2530 if (vnresult)
2531 *vnresult = wvnresult;
2532 return wvnresult->result;
2533 }
2534
2535 return NULL_TREE;
2536 }
2537
2538 return vn_reference_lookup_1 (&vr1, vnresult);
2539 }
2540
2541 /* Lookup CALL in the current hash table and return the entry in
2542 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2543
2544 void
2545 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2546 vn_reference_t vr)
2547 {
2548 if (vnresult)
2549 *vnresult = NULL;
2550
2551 tree vuse = gimple_vuse (call);
2552
2553 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2554 vr->operands = valueize_shared_reference_ops_from_call (call);
2555 vr->type = gimple_expr_type (call);
2556 vr->set = 0;
2557 vr->hashcode = vn_reference_compute_hash (vr);
2558 vn_reference_lookup_1 (vr, vnresult);
2559 }
2560
2561 /* Insert OP into the current hash table with a value number of
2562 RESULT, and return the resulting reference structure we created. */
2563
2564 static vn_reference_t
2565 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2566 {
2567 vn_reference_s **slot;
2568 vn_reference_t vr1;
2569 bool tem;
2570
2571 vr1 = current_info->references_pool->allocate ();
2572 if (TREE_CODE (result) == SSA_NAME)
2573 vr1->value_id = VN_INFO (result)->value_id;
2574 else
2575 vr1->value_id = get_or_alloc_constant_value_id (result);
2576 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2577 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2578 vr1->type = TREE_TYPE (op);
2579 vr1->set = get_alias_set (op);
2580 vr1->hashcode = vn_reference_compute_hash (vr1);
2581 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2582 vr1->result_vdef = vdef;
2583
2584 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2585 INSERT);
2586
2587 /* Because we look up stores using vuses, and value number failures
2588 using the vdefs (see visit_reference_op_store for how and why),
2589 it's possible that on failure we may try to insert an already
2590 inserted store. This is not wrong, there is no ssa name for a
2591 store that we could use as a differentiator anyway. Thus, unlike
2592 the other lookup functions, you cannot gcc_assert (!*slot)
2593 here. */
2594
2595 /* But free the old slot in case of a collision. */
2596 if (*slot)
2597 free_reference (*slot);
2598
2599 *slot = vr1;
2600 return vr1;
2601 }
2602
2603 /* Insert a reference by its pieces into the current hash table with
2604 a value number of RESULT. Return the resulting reference
2605 structure we created. */
2606
2607 vn_reference_t
2608 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2609 vec<vn_reference_op_s> operands,
2610 tree result, unsigned int value_id)
2611
2612 {
2613 vn_reference_s **slot;
2614 vn_reference_t vr1;
2615
2616 vr1 = current_info->references_pool->allocate ();
2617 vr1->value_id = value_id;
2618 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2619 vr1->operands = valueize_refs (operands);
2620 vr1->type = type;
2621 vr1->set = set;
2622 vr1->hashcode = vn_reference_compute_hash (vr1);
2623 if (result && TREE_CODE (result) == SSA_NAME)
2624 result = SSA_VAL (result);
2625 vr1->result = result;
2626
2627 slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2628 INSERT);
2629
2630 /* At this point we should have all the things inserted that we have
2631 seen before, and we should never try inserting something that
2632 already exists. */
2633 gcc_assert (!*slot);
2634 if (*slot)
2635 free_reference (*slot);
2636
2637 *slot = vr1;
2638 return vr1;
2639 }
2640
2641 /* Compute and return the hash value for nary operation VNO1. */
2642
2643 static hashval_t
2644 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2645 {
2646 inchash::hash hstate;
2647 unsigned i;
2648
2649 for (i = 0; i < vno1->length; ++i)
2650 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2651 vno1->op[i] = SSA_VAL (vno1->op[i]);
2652
2653 if (((vno1->length == 2
2654 && commutative_tree_code (vno1->opcode))
2655 || (vno1->length == 3
2656 && commutative_ternary_tree_code (vno1->opcode)))
2657 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2658 std::swap (vno1->op[0], vno1->op[1]);
2659 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2660 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2661 {
2662 std::swap (vno1->op[0], vno1->op[1]);
2663 vno1->opcode = swap_tree_comparison (vno1->opcode);
2664 }
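/* This canonicalization makes commuted forms hash and compare equal,
   e.g. a + b vs. b + a, or a < b vs. b > a.  */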
2665
2666 hstate.add_int (vno1->opcode);
2667 for (i = 0; i < vno1->length; ++i)
2668 inchash::add_expr (vno1->op[i], hstate);
2669
2670 return hstate.end ();
2671 }
2672
2673 /* Compare nary operations VNO1 and VNO2 and return true if they are
2674 equivalent. */
2675
2676 bool
2677 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2678 {
2679 unsigned i;
2680
2681 if (vno1->hashcode != vno2->hashcode)
2682 return false;
2683
2684 if (vno1->length != vno2->length)
2685 return false;
2686
2687 if (vno1->opcode != vno2->opcode
2688 || !types_compatible_p (vno1->type, vno2->type))
2689 return false;
2690
2691 for (i = 0; i < vno1->length; ++i)
2692 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2693 return false;
2694
2695 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2696 of op1. Need to check to make sure they are the same. */
2697 if (vno1->opcode == BIT_INSERT_EXPR
2698 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2699 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2700 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2701 return false;
2702
2703 return true;
2704 }
2705
2706 /* Initialize VNO from the pieces provided. */
2707
2708 static void
2709 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2710 enum tree_code code, tree type, tree *ops)
2711 {
2712 vno->opcode = code;
2713 vno->length = length;
2714 vno->type = type;
2715 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2716 }
2717
2718 /* Initialize VNO from OP. */
2719
2720 static void
2721 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2722 {
2723 unsigned i;
2724
2725 vno->opcode = TREE_CODE (op);
2726 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2727 vno->type = TREE_TYPE (op);
2728 for (i = 0; i < vno->length; ++i)
2729 vno->op[i] = TREE_OPERAND (op, i);
2730 }
2731
2732 /* Return the number of operands for a vn_nary ops structure from STMT. */
2733
2734 static unsigned int
2735 vn_nary_length_from_stmt (gimple *stmt)
2736 {
2737 switch (gimple_assign_rhs_code (stmt))
2738 {
2739 case REALPART_EXPR:
2740 case IMAGPART_EXPR:
2741 case VIEW_CONVERT_EXPR:
2742 return 1;
2743
2744 case BIT_FIELD_REF:
2745 return 3;
2746
2747 case CONSTRUCTOR:
2748 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2749
2750 default:
2751 return gimple_num_ops (stmt) - 1;
2752 }
2753 }
2754
2755 /* Initialize VNO from STMT. */
2756
2757 static void
2758 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2759 {
2760 unsigned i;
2761
2762 vno->opcode = gimple_assign_rhs_code (stmt);
2763 vno->type = gimple_expr_type (stmt);
2764 switch (vno->opcode)
2765 {
2766 case REALPART_EXPR:
2767 case IMAGPART_EXPR:
2768 case VIEW_CONVERT_EXPR:
2769 vno->length = 1;
2770 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2771 break;
2772
2773 case BIT_FIELD_REF:
2774 vno->length = 3;
2775 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2776 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2777 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2778 break;
2779
2780 case CONSTRUCTOR:
2781 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2782 for (i = 0; i < vno->length; ++i)
2783 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2784 break;
2785
2786 default:
2787 gcc_checking_assert (!gimple_assign_single_p (stmt));
2788 vno->length = gimple_num_ops (stmt) - 1;
2789 for (i = 0; i < vno->length; ++i)
2790 vno->op[i] = gimple_op (stmt, i + 1);
2791 }
2792 }
2793
2794 /* Compute the hashcode for VNO and look for it in the hash table;
2795 return the resulting value number if it exists in the hash table.
2796 Return NULL_TREE if it does not exist in the hash table or if the
2797 result field of the operation is NULL. VNRESULT will contain the
2798 vn_nary_op_t from the hashtable if it exists. */
2799
2800 static tree
2801 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2802 {
2803 vn_nary_op_s **slot;
2804
2805 if (vnresult)
2806 *vnresult = NULL;
2807
2808 vno->hashcode = vn_nary_op_compute_hash (vno);
2809 slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2810 NO_INSERT);
2811 if (!slot && current_info == optimistic_info)
2812 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2813 NO_INSERT);
2814 if (!slot)
2815 return NULL_TREE;
2816 if (vnresult)
2817 *vnresult = *slot;
2818 return (*slot)->result;
2819 }
2820
2821 /* Lookup an n-ary operation by its pieces and return the resulting value
2822 number if it exists in the hash table. Return NULL_TREE if it does
2823 not exist in the hash table or if the result field of the operation
2824 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2825 if it exists. */
2826
2827 tree
2828 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2829 tree type, tree *ops, vn_nary_op_t *vnresult)
2830 {
2831 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2832 sizeof_vn_nary_op (length));
2833 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2834 return vn_nary_op_lookup_1 (vno1, vnresult);
2835 }
2836
2837 /* Lookup OP in the current hash table, and return the resulting value
2838 number if it exists in the hash table. Return NULL_TREE if it does
2839 not exist in the hash table or if the result field of the operation
2840 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2841 if it exists. */
2842
2843 tree
2844 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2845 {
2846 vn_nary_op_t vno1
2847 = XALLOCAVAR (struct vn_nary_op_s,
2848 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2849 init_vn_nary_op_from_op (vno1, op);
2850 return vn_nary_op_lookup_1 (vno1, vnresult);
2851 }
2852
2853 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2854 value number if it exists in the hash table. Return NULL_TREE if
2855 it does not exist in the hash table. VNRESULT will contain the
2856 vn_nary_op_t from the hashtable if it exists. */
2857
2858 tree
2859 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2860 {
2861 vn_nary_op_t vno1
2862 = XALLOCAVAR (struct vn_nary_op_s,
2863 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2864 init_vn_nary_op_from_stmt (vno1, stmt);
2865 return vn_nary_op_lookup_1 (vno1, vnresult);
2866 }
2867
2868 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2869
2870 static vn_nary_op_t
2871 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2872 {
2873 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2874 }
2875
2876 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2877 obstack. */
2878
2879 static vn_nary_op_t
2880 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2881 {
2882 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2883 &current_info->nary_obstack);
2884
2885 vno1->value_id = value_id;
2886 vno1->length = length;
2887 vno1->result = result;
2888
2889 return vno1;
2890 }
2891
2892 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2893 VNO->HASHCODE first. */
2894
2895 static vn_nary_op_t
2896 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2897 bool compute_hash)
2898 {
2899 vn_nary_op_s **slot;
2900
2901 if (compute_hash)
2902 vno->hashcode = vn_nary_op_compute_hash (vno);
2903
2904 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2905 /* While we do not want to insert things twice it's awkward to
2906 avoid it in the case where visit_nary_op pattern-matches stuff
2907 and ends up simplifying the replacement to itself. We then
2908 get two inserts, one from visit_nary_op and one from
2909 vn_nary_build_or_lookup.
2910 So allow inserts with the same value number. */
2911 if (*slot && (*slot)->result == vno->result)
2912 return *slot;
2913
2914 gcc_assert (!*slot);
2915
2916 *slot = vno;
2917 return vno;
2918 }
2919
2920 /* Insert an n-ary operation into the current hash table using its
2921 pieces. Return the vn_nary_op_t structure we created and put in
2922 the hashtable. */
2923
2924 vn_nary_op_t
2925 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2926 tree type, tree *ops,
2927 tree result, unsigned int value_id)
2928 {
2929 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2930 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2931 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2932 }
2933
2934 /* Insert OP into the current hash table with a value number of
2935 RESULT. Return the vn_nary_op_t structure we created and put in
2936 the hashtable. */
2937
2938 vn_nary_op_t
2939 vn_nary_op_insert (tree op, tree result)
2940 {
2941 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2942 vn_nary_op_t vno1;
2943
2944 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2945 init_vn_nary_op_from_op (vno1, op);
2946 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2947 }
2948
2949 /* Insert the rhs of STMT into the current hash table with a value number of
2950 RESULT. */
2951
2952 static vn_nary_op_t
2953 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2954 {
2955 vn_nary_op_t vno1
2956 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2957 result, VN_INFO (result)->value_id);
2958 init_vn_nary_op_from_stmt (vno1, stmt);
2959 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2960 }
2961
2962 /* Compute a hashcode for PHI operation VP1 and return it. */
2963
2964 static inline hashval_t
2965 vn_phi_compute_hash (vn_phi_t vp1)
2966 {
2967 inchash::hash hstate (vp1->phiargs.length () > 2
2968 ? vp1->block->index : vp1->phiargs.length ());
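/* PHIs with more than two arguments are only ever considered equal to
   PHIs in the same block (see vn_phi_eq), so their hash can include
   the block index.  */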
2969 tree phi1op;
2970 tree type;
2971 edge e;
2972 edge_iterator ei;
2973
2974 /* If all PHI arguments are constants we need to distinguish
2975 the PHI node via its type. */
2976 type = vp1->type;
2977 hstate.merge_hash (vn_hash_type (type));
2978
2979 FOR_EACH_EDGE (e, ei, vp1->block->preds)
2980 {
2981 /* Don't hash backedge values; they need to be handled as VN_TOP
2982 for optimistic value-numbering. */
2983 if (e->flags & EDGE_DFS_BACK)
2984 continue;
2985
2986 phi1op = vp1->phiargs[e->dest_idx];
2987 if (phi1op == VN_TOP)
2988 continue;
2989 inchash::add_expr (phi1op, hstate);
2990 }
2991
2992 return hstate.end ();
2993 }
2994
2995
2996 /* Return true if COND1 and COND2 represent the same condition, set
2997 *INVERTED_P if one needs to be inverted to make it the same as
2998 the other. */
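/* E.g. a < b and b > a are the same condition, while a < b and a >= b
   are inverses of each other (modulo NaN handling).  */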
2999
3000 static bool
3001 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3002 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3003 {
3004 enum tree_code code1 = gimple_cond_code (cond1);
3005 enum tree_code code2 = gimple_cond_code (cond2);
3006
3007 *inverted_p = false;
3008 if (code1 == code2)
3009 ;
3010 else if (code1 == swap_tree_comparison (code2))
3011 std::swap (lhs2, rhs2);
3012 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3013 *inverted_p = true;
3014 else if (code1 == invert_tree_comparison
3015 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3016 {
3017 std::swap (lhs2, rhs2);
3018 *inverted_p = true;
3019 }
3020 else
3021 return false;
3022
3023 return ((expressions_equal_p (lhs1, lhs2)
3024 && expressions_equal_p (rhs1, rhs2))
3025 || (commutative_tree_code (code1)
3026 && expressions_equal_p (lhs1, rhs2)
3027 && expressions_equal_p (rhs1, lhs2)));
3028 }
3029
3030 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3031
3032 static int
3033 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3034 {
3035 if (vp1->hashcode != vp2->hashcode)
3036 return false;
3037
3038 if (vp1->block != vp2->block)
3039 {
3040 if (vp1->phiargs.length () != vp2->phiargs.length ())
3041 return false;
3042
3043 switch (vp1->phiargs.length ())
3044 {
3045 case 1:
3046 /* Single-arg PHIs are just copies. */
3047 break;
3048
3049 case 2:
3050 {
3051 /* Rule out backedges into the PHI. */
3052 if (vp1->block->loop_father->header == vp1->block
3053 || vp2->block->loop_father->header == vp2->block)
3054 return false;
3055
3056 /* If the PHI nodes do not have compatible types
3057 they are not the same. */
3058 if (!types_compatible_p (vp1->type, vp2->type))
3059 return false;
3060
3061 basic_block idom1
3062 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3063 basic_block idom2
3064 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3065 /* If the immediate dominators end in switch stmts multiple
3066 values may end up in the same PHI arg via intermediate
3067 CFG merges. */
3068 if (EDGE_COUNT (idom1->succs) != 2
3069 || EDGE_COUNT (idom2->succs) != 2)
3070 return false;
3071
3072 /* Verify the controlling stmt is the same. */
3073 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3074 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3075 if (! last1 || ! last2)
3076 return false;
3077 bool inverted_p;
3078 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3079 last2, vp2->cclhs, vp2->ccrhs,
3080 &inverted_p))
3081 return false;
3082
3083 /* Get at true/false controlled edges into the PHI. */
3084 edge te1, te2, fe1, fe2;
3085 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3086 &te1, &fe1)
3087 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3088 &te2, &fe2))
3089 return false;
3090
3091 /* Swap edges if the second condition is the inverse of the
3092 first. */
3093 if (inverted_p)
3094 std::swap (te2, fe2);
3095
3096 /* ??? Handle VN_TOP specially. */
3097 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3098 vp2->phiargs[te2->dest_idx])
3099 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3100 vp2->phiargs[fe2->dest_idx]))
3101 return false;
3102
3103 return true;
3104 }
3105
3106 default:
3107 return false;
3108 }
3109 }
3110
3111 /* If the PHI nodes do not have compatible types
3112 they are not the same. */
3113 if (!types_compatible_p (vp1->type, vp2->type))
3114 return false;
3115
3116 /* Any phi in the same block will have its arguments in the
3117 same edge order, because of how we store phi nodes. */
3118 int i;
3119 tree phi1op;
3120 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3121 {
3122 tree phi2op = vp2->phiargs[i];
3123 if (phi1op == VN_TOP || phi2op == VN_TOP)
3124 continue;
3125 if (!expressions_equal_p (phi1op, phi2op))
3126 return false;
3127 }
3128
3129 return true;
3130 }
3131
3132 static vec<tree> shared_lookup_phiargs;
3133
3134 /* Lookup PHI in the current hash table, and return the resulting
3135 value number if it exists in the hash table. Return NULL_TREE if
3136 it does not exist in the hash table. */
3137
3138 static tree
3139 vn_phi_lookup (gimple *phi)
3140 {
3141 vn_phi_s **slot;
3142 struct vn_phi_s vp1;
3143 edge e;
3144 edge_iterator ei;
3145
3146 shared_lookup_phiargs.truncate (0);
3147 shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3148
3149 /* Canonicalize the SSA_NAME's to their value number. */
3150 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3151 {
3152 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3153 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3154 shared_lookup_phiargs[e->dest_idx] = def;
3155 }
3156 vp1.type = TREE_TYPE (gimple_phi_result (phi));
3157 vp1.phiargs = shared_lookup_phiargs;
3158 vp1.block = gimple_bb (phi);
3159 /* Extract values of the controlling condition. */
3160 vp1.cclhs = NULL_TREE;
3161 vp1.ccrhs = NULL_TREE;
3162 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3163 if (EDGE_COUNT (idom1->succs) == 2)
3164 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3165 {
3166 vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3167 vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3168 }
3169 vp1.hashcode = vn_phi_compute_hash (&vp1);
3170 slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3171 NO_INSERT);
3172 if (!slot && current_info == optimistic_info)
3173 slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3174 NO_INSERT);
3175 if (!slot)
3176 return NULL_TREE;
3177 return (*slot)->result;
3178 }
3179
3180 /* Insert PHI into the current hash table with a value number of
3181 RESULT. */
3182
3183 static vn_phi_t
3184 vn_phi_insert (gimple *phi, tree result)
3185 {
3186 vn_phi_s **slot;
3187 vn_phi_t vp1 = current_info->phis_pool->allocate ();
3188 vec<tree> args = vNULL;
3189 edge e;
3190 edge_iterator ei;
3191
3192 args.safe_grow (gimple_phi_num_args (phi));
3193
3194 /* Canonicalize the SSA_NAME's to their value number. */
3195 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3196 {
3197 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3198 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3199 args[e->dest_idx] = def;
3200 }
3201 vp1->value_id = VN_INFO (result)->value_id;
3202 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3203 vp1->phiargs = args;
3204 vp1->block = gimple_bb (phi);
3205 /* Extract values of the controlling condition. */
3206 vp1->cclhs = NULL_TREE;
3207 vp1->ccrhs = NULL_TREE;
3208 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3209 if (EDGE_COUNT (idom1->succs) == 2)
3210 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3211 {
3212 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3213 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3214 }
3215 vp1->result = result;
3216 vp1->hashcode = vn_phi_compute_hash (vp1);
3217
3218 slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3219
3220 /* Because we iterate over phi operations more than once, it's
3221 possible the slot might already exist here, hence no assert. */
3222 *slot = vp1;
3223 return vp1;
3224 }
3225
3226
3227 /* Print set of components in strongly connected component SCC to OUT. */
3228
3229 static void
3230 print_scc (FILE *out, vec<tree> scc)
3231 {
3232 tree var;
3233 unsigned int i;
3234
3235 fprintf (out, "SCC consists of %u:", scc.length ());
3236 FOR_EACH_VEC_ELT (scc, i, var)
3237 {
3238 fprintf (out, " ");
3239 print_generic_expr (out, var);
3240 }
3241 fprintf (out, "\n");
3242 }
3243
3244 /* Return true if BB1 is dominated by BB2 taking into account edges
3245 that are not executable. */
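/* Ignoring not-executable edges can expose additional dominance
   relations; below this is approximated by skipping through blocks
   that have a single executable predecessor resp. successor.  */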
3246
3247 static bool
3248 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3249 {
3250 edge_iterator ei;
3251 edge e;
3252
3253 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3254 return true;
3255
3256 /* Before iterating we'd like to know if there exists an
3257 (executable) path from bb2 to bb1 at all; if not we can
3258 directly return false. For now simply iterate once. */
3259
3260 /* Iterate to the single executable bb1 predecessor. */
3261 if (EDGE_COUNT (bb1->preds) > 1)
3262 {
3263 edge prede = NULL;
3264 FOR_EACH_EDGE (e, ei, bb1->preds)
3265 if (e->flags & EDGE_EXECUTABLE)
3266 {
3267 if (prede)
3268 {
3269 prede = NULL;
3270 break;
3271 }
3272 prede = e;
3273 }
3274 if (prede)
3275 {
3276 bb1 = prede->src;
3277
3278 /* Re-do the dominance check with changed bb1. */
3279 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3280 return true;
3281 }
3282 }
3283
3284 /* Iterate to the single executable bb2 successor. */
3285 edge succe = NULL;
3286 FOR_EACH_EDGE (e, ei, bb2->succs)
3287 if (e->flags & EDGE_EXECUTABLE)
3288 {
3289 if (succe)
3290 {
3291 succe = NULL;
3292 break;
3293 }
3294 succe = e;
3295 }
3296 if (succe)
3297 {
3298 /* Verify the reached block is only reached through succe.
3299 If there is only one edge we can spare us the dominator
3300 check and iterate directly. */
3301 if (EDGE_COUNT (succe->dest->preds) > 1)
3302 {
3303 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3304 if (e != succe
3305 && (e->flags & EDGE_EXECUTABLE))
3306 {
3307 succe = NULL;
3308 break;
3309 }
3310 }
3311 if (succe)
3312 {
3313 bb2 = succe->dest;
3314
3315 /* Re-do the dominance check with changed bb2. */
3316 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3317 return true;
3318 }
3319 }
3320
3321 /* We could now iterate updating bb1 / bb2. */
3322 return false;
3323 }
3324
3325 /* Set the value number of FROM to TO, return true if it has changed
3326 as a result. */
3327
3328 static inline bool
3329 set_ssa_val_to (tree from, tree to)
3330 {
3331 tree currval = SSA_VAL (from);
3332 HOST_WIDE_INT toff, coff;
3333
3334 /* The only things we allow as value numbers are ssa_names
3335 and invariants. So assert that here. We don't allow VN_TOP
3336 as visiting a stmt should produce a value-number other than
3337 that.
3338 ??? Still VN_TOP can happen for unreachable code, so force
3339 it to varying in that case. Not all code is prepared to
3340 get VN_TOP on valueization. */
3341 if (to == VN_TOP)
3342 {
3343 if (dump_file && (dump_flags & TDF_DETAILS))
3344 fprintf (dump_file, "Forcing value number to varying on "
3345 "receiving VN_TOP\n");
3346 to = from;
3347 }
3348
3349 gcc_assert (to != NULL_TREE
3350 && ((TREE_CODE (to) == SSA_NAME
3351 && (to == from || SSA_VAL (to) == to))
3352 || is_gimple_min_invariant (to)));
3353
3354 if (from != to)
3355 {
3356 if (currval == from)
3357 {
3358 if (dump_file && (dump_flags & TDF_DETAILS))
3359 {
3360 fprintf (dump_file, "Not changing value number of ");
3361 print_generic_expr (dump_file, from);
3362 fprintf (dump_file, " from VARYING to ");
3363 print_generic_expr (dump_file, to);
3364 fprintf (dump_file, "\n");
3365 }
3366 return false;
3367 }
3368 else if (currval != VN_TOP
3369 && ! is_gimple_min_invariant (currval)
3370 && is_gimple_min_invariant (to))
3371 {
3372 if (dump_file && (dump_flags & TDF_DETAILS))
3373 {
3374 fprintf (dump_file, "Forcing VARYING instead of changing "
3375 "value number of ");
3376 print_generic_expr (dump_file, from);
3377 fprintf (dump_file, " from ");
3378 print_generic_expr (dump_file, currval);
3379 fprintf (dump_file, " (non-constant) to ");
3380 print_generic_expr (dump_file, to);
3381 fprintf (dump_file, " (constant)\n");
3382 }
3383 to = from;
3384 }
3385 else if (TREE_CODE (to) == SSA_NAME
3386 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3387 to = from;
3388 }
3389
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3391 {
3392 fprintf (dump_file, "Setting value number of ");
3393 print_generic_expr (dump_file, from);
3394 fprintf (dump_file, " to ");
3395 print_generic_expr (dump_file, to);
3396 }
3397
3398 if (currval != to
3399 && !operand_equal_p (currval, to, 0)
3400 /* Different undefined SSA names are not actually different. See
3401 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3402 && !(TREE_CODE (currval) == SSA_NAME
3403 && TREE_CODE (to) == SSA_NAME
3404 && ssa_undefined_value_p (currval, false)
3405 && ssa_undefined_value_p (to, false))
3406 /* ??? For addresses involving volatile objects or types operand_equal_p
3407 does not reliably detect ADDR_EXPRs as equal. We know we are only
3408 getting invariant gimple addresses here, so can use
3409 get_addr_base_and_unit_offset to do this comparison. */
3410 && !(TREE_CODE (currval) == ADDR_EXPR
3411 && TREE_CODE (to) == ADDR_EXPR
3412 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3413 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3414 && coff == toff))
3415 {
3416 if (dump_file && (dump_flags & TDF_DETAILS))
3417 fprintf (dump_file, " (changed)\n");
3418
3419 /* If we equate two SSA names we have to make the side-band info
3420 of the leader conservative (and remember whatever original value
3421 was present). */
3422 if (TREE_CODE (to) == SSA_NAME)
3423 {
3424 if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3425 && SSA_NAME_RANGE_INFO (to))
3426 {
3427 if (SSA_NAME_IS_DEFAULT_DEF (to)
3428 || dominated_by_p_w_unex
3429 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3430 gimple_bb (SSA_NAME_DEF_STMT (to))))
3431 /* Keep the info from the dominator. */
3432 ;
3433 else
3434 {
3435 /* Save old info. */
3436 if (! VN_INFO (to)->info.range_info)
3437 {
3438 VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3439 VN_INFO (to)->range_info_anti_range_p
3440 = SSA_NAME_ANTI_RANGE_P (to);
3441 }
3442 /* Rather than allocating memory and unioning the info
3443 just clear it. */
3444 if (dump_file && (dump_flags & TDF_DETAILS))
3445 {
3446 fprintf (dump_file, "clearing range info of ");
3447 print_generic_expr (dump_file, to);
3448 fprintf (dump_file, "\n");
3449 }
3450 SSA_NAME_RANGE_INFO (to) = NULL;
3451 }
3452 }
3453 else if (POINTER_TYPE_P (TREE_TYPE (to))
3454 && SSA_NAME_PTR_INFO (to))
3455 {
3456 if (SSA_NAME_IS_DEFAULT_DEF (to)
3457 || dominated_by_p_w_unex
3458 (gimple_bb (SSA_NAME_DEF_STMT (from)),
3459 gimple_bb (SSA_NAME_DEF_STMT (to))))
3460 /* Keep the info from the dominator. */
3461 ;
3462 else if (! SSA_NAME_PTR_INFO (from)
3463 /* Handle the case of trivially equivalent info. */
3464 || memcmp (SSA_NAME_PTR_INFO (to),
3465 SSA_NAME_PTR_INFO (from),
3466 sizeof (ptr_info_def)) != 0)
3467 {
3468 /* Save old info. */
3469 if (! VN_INFO (to)->info.ptr_info)
3470 VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3471 /* Rather than allocating memory and unioning the info
3472 just clear it. */
3473 if (dump_file && (dump_flags & TDF_DETAILS))
3474 {
3475 fprintf (dump_file, "clearing points-to info of ");
3476 print_generic_expr (dump_file, to);
3477 fprintf (dump_file, "\n");
3478 }
3479 SSA_NAME_PTR_INFO (to) = NULL;
3480 }
3481 }
3482 }
3483
3484 VN_INFO (from)->valnum = to;
3485 return true;
3486 }
3487 if (dump_file && (dump_flags & TDF_DETAILS))
3488 fprintf (dump_file, "\n");
3489 return false;
3490 }
3491
3492 /* Mark as processed all the definitions in the defining stmt of USE, or
3493 the USE itself. */
3494
3495 static void
3496 mark_use_processed (tree use)
3497 {
3498 ssa_op_iter iter;
3499 def_operand_p defp;
3500 gimple *stmt = SSA_NAME_DEF_STMT (use);
3501
3502 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3503 {
3504 VN_INFO (use)->use_processed = true;
3505 return;
3506 }
3507
3508 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3509 {
3510 tree def = DEF_FROM_PTR (defp);
3511
3512 VN_INFO (def)->use_processed = true;
3513 }
3514 }
3515
3516 /* Set all definitions in STMT to value number themselves.
3517 Return true if a value number changed. */
3518
3519 static bool
3520 defs_to_varying (gimple *stmt)
3521 {
3522 bool changed = false;
3523 ssa_op_iter iter;
3524 def_operand_p defp;
3525
3526 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3527 {
3528 tree def = DEF_FROM_PTR (defp);
3529 changed |= set_ssa_val_to (def, def);
3530 }
3531 return changed;
3532 }
3533
3534 /* Visit a copy between LHS and RHS, return true if the value number
3535 changed. */
3536
3537 static bool
3538 visit_copy (tree lhs, tree rhs)
3539 {
3540 /* Valueize. */
3541 rhs = SSA_VAL (rhs);
3542
3543 return set_ssa_val_to (lhs, rhs);
3544 }
3545
3546 /* Lookup a value for OP in type WIDE_TYPE where the value in the type of OP
3547 is the same. */
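/* E.g. with WIDE_TYPE int and OP x_1 where
     x_1 = (short) y_2;
   the value y_2 can be used directly; likewise an already available
   widening of OP or a widened constant.  */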
3548
3549 static tree
3550 valueized_wider_op (tree wide_type, tree op)
3551 {
3552 if (TREE_CODE (op) == SSA_NAME)
3553 op = SSA_VAL (op);
3554
3555 /* Either we have the op widened available. */
3556 tree ops[3] = {};
3557 ops[0] = op;
3558 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3559 wide_type, ops, NULL);
3560 if (tem)
3561 return tem;
3562
3563 /* Or the op is truncated from some existing value. */
3564 if (TREE_CODE (op) == SSA_NAME)
3565 {
3566 gimple *def = SSA_NAME_DEF_STMT (op);
3567 if (is_gimple_assign (def)
3568 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3569 {
3570 tem = gimple_assign_rhs1 (def);
3571 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3572 {
3573 if (TREE_CODE (tem) == SSA_NAME)
3574 tem = SSA_VAL (tem);
3575 return tem;
3576 }
3577 }
3578 }
3579
3580 /* For constants simply extend it. */
3581 if (TREE_CODE (op) == INTEGER_CST)
3582 return wide_int_to_tree (wide_type, wi::to_wide (op));
3583
3584 return NULL_TREE;
3585 }
3586
3587 /* Visit a nary operator RHS, value number it, and return true if the
3588 value number of LHS has changed as a result. */
3589
3590 static bool
3591 visit_nary_op (tree lhs, gassign *stmt)
3592 {
3593 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3594 if (result)
3595 return set_ssa_val_to (lhs, result);
3596
3597 /* Do some special pattern matching for redundancies of operations
3598 in different types. */
3599 enum tree_code code = gimple_assign_rhs_code (stmt);
3600 tree type = TREE_TYPE (lhs);
3601 tree rhs1 = gimple_assign_rhs1 (stmt);
3602 switch (code)
3603 {
3604 CASE_CONVERT:
3605 /* Match arithmetic done in a different type where we can easily
3606 substitute the result from some earlier sign-changed or widened
3607 operation. */
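/* E.g. for
     _1 = a_2 + b_3;   (in a narrower unsigned type)
     _4 = (int) _1;
   if the addition of the widened operands is already available, its
   value (masked to the narrower precision for zero-extensions) can be
   used for _4.  */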
3608 if (INTEGRAL_TYPE_P (type)
3609 && TREE_CODE (rhs1) == SSA_NAME
3610 /* We only handle sign-changes or zero-extension -> & mask. */
3611 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3612 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3613 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3614 {
3615 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3616 if (def
3617 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3618 || gimple_assign_rhs_code (def) == MINUS_EXPR
3619 || gimple_assign_rhs_code (def) == MULT_EXPR))
3620 {
3621 tree ops[3] = {};
3622 /* Either we have the op widened available. */
3623 ops[0] = valueized_wider_op (type,
3624 gimple_assign_rhs1 (def));
3625 if (ops[0])
3626 ops[1] = valueized_wider_op (type,
3627 gimple_assign_rhs2 (def));
3628 if (ops[0] && ops[1])
3629 {
3630 ops[0] = vn_nary_op_lookup_pieces
3631 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3632 /* We have wider operation available. */
3633 if (ops[0])
3634 {
3635 unsigned lhs_prec = TYPE_PRECISION (type);
3636 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3637 if (lhs_prec == rhs_prec)
3638 {
3639 ops[1] = NULL_TREE;
3640 result = vn_nary_build_or_lookup (NOP_EXPR,
3641 type, ops);
3642 if (result)
3643 {
3644 bool changed = set_ssa_val_to (lhs, result);
3645 vn_nary_op_insert_stmt (stmt, result);
3646 return changed;
3647 }
3648 }
3649 else
3650 {
3651 ops[1] = wide_int_to_tree (type,
3652 wi::mask (rhs_prec, false,
3653 lhs_prec));
3654 result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3655 TREE_TYPE (lhs),
3656 ops);
3657 if (result)
3658 {
3659 bool changed = set_ssa_val_to (lhs, result);
3660 vn_nary_op_insert_stmt (stmt, result);
3661 return changed;
3662 }
3663 }
3664 }
3665 }
3666 }
3667 }
3668 default:;
3669 }
3670
3671 bool changed = set_ssa_val_to (lhs, lhs);
3672 vn_nary_op_insert_stmt (stmt, lhs);
3673 return changed;
3674 }
3675
3676 /* Visit a call STMT storing into LHS. Return true if the value number
3677 of the LHS has changed as a result. */
3678
3679 static bool
3680 visit_reference_op_call (tree lhs, gcall *stmt)
3681 {
3682 bool changed = false;
3683 struct vn_reference_s vr1;
3684 vn_reference_t vnresult = NULL;
3685 tree vdef = gimple_vdef (stmt);
3686
3687 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3688 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3689 lhs = NULL_TREE;
3690
3691 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3692 if (vnresult)
3693 {
3694 if (vnresult->result_vdef && vdef)
3695 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3696 else if (vdef)
3697 /* If the call was discovered to be pure or const reflect
3698 that as far as possible. */
3699 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3700
3701 if (!vnresult->result && lhs)
3702 vnresult->result = lhs;
3703
3704 if (vnresult->result && lhs)
3705 changed |= set_ssa_val_to (lhs, vnresult->result);
3706 }
3707 else
3708 {
3709 vn_reference_t vr2;
3710 vn_reference_s **slot;
3711 tree vdef_val = vdef;
3712 if (vdef)
3713 {
3714 /* If we value numbered an indirect function's target to
3715 one not clobbering memory, value number its VDEF to its
3716 VUSE. */
3717 tree fn = gimple_call_fn (stmt);
3718 if (fn && TREE_CODE (fn) == SSA_NAME)
3719 {
3720 fn = SSA_VAL (fn);
3721 if (TREE_CODE (fn) == ADDR_EXPR
3722 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3723 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3724 & (ECF_CONST | ECF_PURE)))
3725 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3726 }
3727 changed |= set_ssa_val_to (vdef, vdef_val);
3728 }
3729 if (lhs)
3730 changed |= set_ssa_val_to (lhs, lhs);
3731 vr2 = current_info->references_pool->allocate ();
3732 vr2->vuse = vr1.vuse;
3733 /* As we are not walking the virtual operand chain we know the
3734 shared_lookup_references are still original so we can re-use
3735 them here. */
3736 vr2->operands = vr1.operands.copy ();
3737 vr2->type = vr1.type;
3738 vr2->set = vr1.set;
3739 vr2->hashcode = vr1.hashcode;
3740 vr2->result = lhs;
3741 vr2->result_vdef = vdef_val;
3742 slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3743 INSERT);
3744 gcc_assert (!*slot);
3745 *slot = vr2;
3746 }
3747
3748 return changed;
3749 }
3750
3751 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3752 and return true if the value number of the LHS has changed as a result. */
3753
3754 static bool
3755 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3756 {
3757 bool changed = false;
3758 tree last_vuse;
3759 tree result;
3760
3761 last_vuse = gimple_vuse (stmt);
3762 last_vuse_ptr = &last_vuse;
3763 result = vn_reference_lookup (op, gimple_vuse (stmt),
3764 default_vn_walk_kind, NULL, true);
3765 last_vuse_ptr = NULL;
3766
3767 /* We handle type-punning through unions by value-numbering based
3768 on offset and size of the access. Be prepared to handle a
3769 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
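/* A hypothetical example (the union and names are made up):
     union { int i; float f; } u;
     u.f = x_1;
     y_2 = u.i;                     <- y_2 value numbers to
                                       VIEW_CONVERT_EXPR<int>(x_1)  */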
3770 if (result
3771 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3772 {
3773 /* We will be setting the value number of lhs to the value number
3774 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3775 So first simplify and lookup this expression to see if it
3776 is already available. */
3777 code_helper rcode = VIEW_CONVERT_EXPR;
3778 tree ops[3] = { result };
3779 result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3780 }
3781
3782 if (result)
3783 changed = set_ssa_val_to (lhs, result);
3784 else
3785 {
3786 changed = set_ssa_val_to (lhs, lhs);
3787 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3788 }
3789
3790 return changed;
3791 }
3792
3793
3794 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3795 and return true if the value number of the LHS has changed as a result. */
3796
3797 static bool
3798 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3799 {
3800 bool changed = false;
3801 vn_reference_t vnresult = NULL;
3802 tree assign;
3803 bool resultsame = false;
3804 tree vuse = gimple_vuse (stmt);
3805 tree vdef = gimple_vdef (stmt);
3806
3807 if (TREE_CODE (op) == SSA_NAME)
3808 op = SSA_VAL (op);
3809
3810 /* First we want to lookup using the *vuses* from the store and see
3811 if the last store to this location with the same address had the
3812 same value.
3813
3814 The vuses represent the memory state before the store. If the
3815 memory state, address, and value of the store are the same as those
3816 of the last store to this location, then this store will produce
3817 the same memory state as that store.
3818
3819 In this case the vdef versions for this store are value numbered to those
3820 vuse versions, since they represent the same memory state after
3821 this store.
3822
3823 Otherwise, the vdefs for the store are used when inserting into
3824 the table, since the store generates a new memory state. */
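/* Sketch of the matching case (virtual operand names invented):
     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = x_4;
     ...
     # .MEM_5 = VDEF <.MEM_2>
     *p_3 = x_4;                    <- redundant store; .MEM_5 is value
                                       numbered to .MEM_2  */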
3825
3826 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3827 if (vnresult
3828 && vnresult->result)
3829 {
3830 tree result = vnresult->result;
3831 if (TREE_CODE (result) == SSA_NAME)
3832 result = SSA_VAL (result);
3833 resultsame = expressions_equal_p (result, op);
3834 if (resultsame)
3835 {
3836 /* If the TBAA state isn't compatible for downstream reads
3837 we cannot value-number the VDEFs the same. */
3838 alias_set_type set = get_alias_set (lhs);
3839 if (vnresult->set != set
3840 && ! alias_set_subset_of (set, vnresult->set))
3841 resultsame = false;
3842 }
3843 }
3844
3845 if (!resultsame)
3846 {
3847 /* Only perform the following when being called from PRE
3848 which embeds tail merging. */
3849 if (default_vn_walk_kind == VN_WALK)
3850 {
3851 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3852 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3853 if (vnresult)
3854 {
3855 VN_INFO (vdef)->use_processed = true;
3856 return set_ssa_val_to (vdef, vnresult->result_vdef);
3857 }
3858 }
3859
3860 if (dump_file && (dump_flags & TDF_DETAILS))
3861 {
3862 fprintf (dump_file, "No store match\n");
3863 fprintf (dump_file, "Value numbering store ");
3864 print_generic_expr (dump_file, lhs);
3865 fprintf (dump_file, " to ");
3866 print_generic_expr (dump_file, op);
3867 fprintf (dump_file, "\n");
3868 }
3869 /* Have to set value numbers before insert, since insert is
3870 going to valueize the references in-place. */
3871 if (vdef)
3872 changed |= set_ssa_val_to (vdef, vdef);
3873
3874 /* Do not insert structure copies into the tables. */
3875 if (is_gimple_min_invariant (op)
3876 || is_gimple_reg (op))
3877 vn_reference_insert (lhs, op, vdef, NULL);
3878
3879 /* Only perform the following when being called from PRE
3880 which embeds tail merging. */
3881 if (default_vn_walk_kind == VN_WALK)
3882 {
3883 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3884 vn_reference_insert (assign, lhs, vuse, vdef);
3885 }
3886 }
3887 else
3888 {
3889 /* We had a match, so value number the vdef to have the value
3890 number of the vuse it came from. */
3891
3892 if (dump_file && (dump_flags & TDF_DETAILS))
3893 fprintf (dump_file, "Store matched earlier value, "
3894 "value numbering store vdefs to matching vuses.\n");
3895
3896 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3897 }
3898
3899 return changed;
3900 }
3901
3902 /* Visit and value number PHI, return true if the value number
3903 changed. */
3904
3905 static bool
3906 visit_phi (gimple *phi)
3907 {
3908 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3909 unsigned n_executable = 0;
3910 bool allsame = true;
3911 edge_iterator ei;
3912 edge e;
3913
3914 /* TODO: We could check for this in init_sccvn, and replace this
3915 with a gcc_assert. */
3916 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3917 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3918
3919 /* See if all non-TOP arguments have the same value. TOP is
3920 equivalent to everything, so we can ignore it. */
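/* E.g. (illustrative names) x_3 = PHI <a_1(e1), a_1(e2)> value numbers
   to a_1 when both edges are executable, and an argument whose value is
   still VN_TOP is simply ignored.  */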
3921 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3922 if (e->flags & EDGE_EXECUTABLE)
3923 {
3924 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3925
3926 ++n_executable;
3927 if (TREE_CODE (def) == SSA_NAME)
3928 def = SSA_VAL (def);
3929 if (def == VN_TOP)
3930 ;
3931 /* Ignore undefined defs for sameval but record one. */
3932 else if (TREE_CODE (def) == SSA_NAME
3933 && ssa_undefined_value_p (def, false))
3934 seen_undef = def;
3935 else if (sameval == VN_TOP)
3936 sameval = def;
3937 else if (!expressions_equal_p (def, sameval))
3938 {
3939 allsame = false;
3940 break;
3941 }
3942 }
3943
3944
3945 /* If none of the edges was executable keep the value-number at VN_TOP,
3946 if only a single edge is executable use its value. */
3947 if (n_executable <= 1)
3948 result = seen_undef ? seen_undef : sameval;
3949 /* If we saw only undefined values and VN_TOP use one of the
3950 undefined values. */
3951 else if (sameval == VN_TOP)
3952 result = seen_undef ? seen_undef : sameval;
3953 /* First see if it is equivalent to a phi node in this block. We prefer
3954 this as it allows IV elimination - see PRs 66502 and 67167. */
3955 else if ((result = vn_phi_lookup (phi)))
3956 ;
3957 /* If all values are the same use that, unless we've seen undefined
3958 values as well and the value isn't constant.
3959 CCP/copyprop have the same restriction to not remove uninit warnings. */
3960 else if (allsame
3961 && (! seen_undef || is_gimple_min_invariant (sameval)))
3962 result = sameval;
3963 else
3964 {
3965 result = PHI_RESULT (phi);
3966 /* Only insert PHIs that are varying, for constant value numbers
3967 we mess up equivalences otherwise as we are only comparing
3968 the immediate controlling predicates. */
3969 vn_phi_insert (phi, result);
3970 }
3971
3972 return set_ssa_val_to (PHI_RESULT (phi), result);
3973 }
3974
3975 /* Try to simplify RHS using equivalences and constant folding. */
3976
3977 static tree
3978 try_to_simplify (gassign *stmt)
3979 {
3980 enum tree_code code = gimple_assign_rhs_code (stmt);
3981 tree tem;
3982
3983 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3984 in this case, there is no point in doing extra work. */
3985 if (code == SSA_NAME)
3986 return NULL_TREE;
3987
3988 /* First try constant folding based on our current lattice. */
3989 mprts_hook = vn_lookup_simplify_result;
3990 mprts_hook_cnt = 9;
3991 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3992 mprts_hook = NULL;
3993 if (tem
3994 && (TREE_CODE (tem) == SSA_NAME
3995 || is_gimple_min_invariant (tem)))
3996 return tem;
3997
3998 return NULL_TREE;
3999 }
4000
4001 /* Visit and value number USE, return true if the value number
4002 changed. */
4003
4004 static bool
4005 visit_use (tree use)
4006 {
4007 bool changed = false;
4008 gimple *stmt = SSA_NAME_DEF_STMT (use);
4009
4010 mark_use_processed (use);
4011
4012 gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4013 && !SSA_NAME_IS_DEFAULT_DEF (use));
4014
4015 if (dump_file && (dump_flags & TDF_DETAILS))
4016 {
4017 fprintf (dump_file, "Value numbering ");
4018 print_generic_expr (dump_file, use);
4019 fprintf (dump_file, " stmt = ");
4020 print_gimple_stmt (dump_file, stmt, 0);
4021 }
4022
4023 if (gimple_code (stmt) == GIMPLE_PHI)
4024 changed = visit_phi (stmt);
4025 else if (gimple_has_volatile_ops (stmt))
4026 changed = defs_to_varying (stmt);
4027 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4028 {
4029 enum tree_code code = gimple_assign_rhs_code (ass);
4030 tree lhs = gimple_assign_lhs (ass);
4031 tree rhs1 = gimple_assign_rhs1 (ass);
4032 tree simplified;
4033
4034 /* Shortcut for copies. Simplifying copies is pointless,
4035 since we copy the expression and value they represent. */
4036 if (code == SSA_NAME
4037 && TREE_CODE (lhs) == SSA_NAME)
4038 {
4039 changed = visit_copy (lhs, rhs1);
4040 goto done;
4041 }
4042 simplified = try_to_simplify (ass);
4043 if (simplified)
4044 {
4045 if (dump_file && (dump_flags & TDF_DETAILS))
4046 {
4047 fprintf (dump_file, "RHS ");
4048 print_gimple_expr (dump_file, ass, 0);
4049 fprintf (dump_file, " simplified to ");
4050 print_generic_expr (dump_file, simplified);
4051 fprintf (dump_file, "\n");
4052 }
4053 }
4054 /* Setting value numbers to constants will occasionally
4055 screw up phi congruence because constants are not
4056 uniquely associated with a single ssa name that can be
4057 looked up. */
4058 if (simplified
4059 && is_gimple_min_invariant (simplified)
4060 && TREE_CODE (lhs) == SSA_NAME)
4061 {
4062 changed = set_ssa_val_to (lhs, simplified);
4063 goto done;
4064 }
4065 else if (simplified
4066 && TREE_CODE (simplified) == SSA_NAME
4067 && TREE_CODE (lhs) == SSA_NAME)
4068 {
4069 changed = visit_copy (lhs, simplified);
4070 goto done;
4071 }
4072
4073 if ((TREE_CODE (lhs) == SSA_NAME
4074 /* We can substitute SSA_NAMEs that are live over
4075 abnormal edges with their constant value. */
4076 && !(gimple_assign_copy_p (ass)
4077 && is_gimple_min_invariant (rhs1))
4078 && !(simplified
4079 && is_gimple_min_invariant (simplified))
4080 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4081 /* Stores or copies from SSA_NAMEs that are live over
4082 abnormal edges are a problem. */
4083 || (code == SSA_NAME
4084 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4085 changed = defs_to_varying (ass);
4086 else if (REFERENCE_CLASS_P (lhs)
4087 || DECL_P (lhs))
4088 changed = visit_reference_op_store (lhs, rhs1, ass);
4089 else if (TREE_CODE (lhs) == SSA_NAME)
4090 {
4091 if ((gimple_assign_copy_p (ass)
4092 && is_gimple_min_invariant (rhs1))
4093 || (simplified
4094 && is_gimple_min_invariant (simplified)))
4095 {
4096 if (simplified)
4097 changed = set_ssa_val_to (lhs, simplified);
4098 else
4099 changed = set_ssa_val_to (lhs, rhs1);
4100 }
4101 else
4102 {
4103 /* Visit the original statement. */
4104 switch (vn_get_stmt_kind (ass))
4105 {
4106 case VN_NARY:
4107 changed = visit_nary_op (lhs, ass);
4108 break;
4109 case VN_REFERENCE:
4110 changed = visit_reference_op_load (lhs, rhs1, ass);
4111 break;
4112 default:
4113 changed = defs_to_varying (ass);
4114 break;
4115 }
4116 }
4117 }
4118 else
4119 changed = defs_to_varying (ass);
4120 }
4121 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4122 {
4123 tree lhs = gimple_call_lhs (call_stmt);
4124 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4125 {
4126 /* Try constant folding based on our current lattice. */
4127 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4128 vn_valueize);
4129 if (simplified)
4130 {
4131 if (dump_file && (dump_flags & TDF_DETAILS))
4132 {
4133 fprintf (dump_file, "call ");
4134 print_gimple_expr (dump_file, call_stmt, 0);
4135 fprintf (dump_file, " simplified to ");
4136 print_generic_expr (dump_file, simplified);
4137 fprintf (dump_file, "\n");
4138 }
4139 }
4140 /* Setting value numbers to constants will occasionally
4141 screw up phi congruence because constants are not
4142 uniquely associated with a single ssa name that can be
4143 looked up. */
4144 if (simplified
4145 && is_gimple_min_invariant (simplified))
4146 {
4147 changed = set_ssa_val_to (lhs, simplified);
4148 if (gimple_vdef (call_stmt))
4149 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4150 SSA_VAL (gimple_vuse (call_stmt)));
4151 goto done;
4152 }
4153 else if (simplified
4154 && TREE_CODE (simplified) == SSA_NAME)
4155 {
4156 changed = visit_copy (lhs, simplified);
4157 if (gimple_vdef (call_stmt))
4158 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4159 SSA_VAL (gimple_vuse (call_stmt)));
4160 goto done;
4161 }
4162 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4163 {
4164 changed = defs_to_varying (call_stmt);
4165 goto done;
4166 }
4167 }
4168
4169 /* Pick up flags from a devirtualization target. */
4170 tree fn = gimple_call_fn (stmt);
4171 int extra_fnflags = 0;
4172 if (fn && TREE_CODE (fn) == SSA_NAME)
4173 {
4174 fn = SSA_VAL (fn);
4175 if (TREE_CODE (fn) == ADDR_EXPR
4176 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4177 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4178 }
4179 if (!gimple_call_internal_p (call_stmt)
4180 && (/* Calls to the same function with the same vuse
4181 and the same operands do not necessarily return the same
4182 value, unless they're pure or const. */
4183 ((gimple_call_flags (call_stmt) | extra_fnflags)
4184 & (ECF_PURE | ECF_CONST))
4185 /* If calls have a vdef, subsequent calls won't have
4186 the same incoming vuse. So, if 2 calls with vdef have the
4187 same vuse, we know they're not subsequent.
4188 We can value number 2 such calls to the same function with
4189 the same vuse and the same operands the same, even though
4190 they are not subsequent, because there is no code in the
4191 program that can compare the 2 values... */
4192 || (gimple_vdef (call_stmt)
4193 /* ... unless the call returns a pointer which does
4194 not alias with anything else. In that case the
4195 information that the values are distinct is encoded
4196 in the IL. */
4197 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4198 /* Only perform the following when being called from PRE
4199 which embeds tail merging. */
4200 && default_vn_walk_kind == VN_WALK)))
4201 changed = visit_reference_op_call (lhs, call_stmt);
4202 else
4203 changed = defs_to_varying (call_stmt);
4204 }
4205 else
4206 changed = defs_to_varying (stmt);
4207 done:
4208 return changed;
4209 }
4210
4211 /* Compare two operands by reverse postorder index. */
4212
4213 static int
4214 compare_ops (const void *pa, const void *pb)
4215 {
4216 const tree opa = *((const tree *)pa);
4217 const tree opb = *((const tree *)pb);
4218 gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4219 gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4220 basic_block bba;
4221 basic_block bbb;
4222
4223 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4224 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4225 else if (gimple_nop_p (opstmta))
4226 return -1;
4227 else if (gimple_nop_p (opstmtb))
4228 return 1;
4229
4230 bba = gimple_bb (opstmta);
4231 bbb = gimple_bb (opstmtb);
4232
4233 if (!bba && !bbb)
4234 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4235 else if (!bba)
4236 return -1;
4237 else if (!bbb)
4238 return 1;
4239
4240 if (bba == bbb)
4241 {
4242 if (gimple_code (opstmta) == GIMPLE_PHI
4243 && gimple_code (opstmtb) == GIMPLE_PHI)
4244 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4245 else if (gimple_code (opstmta) == GIMPLE_PHI)
4246 return -1;
4247 else if (gimple_code (opstmtb) == GIMPLE_PHI)
4248 return 1;
4249 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4250 return gimple_uid (opstmta) - gimple_uid (opstmtb);
4251 else
4252 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4253 }
4254 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4255 }
4256
4257 /* Sort an array containing members of a strongly connected component
4258 SCC so that the members are ordered by RPO number.
4259 This means that when the sort is complete, iterating through the
4260 array will give you the members in RPO order. */
4261
4262 static void
4263 sort_scc (vec<tree> scc)
4264 {
4265 scc.qsort (compare_ops);
4266 }
4267
4268 /* Insert the no longer used nary ONARY to the hash INFO. */
4269
4270 static void
4271 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4272 {
4273 size_t size = sizeof_vn_nary_op (onary->length);
4274 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4275 &info->nary_obstack);
4276 memcpy (nary, onary, size);
4277 vn_nary_op_insert_into (nary, info->nary, false);
4278 }
4279
4280 /* Insert the no longer used phi OPHI to the hash INFO. */
4281
4282 static void
4283 copy_phi (vn_phi_t ophi, vn_tables_t info)
4284 {
4285 vn_phi_t phi = info->phis_pool->allocate ();
4286 vn_phi_s **slot;
4287 memcpy (phi, ophi, sizeof (*phi));
4288 ophi->phiargs.create (0);
4289 slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4290 gcc_assert (!*slot);
4291 *slot = phi;
4292 }
4293
4294 /* Insert the no longer used reference OREF to the hash INFO. */
4295
4296 static void
4297 copy_reference (vn_reference_t oref, vn_tables_t info)
4298 {
4299 vn_reference_t ref;
4300 vn_reference_s **slot;
4301 ref = info->references_pool->allocate ();
4302 memcpy (ref, oref, sizeof (*ref));
4303 oref->operands.create (0);
4304 slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4305 if (*slot)
4306 free_reference (*slot);
4307 *slot = ref;
4308 }
4309
4310 /* Process a strongly connected component in the SSA graph. */
4311
4312 static void
4313 process_scc (vec<tree> scc)
4314 {
4315 tree var;
4316 unsigned int i;
4317 unsigned int iterations = 0;
4318 bool changed = true;
4319 vn_nary_op_iterator_type hin;
4320 vn_phi_iterator_type hip;
4321 vn_reference_iterator_type hir;
4322 vn_nary_op_t nary;
4323 vn_phi_t phi;
4324 vn_reference_t ref;
4325
4326 /* If the SCC has a single member, just visit it. */
4327 if (scc.length () == 1)
4328 {
4329 tree use = scc[0];
4330 if (VN_INFO (use)->use_processed)
4331 return;
4332 /* We need to make sure it doesn't form a cycle itself, which can
4333 happen for self-referential PHI nodes. In that case we would
4334 end up inserting an expression with VN_TOP operands into the
4335 valid table which makes us derive bogus equivalences later.
4336 The cheapest way to check this is to assume it for all PHI nodes. */
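/* E.g. the degenerate loop PHI (names invented)
     i_1 = PHI <0(preheader), i_1(latch)>
   forms a single-member SCC that still references itself, so it goes
   through the iteration below rather than a direct visit.  */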
4337 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4338 /* Fallthru to iteration. */ ;
4339 else
4340 {
4341 visit_use (use);
4342 return;
4343 }
4344 }
4345
4346 if (dump_file && (dump_flags & TDF_DETAILS))
4347 print_scc (dump_file, scc);
4348
4349 /* Iterate over the SCC with the optimistic table until it stops
4350 changing. */
4351 current_info = optimistic_info;
4352 while (changed)
4353 {
4354 changed = false;
4355 iterations++;
4356 if (dump_file && (dump_flags & TDF_DETAILS))
4357 fprintf (dump_file, "Starting iteration %d\n", iterations);
4358 /* As we are value-numbering optimistically we have to
4359 clear the expression tables and the simplified expressions
4360 in each iteration until we converge. */
4361 optimistic_info->nary->empty ();
4362 optimistic_info->phis->empty ();
4363 optimistic_info->references->empty ();
4364 obstack_free (&optimistic_info->nary_obstack, NULL);
4365 gcc_obstack_init (&optimistic_info->nary_obstack);
4366 optimistic_info->phis_pool->release ();
4367 optimistic_info->references_pool->release ();
4368 FOR_EACH_VEC_ELT (scc, i, var)
4369 gcc_assert (!VN_INFO (var)->needs_insertion
4370 && VN_INFO (var)->expr == NULL);
4371 FOR_EACH_VEC_ELT (scc, i, var)
4372 changed |= visit_use (var);
4373 }
4374
4375 if (dump_file && (dump_flags & TDF_DETAILS))
4376 fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4377 statistics_histogram_event (cfun, "SCC iterations", iterations);
4378
4379 /* Finally, copy the contents of the no longer used optimistic
4380 table to the valid table. */
4381 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4382 copy_nary (nary, valid_info);
4383 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4384 copy_phi (phi, valid_info);
4385 FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4386 ref, vn_reference_t, hir)
4387 copy_reference (ref, valid_info);
4388
4389 current_info = valid_info;
4390 }
4391
4392
4393 /* Pop the components of the found SCC for NAME off the SCC stack
4394 and process them. SCC's exceeding the size limit have their
4395 members dropped to varying instead of being iterated. */
4396
4397 static void
4398 extract_and_process_scc_for_name (tree name)
4399 {
4400 auto_vec<tree> scc;
4401 tree x;
4402
4403 /* Found an SCC, pop the components off the SCC stack and
4404 process them. */
4405 do
4406 {
4407 x = sccstack.pop ();
4408
4409 VN_INFO (x)->on_sccstack = false;
4410 scc.safe_push (x);
4411 } while (x != name);
4412
4413 /* Drop all defs in the SCC to varying in case an SCC turns out to be
4414 incredibly large.
4415 ??? Just switch to a non-optimistic mode that avoids any iteration. */
4416 if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4417 {
4418 if (dump_file)
4419 {
4420 print_scc (dump_file, scc);
4421 fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4422 "size %u exceeding %u\n", scc.length (),
4423 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4424 }
4425 tree var;
4426 unsigned i;
4427 FOR_EACH_VEC_ELT (scc, i, var)
4428 {
4429 gimple *def = SSA_NAME_DEF_STMT (var);
4430 mark_use_processed (var);
4431 if (SSA_NAME_IS_DEFAULT_DEF (var)
4432 || gimple_code (def) == GIMPLE_PHI)
4433 set_ssa_val_to (var, var);
4434 else
4435 defs_to_varying (def);
4436 }
4437 return;
4438 }
4439
4440 if (scc.length () > 1)
4441 sort_scc (scc);
4442
4443 process_scc (scc);
4444 }
4445
4446 /* Depth first search on NAME to discover and process SCC's in the SSA
4447 graph.
4448 Execution of this algorithm relies on the fact that the SCC's are
4449 popped off the stack in topological order.
4450 SCC's exceeding the size limit are not iterated; their members are
4451 dropped to varying instead. */
4452
4453 static void
4454 DFS (tree name)
4455 {
4456 auto_vec<ssa_op_iter> itervec;
4457 auto_vec<tree> namevec;
4458 use_operand_p usep = NULL;
4459 gimple *defstmt;
4460 tree use;
4461 ssa_op_iter iter;
4462
4463 start_over:
4464 /* SCC info */
4465 VN_INFO (name)->dfsnum = next_dfs_num++;
4466 VN_INFO (name)->visited = true;
4467 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4468
4469 sccstack.safe_push (name);
4470 VN_INFO (name)->on_sccstack = true;
4471 defstmt = SSA_NAME_DEF_STMT (name);
4472
4473 /* Recursively DFS on our operands, looking for SCC's. */
4474 if (!gimple_nop_p (defstmt))
4475 {
4476 /* Push a new iterator. */
4477 if (gphi *phi = dyn_cast <gphi *> (defstmt))
4478 usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4479 else
4480 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4481 }
4482 else
4483 clear_and_done_ssa_iter (&iter);
4484
4485 while (1)
4486 {
4487 /* If we are done processing uses of a name, go up the stack
4488 of iterators and process SCCs as we found them. */
4489 if (op_iter_done (&iter))
4490 {
4491 /* See if we found an SCC. */
4492 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4493 extract_and_process_scc_for_name (name);
4494
4495 /* Check if we are done. */
4496 if (namevec.is_empty ())
4497 return;
4498
4499 /* Restore the last use walker and continue walking there. */
4500 use = name;
4501 name = namevec.pop ();
4502 memcpy (&iter, &itervec.last (),
4503 sizeof (ssa_op_iter));
4504 itervec.pop ();
4505 goto continue_walking;
4506 }
4507
4508 use = USE_FROM_PTR (usep);
4509
4510 /* Since we handle phi nodes, we will sometimes get
4511 invariants in the use expression. */
4512 if (TREE_CODE (use) == SSA_NAME)
4513 {
4514 if (! (VN_INFO (use)->visited))
4515 {
4516 /* Recurse by pushing the current use walking state on
4517 the stack and starting over. */
4518 itervec.safe_push (iter);
4519 namevec.safe_push (name);
4520 name = use;
4521 goto start_over;
4522
4523 continue_walking:
4524 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4525 VN_INFO (use)->low);
4526 }
4527 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4528 && VN_INFO (use)->on_sccstack)
4529 {
4530 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4531 VN_INFO (name)->low);
4532 }
4533 }
4534
4535 usep = op_iter_next_use (&iter);
4536 }
4537 }
4538
4539 /* Allocate a value number table. */
4540
4541 static void
4542 allocate_vn_table (vn_tables_t table)
4543 {
4544 table->phis = new vn_phi_table_type (23);
4545 table->nary = new vn_nary_op_table_type (23);
4546 table->references = new vn_reference_table_type (23);
4547
4548 gcc_obstack_init (&table->nary_obstack);
4549 table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4550 table->references_pool = new object_allocator<vn_reference_s>
4551 ("VN references");
4552 }
4553
4554 /* Free a value number table. */
4555
4556 static void
4557 free_vn_table (vn_tables_t table)
4558 {
4559 delete table->phis;
4560 table->phis = NULL;
4561 delete table->nary;
4562 table->nary = NULL;
4563 delete table->references;
4564 table->references = NULL;
4565 obstack_free (&table->nary_obstack, NULL);
4566 delete table->phis_pool;
4567 delete table->references_pool;
4568 }
4569
4570 static void
4571 init_scc_vn (void)
4572 {
4573 int j;
4574 int *rpo_numbers_temp;
4575
4576 calculate_dominance_info (CDI_DOMINATORS);
4577 mark_dfs_back_edges ();
4578
4579 sccstack.create (0);
4580 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4581
4582 constant_value_ids = BITMAP_ALLOC (NULL);
4583
4584 next_dfs_num = 1;
4585 next_value_id = 1;
4586
4587 vn_ssa_aux_table.create (num_ssa_names + 1);
4588 /* VEC_alloc doesn't actually grow it to the right size, it just
4589 preallocates the space to do so. */
4590 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4591 gcc_obstack_init (&vn_ssa_aux_obstack);
4592
4593 shared_lookup_phiargs.create (0);
4594 shared_lookup_references.create (0);
4595 rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4596 rpo_numbers_temp =
4597 XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4598 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4599
4600 /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] is the
4601 index of the i'th block in RPO order. We want to map block indices
4602 to RPO numbers, so we need to invert this array. */
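/* E.g. with a made-up three-block function where rpo_numbers_temp is
   { 2, 4, 3 }, we end up with rpo_numbers[2] = 0, rpo_numbers[4] = 1
   and rpo_numbers[3] = 2.  */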
4603 for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4604 rpo_numbers[rpo_numbers_temp[j]] = j;
4605
4606 XDELETE (rpo_numbers_temp);
4607
4608 VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4609 get_identifier ("VN_TOP"), error_mark_node);
4610
4611 renumber_gimple_stmt_uids ();
4612
4613 /* Create the valid and optimistic value numbering tables. */
4614 valid_info = XCNEW (struct vn_tables_s);
4615 allocate_vn_table (valid_info);
4616 optimistic_info = XCNEW (struct vn_tables_s);
4617 allocate_vn_table (optimistic_info);
4618 current_info = valid_info;
4619
4620 /* Create the VN_INFO structures, and initialize value numbers to
4621 TOP or VARYING for parameters. */
4622 size_t i;
4623 tree name;
4624
4625 FOR_EACH_SSA_NAME (i, name, cfun)
4626 {
4627 VN_INFO_GET (name)->valnum = VN_TOP;
4628 VN_INFO (name)->needs_insertion = false;
4629 VN_INFO (name)->expr = NULL;
4630 VN_INFO (name)->value_id = 0;
4631
4632 if (!SSA_NAME_IS_DEFAULT_DEF (name))
4633 continue;
4634
4635 switch (TREE_CODE (SSA_NAME_VAR (name)))
4636 {
4637 case VAR_DECL:
4638 /* All undefined vars are VARYING. */
4639 VN_INFO (name)->valnum = name;
4640 VN_INFO (name)->visited = true;
4641 break;
4642
4643 case PARM_DECL:
4644 /* Parameters are VARYING but we can record a condition
4645 if we know it is a non-NULL pointer. */
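/* E.g. for a hypothetical 'void f (int *p) __attribute__((nonnull))'
   the default def p_1(D) gets the recorded condition p_1(D) != 0.  */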
4646 VN_INFO (name)->visited = true;
4647 VN_INFO (name)->valnum = name;
4648 if (POINTER_TYPE_P (TREE_TYPE (name))
4649 && nonnull_arg_p (SSA_NAME_VAR (name)))
4650 {
4651 tree ops[2];
4652 ops[0] = name;
4653 ops[1] = build_int_cst (TREE_TYPE (name), 0);
4654 vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4655 boolean_true_node, 0);
4656 if (dump_file && (dump_flags & TDF_DETAILS))
4657 {
4658 fprintf (dump_file, "Recording ");
4659 print_generic_expr (dump_file, name, TDF_SLIM);
4660 fprintf (dump_file, " != 0\n");
4661 }
4662 }
4663 break;
4664
4665 case RESULT_DECL:
4666 /* If the result is passed by invisible reference the default
4667 def is initialized, otherwise it's uninitialized. Either
4668 way it is treated as varying. */
4669 VN_INFO (name)->visited = true;
4670 VN_INFO (name)->valnum = name;
4671 break;
4672
4673 default:
4674 gcc_unreachable ();
4675 }
4676 }
4677 }
4678
4679 /* Restore SSA info that has been reset on value leaders. */
4680
4681 void
4682 scc_vn_restore_ssa_info (void)
4683 {
4684 unsigned i;
4685 tree name;
4686
4687 FOR_EACH_SSA_NAME (i, name, cfun)
4688 {
4689 if (has_VN_INFO (name))
4690 {
4691 if (VN_INFO (name)->needs_insertion)
4692 ;
4693 else if (POINTER_TYPE_P (TREE_TYPE (name))
4694 && VN_INFO (name)->info.ptr_info)
4695 SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4696 else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4697 && VN_INFO (name)->info.range_info)
4698 {
4699 SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4700 SSA_NAME_ANTI_RANGE_P (name)
4701 = VN_INFO (name)->range_info_anti_range_p;
4702 }
4703 }
4704 }
4705 }
4706
4707 void
4708 free_scc_vn (void)
4709 {
4710 size_t i;
4711 tree name;
4712
4713 delete constant_to_value_id;
4714 constant_to_value_id = NULL;
4715 BITMAP_FREE (constant_value_ids);
4716 shared_lookup_phiargs.release ();
4717 shared_lookup_references.release ();
4718 XDELETEVEC (rpo_numbers);
4719
4720 FOR_EACH_SSA_NAME (i, name, cfun)
4721 {
4722 if (has_VN_INFO (name)
4723 && VN_INFO (name)->needs_insertion)
4724 release_ssa_name (name);
4725 }
4726 obstack_free (&vn_ssa_aux_obstack, NULL);
4727 vn_ssa_aux_table.release ();
4728
4729 sccstack.release ();
4730 free_vn_table (valid_info);
4731 XDELETE (valid_info);
4732 free_vn_table (optimistic_info);
4733 XDELETE (optimistic_info);
4734
4735 BITMAP_FREE (const_parms);
4736 }
4737
4738 /* Set *ID according to RESULT. */
4739
4740 static void
4741 set_value_id_for_result (tree result, unsigned int *id)
4742 {
4743 if (result && TREE_CODE (result) == SSA_NAME)
4744 *id = VN_INFO (result)->value_id;
4745 else if (result && is_gimple_min_invariant (result))
4746 *id = get_or_alloc_constant_value_id (result);
4747 else
4748 *id = get_next_value_id ();
4749 }
4750
4751 /* Set the value ids in the valid hash tables. */
4752
4753 static void
4754 set_hashtable_value_ids (void)
4755 {
4756 vn_nary_op_iterator_type hin;
4757 vn_phi_iterator_type hip;
4758 vn_reference_iterator_type hir;
4759 vn_nary_op_t vno;
4760 vn_reference_t vr;
4761 vn_phi_t vp;
4762
4763 /* Now set the value ids of the things we had put in the hash
4764 table. */
4765
4766 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4767 set_value_id_for_result (vno->result, &vno->value_id);
4768
4769 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4770 set_value_id_for_result (vp->result, &vp->value_id);
4771
4772 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4773 hir)
4774 set_value_id_for_result (vr->result, &vr->value_id);
4775 }
4776
4777 class sccvn_dom_walker : public dom_walker
4778 {
4779 public:
4780 sccvn_dom_walker ()
4781 : dom_walker (CDI_DOMINATORS, true), cond_stack (0) {}
4782
4783 virtual edge before_dom_children (basic_block);
4784 virtual void after_dom_children (basic_block);
4785
4786 void record_cond (basic_block,
4787 enum tree_code code, tree lhs, tree rhs, bool value);
4788 void record_conds (basic_block,
4789 enum tree_code code, tree lhs, tree rhs, bool value);
4790
4791 auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4792 cond_stack;
4793 };
4794
4795 /* Record a temporary condition for the BB and its dominated blocks. */
4796
4797 void
4798 sccvn_dom_walker::record_cond (basic_block bb,
4799 enum tree_code code, tree lhs, tree rhs,
4800 bool value)
4801 {
4802 tree ops[2] = { lhs, rhs };
4803 vn_nary_op_t old = NULL;
4804 if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4805 current_info->nary->remove_elt_with_hash (old, old->hashcode);
4806 vn_nary_op_t cond
4807 = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4808 value
4809 ? boolean_true_node
4810 : boolean_false_node, 0);
4811 if (dump_file && (dump_flags & TDF_DETAILS))
4812 {
4813 fprintf (dump_file, "Recording temporarily ");
4814 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4815 fprintf (dump_file, " %s ", get_tree_code_name (code));
4816 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4817 fprintf (dump_file, " == %s%s\n",
4818 value ? "true" : "false",
4819 old ? " (old entry saved)" : "");
4820 }
4821 cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4822 }
4823
4824 /* Record temporary conditions for the BB and its dominated blocks
4825 according to LHS CODE RHS == VALUE and its dominated conditions. */
4826
4827 void
4828 sccvn_dom_walker::record_conds (basic_block bb,
4829 enum tree_code code, tree lhs, tree rhs,
4830 bool value)
4831 {
4832 /* Record the original condition. */
4833 record_cond (bb, code, lhs, rhs, value);
4834
4835 if (!value)
4836 return;
4837
4838 /* Record dominated conditions if the condition is true. Note that
4839 the inversion is already recorded. */
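/* E.g. recording a_1 < b_2 == true (illustrative operands) below also
   records a_1 <= b_2 == true, a_1 != b_2 == true and a_1 == b_2 == false.  */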
4840 switch (code)
4841 {
4842 case LT_EXPR:
4843 case GT_EXPR:
4844 record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4845 record_cond (bb, NE_EXPR, lhs, rhs, true);
4846 record_cond (bb, EQ_EXPR, lhs, rhs, false);
4847 break;
4848
4849 case EQ_EXPR:
4850 record_cond (bb, LE_EXPR, lhs, rhs, true);
4851 record_cond (bb, GE_EXPR, lhs, rhs, true);
4852 record_cond (bb, LT_EXPR, lhs, rhs, false);
4853 record_cond (bb, GT_EXPR, lhs, rhs, false);
4854 break;
4855
4856 default:
4857 break;
4858 }
4859 }
4860
4861 /* Restore expressions and values derived from conditionals. */
4862
4863 void
4864 sccvn_dom_walker::after_dom_children (basic_block bb)
4865 {
4866 while (!cond_stack.is_empty ()
4867 && cond_stack.last ().first == bb)
4868 {
4869 vn_nary_op_t cond = cond_stack.last ().second.first;
4870 vn_nary_op_t old = cond_stack.last ().second.second;
4871 current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4872 if (old)
4873 vn_nary_op_insert_into (old, current_info->nary, false);
4874 cond_stack.pop ();
4875 }
4876 }
4877
4878 /* Value number all statements in BB. */
4879
4880 edge
4881 sccvn_dom_walker::before_dom_children (basic_block bb)
4882 {
4883 if (dump_file && (dump_flags & TDF_DETAILS))
4884 fprintf (dump_file, "Visiting BB %d\n", bb->index);
4885
4886 /* If we have a single predecessor record the equivalence from a
4887 possible condition on the predecessor edge. */
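/* For instance, if BB is reached only through the true edge of a
   hypothetical 'if (a_1 < b_2)', we temporarily record a_1 < b_2 == true
   (plus its dominated and inverted forms) for BB and its dominated
   blocks.  */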
4888 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4889 if (pred_e)
4890 {
4891 /* Check if there are multiple executable successor edges in
4892 the source block. Otherwise there is no additional info
4893 to be recorded. */
4894 edge_iterator ei;
4895 edge e2;
4896 FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4897 if (e2 != pred_e
4898 && e2->flags & EDGE_EXECUTABLE)
4899 break;
4900 if (e2 && (e2->flags & EDGE_EXECUTABLE))
4901 {
4902 gimple *stmt = last_stmt (pred_e->src);
4903 if (stmt
4904 && gimple_code (stmt) == GIMPLE_COND)
4905 {
4906 enum tree_code code = gimple_cond_code (stmt);
4907 tree lhs = gimple_cond_lhs (stmt);
4908 tree rhs = gimple_cond_rhs (stmt);
4909 record_conds (bb, code, lhs, rhs,
4910 (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4911 code = invert_tree_comparison (code, HONOR_NANS (lhs));
4912 if (code != ERROR_MARK)
4913 record_conds (bb, code, lhs, rhs,
4914 (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4915 }
4916 }
4917 }
4918
4919 /* Value-number all defs in the basic-block. */
4920 for (gphi_iterator gsi = gsi_start_phis (bb);
4921 !gsi_end_p (gsi); gsi_next (&gsi))
4922 {
4923 gphi *phi = gsi.phi ();
4924 tree res = PHI_RESULT (phi);
4925 if (!VN_INFO (res)->visited)
4926 DFS (res);
4927 }
4928 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4929 !gsi_end_p (gsi); gsi_next (&gsi))
4930 {
4931 ssa_op_iter i;
4932 tree op;
4933 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4934 if (!VN_INFO (op)->visited)
4935 DFS (op);
4936 }
4937
4938 /* Finally look at the last stmt. */
4939 gimple *stmt = last_stmt (bb);
4940 if (!stmt)
4941 return NULL;
4942
4943 enum gimple_code code = gimple_code (stmt);
4944 if (code != GIMPLE_COND
4945 && code != GIMPLE_SWITCH
4946 && code != GIMPLE_GOTO)
4947 return NULL;
4948
4949 if (dump_file && (dump_flags & TDF_DETAILS))
4950 {
4951 fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4952 print_gimple_stmt (dump_file, stmt, 0);
4953 }
4954
4955 /* ??? We can even handle stmts with outgoing EH or ABNORMAL edges
4956 if value-numbering can prove they are not reachable. Handling
4957 computed gotos is also possible. */
4958 tree val;
4959 switch (code)
4960 {
4961 case GIMPLE_COND:
4962 {
4963 tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4964 tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4965 val = gimple_simplify (gimple_cond_code (stmt),
4966 boolean_type_node, lhs, rhs,
4967 NULL, vn_valueize);
4968 /* If that didn't simplify to a constant see if we have recorded
4969 temporary expressions from taken edges. */
4970 if (!val || TREE_CODE (val) != INTEGER_CST)
4971 {
4972 tree ops[2];
4973 ops[0] = lhs;
4974 ops[1] = rhs;
4975 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4976 boolean_type_node, ops, NULL);
4977 }
4978 break;
4979 }
4980 case GIMPLE_SWITCH:
4981 val = gimple_switch_index (as_a <gswitch *> (stmt));
4982 break;
4983 case GIMPLE_GOTO:
4984 val = gimple_goto_dest (stmt);
4985 break;
4986 default:
4987 gcc_unreachable ();
4988 }
4989 if (!val)
4990 return NULL;
4991
4992 edge taken = find_taken_edge (bb, vn_valueize (val));
4993 if (!taken)
4994 return NULL;
4995
4996 if (dump_file && (dump_flags & TDF_DETAILS))
4997 fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4998 "not executable\n", bb->index, bb->index, taken->dest->index);
4999
5000 return taken;
5001 }
5002
5003 /* Do SCCVN. DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5004 oracle walking during the VN process. Over-large SCC's are not
5005 iterated; their members are dropped to varying. */
5006
5007 void
5008 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5009 {
5010 size_t i;
5011
5012 default_vn_walk_kind = default_vn_walk_kind_;
5013
5014 init_scc_vn ();
5015
5016 /* Collect pointers we know point to readonly memory. */
5017 const_parms = BITMAP_ALLOC (NULL);
5018 tree fnspec = lookup_attribute ("fn spec",
5019 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5020 if (fnspec)
5021 {
5022 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5023 i = 1;
5024 for (tree arg = DECL_ARGUMENTS (cfun->decl);
5025 arg; arg = DECL_CHAIN (arg), ++i)
5026 {
5027 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5028 break;
5029 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5030 || TREE_STRING_POINTER (fnspec)[i] == 'r')
5031 {
5032 tree name = ssa_default_def (cfun, arg);
5033 if (name)
5034 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5035 }
5036 }
5037 }
5038
5039 /* Walk all blocks in dominator order, value-numbering stmts
5040 SSA defs and decide whether outgoing edges are not executable. */
5041 sccvn_dom_walker walker;
5042 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5043
5044 /* Initialize the value ids and prune out remaining VN_TOPs
5045 from dead code. */
5046 tree name;
5047 FOR_EACH_SSA_NAME (i, name, cfun)
5048 {
5049 vn_ssa_aux_t info = VN_INFO (name);
5050 if (!info->visited
5051 || info->valnum == VN_TOP)
5052 info->valnum = name;
5053 if (info->valnum == name)
5054 info->value_id = get_next_value_id ();
5055 else if (is_gimple_min_invariant (info->valnum))
5056 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5057 }
5058
5059 /* Propagate. */
5060 FOR_EACH_SSA_NAME (i, name, cfun)
5061 {
5062 vn_ssa_aux_t info = VN_INFO (name);
5063 if (TREE_CODE (info->valnum) == SSA_NAME
5064 && info->valnum != name
5065 && info->value_id != VN_INFO (info->valnum)->value_id)
5066 info->value_id = VN_INFO (info->valnum)->value_id;
5067 }
5068
5069 set_hashtable_value_ids ();
5070
5071 if (dump_file && (dump_flags & TDF_DETAILS))
5072 {
5073 fprintf (dump_file, "Value numbers:\n");
5074 FOR_EACH_SSA_NAME (i, name, cfun)
5075 {
5076 if (VN_INFO (name)->visited
5077 && SSA_VAL (name) != name)
5078 {
5079 print_generic_expr (dump_file, name);
5080 fprintf (dump_file, " = ");
5081 print_generic_expr (dump_file, SSA_VAL (name));
5082 fprintf (dump_file, "\n");
5083 }
5084 }
5085 }
5086 }
5087
5088 /* Return the maximum value id we have ever seen. */
5089
5090 unsigned int
5091 get_max_value_id (void)
5092 {
5093 return next_value_id;
5094 }
5095
5096 /* Return the next unique value id. */
5097
5098 unsigned int
5099 get_next_value_id (void)
5100 {
5101 return next_value_id++;
5102 }
5103
5104
5105 /* Compare two expressions E1 and E2 and return true if they are equal. */
5106
5107 bool
5108 expressions_equal_p (tree e1, tree e2)
5109 {
5110 /* The obvious case. */
5111 if (e1 == e2)
5112 return true;
5113
5114 /* If either one is VN_TOP consider them equal. */
5115 if (e1 == VN_TOP || e2 == VN_TOP)
5116 return true;
5117
5118 /* If only one of them is null, they cannot be equal. */
5119 if (!e1 || !e2)
5120 return false;
5121
5122 /* Now perform the actual comparison. */
5123 if (TREE_CODE (e1) == TREE_CODE (e2)
5124 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5125 return true;
5126
5127 return false;
5128 }
5129
5130
5131 /* Return true if the nary operation NARY may trap. This is a copy
5132 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5133
5134 bool
5135 vn_nary_may_trap (vn_nary_op_t nary)
5136 {
5137 tree type;
5138 tree rhs2 = NULL_TREE;
5139 bool honor_nans = false;
5140 bool honor_snans = false;
5141 bool fp_operation = false;
5142 bool honor_trapv = false;
5143 bool handled, ret;
5144 unsigned i;
5145
5146 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5147 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5148 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5149 {
5150 type = nary->type;
5151 fp_operation = FLOAT_TYPE_P (type);
5152 if (fp_operation)
5153 {
5154 honor_nans = flag_trapping_math && !flag_finite_math_only;
5155 honor_snans = flag_signaling_nans != 0;
5156 }
5157 else if (INTEGRAL_TYPE_P (type)
5158 && TYPE_OVERFLOW_TRAPS (type))
5159 honor_trapv = true;
5160 }
5161 if (nary->length >= 2)
5162 rhs2 = nary->op[1];
5163 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5164 honor_trapv,
5165 honor_nans, honor_snans, rhs2,
5166 &handled);
5167 if (handled
5168 && ret)
5169 return true;
5170
5171 for (i = 0; i < nary->length; ++i)
5172 if (tree_could_trap_p (nary->op[i]))
5173 return true;
5174
5175 return false;
5176 }
5177
5178
5179 class eliminate_dom_walker : public dom_walker
5180 {
5181 public:
5182 eliminate_dom_walker (cdi_direction, bitmap);
5183 ~eliminate_dom_walker ();
5184
5185 virtual edge before_dom_children (basic_block);
5186 virtual void after_dom_children (basic_block);
5187
5188 tree eliminate_avail (tree op);
5189 void eliminate_push_avail (tree op);
5190 tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5191
5192 bool do_pre;
5193 unsigned int el_todo;
5194 unsigned int eliminations;
5195 unsigned int insertions;
5196
5197 /* SSA names that had their defs inserted by PRE if do_pre. */
5198 bitmap inserted_exprs;
5199
5200 /* Blocks with statements that have had their EH properties changed. */
5201 bitmap need_eh_cleanup;
5202
5203 /* Blocks with statements that have had their AB properties changed. */
5204 bitmap need_ab_cleanup;
5205
5206 auto_vec<gimple *> to_remove;
5207 auto_vec<gimple *> to_fixup;
5208 auto_vec<tree> avail;
5209 auto_vec<tree> avail_stack;
5210 };
5211
5212 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5213 bitmap inserted_exprs_)
5214 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5215 el_todo (0), eliminations (0), insertions (0),
5216 inserted_exprs (inserted_exprs_)
5217 {
5218 need_eh_cleanup = BITMAP_ALLOC (NULL);
5219 need_ab_cleanup = BITMAP_ALLOC (NULL);
5220 }
5221
5222 eliminate_dom_walker::~eliminate_dom_walker ()
5223 {
5224 BITMAP_FREE (need_eh_cleanup);
5225 BITMAP_FREE (need_ab_cleanup);
5226 }
5227
5228 /* Return a leader for OP that is available at the current point of the
5229 eliminate domwalk. */
5230
5231 tree
5232 eliminate_dom_walker::eliminate_avail (tree op)
5233 {
5234 tree valnum = VN_INFO (op)->valnum;
5235 if (TREE_CODE (valnum) == SSA_NAME)
5236 {
5237 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5238 return valnum;
5239 if (avail.length () > SSA_NAME_VERSION (valnum))
5240 return avail[SSA_NAME_VERSION (valnum)];
5241 }
5242 else if (is_gimple_min_invariant (valnum))
5243 return valnum;
5244 return NULL_TREE;
5245 }
5246
5247 /* At the current point of the eliminate domwalk make OP available. */
5248
5249 void
5250 eliminate_dom_walker::eliminate_push_avail (tree op)
5251 {
5252 tree valnum = VN_INFO (op)->valnum;
5253 if (TREE_CODE (valnum) == SSA_NAME)
5254 {
5255 if (avail.length () <= SSA_NAME_VERSION (valnum))
5256 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5257 tree pushop = op;
5258 if (avail[SSA_NAME_VERSION (valnum)])
5259 pushop = avail[SSA_NAME_VERSION (valnum)];
5260 avail_stack.safe_push (pushop);
5261 avail[SSA_NAME_VERSION (valnum)] = op;
5262 }
5263 }
5264
5265 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5266 the leader for the expression if insertion was successful. */
5267
5268 tree
5269 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5270 {
5271 /* We can insert a sequence with a single assignment only. */
5272 gimple_seq stmts = VN_INFO (val)->expr;
5273 if (!gimple_seq_singleton_p (stmts))
5274 return NULL_TREE;
5275 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5276 if (!stmt
5277 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5278 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5279 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5280 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5281 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5282 return NULL_TREE;
5283
5284 tree op = gimple_assign_rhs1 (stmt);
5285 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5286 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5287 op = TREE_OPERAND (op, 0);
5288 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5289 if (!leader)
5290 return NULL_TREE;
5291
5292 tree res;
5293 stmts = NULL;
5294 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5295 res = gimple_build (&stmts, BIT_FIELD_REF,
5296 TREE_TYPE (val), leader,
5297 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5298 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5299 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5300 res = gimple_build (&stmts, BIT_AND_EXPR,
5301 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5302 else
5303 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5304 TREE_TYPE (val), leader);
5305 if (TREE_CODE (res) != SSA_NAME
5306 || SSA_NAME_IS_DEFAULT_DEF (res)
5307 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5308 {
5309 gimple_seq_discard (stmts);
5310
5311 /* During propagation we have to treat SSA info conservatively
5312 and thus we can end up simplifying the inserted expression
5313 at elimination time to something not defined in stmts. */
5314 /* But then this is a redundancy we failed to detect. Which means
5315 res now has two values. That doesn't play well with how
5316 we track availability here, so give up. */
5317 if (dump_file && (dump_flags & TDF_DETAILS))
5318 {
5319 if (TREE_CODE (res) == SSA_NAME)
5320 res = eliminate_avail (res);
5321 if (res)
5322 {
5323 fprintf (dump_file, "Failed to insert expression for value ");
5324 print_generic_expr (dump_file, val);
5325 fprintf (dump_file, " which is really fully redundant to ");
5326 print_generic_expr (dump_file, res);
5327 fprintf (dump_file, "\n");
5328 }
5329 }
5330
5331 return NULL_TREE;
5332 }
5333 else
5334 {
5335 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5336 VN_INFO_GET (res)->valnum = val;
5337 }
5338
5339 insertions++;
5340 if (dump_file && (dump_flags & TDF_DETAILS))
5341 {
5342 fprintf (dump_file, "Inserted ");
5343 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5344 }
5345
5346 return res;
5347 }
5348
5349
5350
5351 /* Perform elimination for the basic-block B during the domwalk. */
5352
5353 edge
5354 eliminate_dom_walker::before_dom_children (basic_block b)
5355 {
5356 /* Mark new bb. */
5357 avail_stack.safe_push (NULL_TREE);
5358
5359 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5360 edge_iterator ei;
5361 edge e;
5362 FOR_EACH_EDGE (e, ei, b->preds)
5363 if (e->flags & EDGE_EXECUTABLE)
5364 break;
5365 if (! e)
5366 return NULL;
5367
5368 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5369 {
5370 gphi *phi = gsi.phi ();
5371 tree res = PHI_RESULT (phi);
5372
5373 if (virtual_operand_p (res))
5374 {
5375 gsi_next (&gsi);
5376 continue;
5377 }
5378
5379 tree sprime = eliminate_avail (res);
5380 if (sprime
5381 && sprime != res)
5382 {
5383 if (dump_file && (dump_flags & TDF_DETAILS))
5384 {
5385 fprintf (dump_file, "Replaced redundant PHI node defining ");
5386 print_generic_expr (dump_file, res);
5387 fprintf (dump_file, " with ");
5388 print_generic_expr (dump_file, sprime);
5389 fprintf (dump_file, "\n");
5390 }
5391
5392 /* If we inserted this PHI node ourself, it's not an elimination. */
5393 if (! inserted_exprs
5394 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5395 eliminations++;
5396
5397 /* If we will propagate into all uses don't bother to do
5398 anything. */
5399 if (may_propagate_copy (res, sprime))
5400 {
5401 /* Mark the PHI for removal. */
5402 to_remove.safe_push (phi);
5403 gsi_next (&gsi);
5404 continue;
5405 }
5406
5407 remove_phi_node (&gsi, false);
5408
5409 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5410 sprime = fold_convert (TREE_TYPE (res), sprime);
5411 gimple *stmt = gimple_build_assign (res, sprime);
5412 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5413 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5414 continue;
5415 }
5416
5417 eliminate_push_avail (res);
5418 gsi_next (&gsi);
5419 }
5420
5421 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5422 !gsi_end_p (gsi);
5423 gsi_next (&gsi))
5424 {
5425 tree sprime = NULL_TREE;
5426 gimple *stmt = gsi_stmt (gsi);
5427 tree lhs = gimple_get_lhs (stmt);
5428 if (lhs && TREE_CODE (lhs) == SSA_NAME
5429 && !gimple_has_volatile_ops (stmt)
5430 /* See PR43491. Do not replace a global register variable when
5431 it is the RHS of an assignment. Do replace local register
5432 variables since gcc does not guarantee a local variable will
5433 be allocated in a register.
5434 ??? The fix isn't effective here. This should instead
5435 be ensured by not value-numbering them the same but treating
5436 them like volatiles? */
5437 && !(gimple_assign_single_p (stmt)
5438 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5439 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5440 && is_global_var (gimple_assign_rhs1 (stmt)))))
5441 {
5442 sprime = eliminate_avail (lhs);
5443 if (!sprime)
5444 {
5445 /* If there is no existing usable leader but SCCVN thinks
5446 it has an expression it wants to use as replacement,
5447 insert that. */
5448 tree val = VN_INFO (lhs)->valnum;
5449 if (val != VN_TOP
5450 && TREE_CODE (val) == SSA_NAME
5451 && VN_INFO (val)->needs_insertion
5452 && VN_INFO (val)->expr != NULL
5453 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5454 eliminate_push_avail (sprime);
5455 }
5456
5457 /* If this now constitutes a copy, duplicate points-to
5458 and range info appropriately. This is especially
5459 important for inserted code. See tree-ssa-copy.c
5460 for similar code. */
5461 if (sprime
5462 && TREE_CODE (sprime) == SSA_NAME)
5463 {
5464 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5465 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5466 && VN_INFO_PTR_INFO (lhs)
5467 && ! VN_INFO_PTR_INFO (sprime))
5468 {
5469 duplicate_ssa_name_ptr_info (sprime,
5470 VN_INFO_PTR_INFO (lhs));
5471 if (b != sprime_b)
5472 mark_ptr_info_alignment_unknown
5473 (SSA_NAME_PTR_INFO (sprime));
5474 }
5475 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5476 && VN_INFO_RANGE_INFO (lhs)
5477 && ! VN_INFO_RANGE_INFO (sprime)
5478 && b == sprime_b)
5479 duplicate_ssa_name_range_info (sprime,
5480 VN_INFO_RANGE_TYPE (lhs),
5481 VN_INFO_RANGE_INFO (lhs));
5482 }
5483
5484 /* Inhibit the use of an inserted PHI on a loop header when
5485 the address of the memory reference is a simple induction
5486 variable. In other cases the vectorizer won't do anything
5487 anyway (either it's loop invariant or a complicated
5488 expression). */
5489 if (sprime
5490 && TREE_CODE (sprime) == SSA_NAME
5491 && do_pre
5492 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5493 && loop_outer (b->loop_father)
5494 && has_zero_uses (sprime)
5495 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5496 && gimple_assign_load_p (stmt))
5497 {
5498 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5499 basic_block def_bb = gimple_bb (def_stmt);
5500 if (gimple_code (def_stmt) == GIMPLE_PHI
5501 && def_bb->loop_father->header == def_bb)
5502 {
5503 loop_p loop = def_bb->loop_father;
5504 ssa_op_iter iter;
5505 tree op;
5506 bool found = false;
5507 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5508 {
5509 affine_iv iv;
5510 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5511 if (def_bb
5512 && flow_bb_inside_loop_p (loop, def_bb)
5513 && simple_iv (loop, loop, op, &iv, true))
5514 {
5515 found = true;
5516 break;
5517 }
5518 }
5519 if (found)
5520 {
5521 if (dump_file && (dump_flags & TDF_DETAILS))
5522 {
5523 fprintf (dump_file, "Not replacing ");
5524 print_gimple_expr (dump_file, stmt, 0);
5525 fprintf (dump_file, " with ");
5526 print_generic_expr (dump_file, sprime);
5527 fprintf (dump_file, " which would add a loop"
5528 " carried dependence to loop %d\n",
5529 loop->num);
5530 }
5531 /* Don't keep sprime available. */
5532 sprime = NULL_TREE;
5533 }
5534 }
5535 }
5536
5537 if (sprime)
5538 {
5539 /* If we can propagate the value computed for LHS into
5540 all uses, don't bother doing anything with this stmt. */
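/* E.g. (hypothetical): for "x_2 = y_1 + 0;" whose value number is
   y_1, every use of x_2 will be rewritten to y_1 when it is
   visited, so the definition itself is merely queued for removal
   here.  */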
5541 if (may_propagate_copy (lhs, sprime))
5542 {
5543 /* Mark it for removal. */
5544 to_remove.safe_push (stmt);
5545
5546 /* ??? Don't count copy/constant propagations. */
5547 if (gimple_assign_single_p (stmt)
5548 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5549 || gimple_assign_rhs1 (stmt) == sprime))
5550 continue;
5551
5552 if (dump_file && (dump_flags & TDF_DETAILS))
5553 {
5554 fprintf (dump_file, "Replaced ");
5555 print_gimple_expr (dump_file, stmt, 0);
5556 fprintf (dump_file, " with ");
5557 print_generic_expr (dump_file, sprime);
5558 fprintf (dump_file, " in all uses of ");
5559 print_gimple_stmt (dump_file, stmt, 0);
5560 }
5561
5562 eliminations++;
5563 continue;
5564 }
5565
5566 /* If this is an assignment from our leader (which
5567 happens when the value-number is a constant)
5568 then there is nothing to do. */
5569 if (gimple_assign_single_p (stmt)
5570 && sprime == gimple_assign_rhs1 (stmt))
5571 continue;
5572
5573 /* Else replace its RHS. */
5574 bool can_make_abnormal_goto
5575 = is_gimple_call (stmt)
5576 && stmt_can_make_abnormal_goto (stmt);
5577
5578 if (dump_file && (dump_flags & TDF_DETAILS))
5579 {
5580 fprintf (dump_file, "Replaced ");
5581 print_gimple_expr (dump_file, stmt, 0);
5582 fprintf (dump_file, " with ");
5583 print_generic_expr (dump_file, sprime);
5584 fprintf (dump_file, " in ");
5585 print_gimple_stmt (dump_file, stmt, 0);
5586 }
5587
5588 eliminations++;
5589 gimple *orig_stmt = stmt;
5590 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5591 TREE_TYPE (sprime)))
5592 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5593 tree vdef = gimple_vdef (stmt);
5594 tree vuse = gimple_vuse (stmt);
5595 propagate_tree_value_into_stmt (&gsi, sprime);
5596 stmt = gsi_stmt (gsi);
5597 update_stmt (stmt);
5598 if (vdef != gimple_vdef (stmt))
5599 VN_INFO (vdef)->valnum = vuse;
5600
5601 /* If we removed EH side-effects from the statement, clean
5602 its EH information. */
5603 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5604 {
5605 bitmap_set_bit (need_eh_cleanup,
5606 gimple_bb (stmt)->index);
5607 if (dump_file && (dump_flags & TDF_DETAILS))
5608 fprintf (dump_file, " Removed EH side-effects.\n");
5609 }
5610
5611 /* Likewise for AB side-effects. */
5612 if (can_make_abnormal_goto
5613 && !stmt_can_make_abnormal_goto (stmt))
5614 {
5615 bitmap_set_bit (need_ab_cleanup,
5616 gimple_bb (stmt)->index);
5617 if (dump_file && (dump_flags & TDF_DETAILS))
5618 fprintf (dump_file, " Removed AB side-effects.\n");
5619 }
5620
5621 continue;
5622 }
5623 }
5624
5625 /* If the statement is a scalar store, see if the expression
5626 has the same value number as its rhs. If so, the store is
5627 dead. */
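/* Hypothetical shape of such a dead store:

     *p_1 = x_2;
     ...              <- no intervening clobber of *p_1
     *p_1 = x_2;      <- the looked-up value of *p_1 equals x_2,
                         so this second store can be removed.  */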
5628 if (gimple_assign_single_p (stmt)
5629 && !gimple_has_volatile_ops (stmt)
5630 && !is_gimple_reg (gimple_assign_lhs (stmt))
5631 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5632 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5633 {
5634 tree val;
5635 tree rhs = gimple_assign_rhs1 (stmt);
5636 vn_reference_t vnresult;
5637 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5638 &vnresult, false);
5639 if (TREE_CODE (rhs) == SSA_NAME)
5640 rhs = VN_INFO (rhs)->valnum;
5641 if (val
5642 && operand_equal_p (val, rhs, 0))
5643 {
5644 /* We can only remove the later store if the earlier one aliases
5645 at least all accesses the later one does, or if the store
5646 was to readonly memory storing the same value. */
5647 alias_set_type set = get_alias_set (lhs);
5648 if (! vnresult
5649 || vnresult->set == set
5650 || alias_set_subset_of (set, vnresult->set))
5651 {
5652 if (dump_file && (dump_flags & TDF_DETAILS))
5653 {
5654 fprintf (dump_file, "Deleted redundant store ");
5655 print_gimple_stmt (dump_file, stmt, 0);
5656 }
5657
5658 /* Queue stmt for removal. */
5659 to_remove.safe_push (stmt);
5660 continue;
5661 }
5662 }
5663 }
5664
5665 /* If this is a control statement on which value numbering left
5666 one of the edges unexecuted, force the condition in a way
5667 consistent with that. */
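/* Hypothetical illustration: if value numbering proved only the
   true edge of "if (x_1 != 0)" executable, the condition is
   rewritten via gimple_cond_make_true to the constant form
   "if (1 != 0)" and the dead edge is removed by the scheduled CFG
   cleanup.  */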
5668 if (gcond *cond = dyn_cast <gcond *> (stmt))
5669 {
5670 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5671 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5672 {
5673 if (dump_file && (dump_flags & TDF_DETAILS))
5674 {
5675 fprintf (dump_file, "Removing unexecutable edge from ");
5676 print_gimple_stmt (dump_file, stmt, 0);
5677 }
5678 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5679 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5680 gimple_cond_make_true (cond);
5681 else
5682 gimple_cond_make_false (cond);
5683 update_stmt (cond);
5684 el_todo |= TODO_cleanup_cfg;
5685 continue;
5686 }
5687 }
5688
5689 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5690 bool was_noreturn = (is_gimple_call (stmt)
5691 && gimple_call_noreturn_p (stmt));
5692 tree vdef = gimple_vdef (stmt);
5693 tree vuse = gimple_vuse (stmt);
5694
5695 /* If we didn't replace the whole stmt (or propagate the result
5696 into all uses), replace all uses on this stmt with their
5697 leaders. */
5698 bool modified = false;
5699 use_operand_p use_p;
5700 ssa_op_iter iter;
5701 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5702 {
5703 tree use = USE_FROM_PTR (use_p);
5704 /* ??? The call code above leaves stmt operands un-updated. */
5705 if (TREE_CODE (use) != SSA_NAME)
5706 continue;
5707 tree sprime = eliminate_avail (use);
5708 if (sprime && sprime != use
5709 && may_propagate_copy (use, sprime)
5710 /* We substitute into debug stmts to avoid excessive
5711 debug temporaries created by removed stmts, but we need
5712 to avoid doing so for inserted sprimes as we never want
5713 to create debug temporaries for them. */
5714 && (!inserted_exprs
5715 || TREE_CODE (sprime) != SSA_NAME
5716 || !is_gimple_debug (stmt)
5717 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5718 {
5719 propagate_value (use_p, sprime);
5720 modified = true;
5721 }
5722 }
5723
5724 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5725 into, which is a requirement for the IPA devirt machinery. */
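/* For instance (hypothetical GIMPLE): after &d_2 was propagated into
   a pointer use, a reference may temporarily read
   MEM[(struct S *)&d_2].vptr; fold_stmt canonicalizes this back to
   d_2.vptr, the form the polymorphic call analysis below expects.  */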
5726 gimple *old_stmt = stmt;
5727 if (modified)
5728 {
5729 /* If the propagation turned a formerly non-invariant ADDR_EXPR
5730 into an invariant one, it sits on a separate stmt like this one;
recompute its invariant flag. */
5731 if (gimple_assign_single_p (stmt)
5732 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5733 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5734 gimple_stmt_iterator prev = gsi;
5735 gsi_prev (&prev);
5736 if (fold_stmt (&gsi))
5737 {
5738 /* fold_stmt may have created new stmts in between
5739 the previous stmt and the folded stmt. Mark
5740 all defs created there as varying so as not to confuse
5741 the SCCVN machinery, as we're using it even during
5742 elimination. */
5743 if (gsi_end_p (prev))
5744 prev = gsi_start_bb (b);
5745 else
5746 gsi_next (&prev);
5747 if (gsi_stmt (prev) != gsi_stmt (gsi))
5748 do
5749 {
5750 tree def;
5751 ssa_op_iter dit;
5752 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5753 dit, SSA_OP_ALL_DEFS)
5754 /* As existing DEFs may move between stmts
5755 we have to guard VN_INFO_GET. */
5756 if (! has_VN_INFO (def))
5757 VN_INFO_GET (def)->valnum = def;
5758 if (gsi_stmt (prev) == gsi_stmt (gsi))
5759 break;
5760 gsi_next (&prev);
5761 }
5762 while (1);
5763 }
5764 stmt = gsi_stmt (gsi);
5765 /* In case we folded the stmt away schedule the NOP for removal. */
5766 if (gimple_nop_p (stmt))
5767 to_remove.safe_push (stmt);
5768 }
5769
5770 /* Visit indirect calls and turn them into direct calls if
5771 possible using the devirtualization machinery. Do this before
5772 checking for required EH/abnormal/noreturn cleanup as
5773 devirtualization may expose more of those. */
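/* Sketch of the transform done below (hypothetical C++ source):

     struct A { virtual int f (); };
     A a;  int t = a.f ();

   When the dynamic type is known and exactly one target remains,
   the OBJ_TYPE_REF call is turned into a direct call to A::f; with
   no possible target it becomes a call to __builtin_unreachable.  */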
5774 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5775 {
5776 tree fn = gimple_call_fn (call_stmt);
5777 if (fn
5778 && flag_devirtualize
5779 && virtual_method_call_p (fn))
5780 {
5781 tree otr_type = obj_type_ref_class (fn);
5782 unsigned HOST_WIDE_INT otr_tok
5783 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5784 tree instance;
5785 ipa_polymorphic_call_context context (current_function_decl,
5786 fn, stmt, &instance);
5787 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5788 otr_type, stmt);
5789 bool final;
5790 vec <cgraph_node *> targets
5791 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5792 otr_tok, context, &final);
5793 if (dump_file)
5794 dump_possible_polymorphic_call_targets (dump_file,
5795 obj_type_ref_class (fn),
5796 otr_tok, context);
5797 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5798 {
5799 tree fn;
5800 if (targets.length () == 1)
5801 fn = targets[0]->decl;
5802 else
5803 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5804 if (dump_enabled_p ())
5805 {
5806 location_t loc = gimple_location (stmt);
5807 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5808 "converting indirect call to "
5809 "function %s\n",
5810 lang_hooks.decl_printable_name (fn, 2));
5811 }
5812 gimple_call_set_fndecl (call_stmt, fn);
5813 /* If changing the call to __builtin_unreachable
5814 or similar noreturn function, adjust gimple_call_fntype
5815 too. */
5816 if (gimple_call_noreturn_p (call_stmt)
5817 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5818 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5819 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5820 == void_type_node))
5821 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5822 maybe_remove_unused_call_args (cfun, call_stmt);
5823 modified = true;
5824 }
5825 }
5826 }
5827
5828 if (modified)
5829 {
5830 /* When changing a call into a noreturn call, cfg cleanup
5831 is needed to fix up the noreturn call. */
5832 if (!was_noreturn
5833 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5834 to_fixup.safe_push (stmt);
5835 /* When changing a condition or switch into one for which we
5836 know which edge will be executed, schedule a cfg cleanup. */
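/* E.g. (hypothetical): a switch whose index became the constant 3
   now has a single reachable case; TODO_cleanup_cfg lets CFG cleanup
   remove the dead edges and blocks.  */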
5837 if ((gimple_code (stmt) == GIMPLE_COND
5838 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5839 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5840 || (gimple_code (stmt) == GIMPLE_SWITCH
5841 && TREE_CODE (gimple_switch_index
5842 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5843 el_todo |= TODO_cleanup_cfg;
5844 /* If we removed EH side-effects from the statement, clean
5845 its EH information. */
5846 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5847 {
5848 bitmap_set_bit (need_eh_cleanup,
5849 gimple_bb (stmt)->index);
5850 if (dump_file && (dump_flags & TDF_DETAILS))
5851 fprintf (dump_file, " Removed EH side-effects.\n");
5852 }
5853 /* Likewise for AB side-effects. */
5854 if (can_make_abnormal_goto
5855 && !stmt_can_make_abnormal_goto (stmt))
5856 {
5857 bitmap_set_bit (need_ab_cleanup,
5858 gimple_bb (stmt)->index);
5859 if (dump_file && (dump_flags & TDF_DETAILS))
5860 fprintf (dump_file, " Removed AB side-effects.\n");
5861 }
5862 update_stmt (stmt);
5863 if (vdef != gimple_vdef (stmt))
5864 VN_INFO (vdef)->valnum = vuse;
5865 }
5866
5867 /* Make new values available - for fully redundant LHS we
5868 continue with the next stmt above and skip this. */
5869 def_operand_p defp;
5870 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5871 eliminate_push_avail (DEF_FROM_PTR (defp));
5872 }
5873
5874 /* Replace destination PHI arguments. */
5875 FOR_EACH_EDGE (e, ei, b->succs)
5876 if (e->flags & EDGE_EXECUTABLE)
5877 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5878 !gsi_end_p (gsi);
5879 gsi_next (&gsi))
5880 {
5881 gphi *phi = gsi.phi ();
5882 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5883 tree arg = USE_FROM_PTR (use_p);
5884 if (TREE_CODE (arg) != SSA_NAME
5885 || virtual_operand_p (arg))
5886 continue;
5887 tree sprime = eliminate_avail (arg);
5888 if (sprime && may_propagate_copy (arg, sprime))
5889 propagate_value (use_p, sprime);
5890 }
5891 return NULL;
5892 }
5893
5894 /* Make no-longer-available leaders unavailable again. */
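/* E.g. (hypothetical): entering a dominated region made x_3 the
   leader for its value; unwinding pops x_3 again and either clears
   the slot or restores the leader it shadowed.  */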
5895
5896 void
5897 eliminate_dom_walker::after_dom_children (basic_block)
5898 {
5899 tree entry;
5900 while ((entry = avail_stack.pop ()) != NULL_TREE)
5901 {
5902 tree valnum = VN_INFO (entry)->valnum;
5903 tree old = avail[SSA_NAME_VERSION (valnum)];
5904 if (old == entry)
5905 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5906 else
5907 avail[SSA_NAME_VERSION (valnum)] = entry;
5908 }
5909 }
5910
5911 /* Eliminate fully redundant computations. */
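/* In outline (descriptive note): walk the dominator tree performing
   the replacements, remove the queued dead stmts, fix up calls that
   became noreturn, purge dead EH and abnormal edges, and return the
   accumulated TODO flags to the pass manager.  */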
5912
5913 unsigned int
5914 vn_eliminate (bitmap inserted_exprs)
5915 {
5916 eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5917 el.avail.reserve (num_ssa_names);
5918
5919 el.walk (cfun->cfg->x_entry_block_ptr);
5920
5921 /* We cannot remove stmts during the BB walk, and especially cannot
5922 release SSA names there, as this confuses the VN machinery. The
5923 stmts ending up in to_remove are either stores or simple copies.
5924 Remove stmts in reverse order to make debug stmt creation possible. */
5925 while (!el.to_remove.is_empty ())
5926 {
5927 gimple *stmt = el.to_remove.pop ();
5928
5929 if (dump_file && (dump_flags & TDF_DETAILS))
5930 {
5931 fprintf (dump_file, "Removing dead stmt ");
5932 print_gimple_stmt (dump_file, stmt, 0, 0);
5933 }
5934
5935 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5936 if (gimple_code (stmt) == GIMPLE_PHI)
5937 remove_phi_node (&gsi, true);
5938 else
5939 {
5940 basic_block bb = gimple_bb (stmt);
5941 unlink_stmt_vdef (stmt);
5942 if (gsi_remove (&gsi, true))
5943 bitmap_set_bit (el.need_eh_cleanup, bb->index);
5944 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5945 bitmap_set_bit (el.need_ab_cleanup, bb->index);
5946 release_defs (stmt);
5947 }
5948
5949 /* Removing a stmt may expose a forwarder block. */
5950 el.el_todo |= TODO_cleanup_cfg;
5951 }
5952
5953 /* Fixup stmts that became noreturn calls. This may require splitting
5954 blocks and thus isn't possible during the dominator walk. Do this
5955 in reverse order so we don't inadvertently remove a stmt we want to
5956 fix up by visiting a dominating now-noreturn call first. */
5957 while (!el.to_fixup.is_empty ())
5958 {
5959 gimple *stmt = el.to_fixup.pop ();
5960
5961 if (dump_file && (dump_flags & TDF_DETAILS))
5962 {
5963 fprintf (dump_file, "Fixing up noreturn call ");
5964 print_gimple_stmt (dump_file, stmt, 0);
5965 }
5966
5967 if (fixup_noreturn_call (stmt))
5968 el.el_todo |= TODO_cleanup_cfg;
5969 }
5970
5971 bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5972 bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5973
5974 if (do_eh_cleanup)
5975 gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5976
5977 if (do_ab_cleanup)
5978 gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5979
5980 if (do_eh_cleanup || do_ab_cleanup)
5981 el.el_todo |= TODO_cleanup_cfg;
5982
5983 statistics_counter_event (cfun, "Eliminated", el.eliminations);
5984 statistics_counter_event (cfun, "Insertions", el.insertions);
5985
5986 return el.el_todo;
5987 }
5988
5989
5990 namespace {
5991
5992 const pass_data pass_data_fre =
5993 {
5994 GIMPLE_PASS, /* type */
5995 "fre", /* name */
5996 OPTGROUP_NONE, /* optinfo_flags */
5997 TV_TREE_FRE, /* tv_id */
5998 ( PROP_cfg | PROP_ssa ), /* properties_required */
5999 0, /* properties_provided */
6000 0, /* properties_destroyed */
6001 0, /* todo_flags_start */
6002 0, /* todo_flags_finish */
6003 };
6004
6005 class pass_fre : public gimple_opt_pass
6006 {
6007 public:
6008 pass_fre (gcc::context *ctxt)
6009 : gimple_opt_pass (pass_data_fre, ctxt)
6010 {}
6011
6012 /* opt_pass methods: */
6013 opt_pass * clone () { return new pass_fre (m_ctxt); }
6014 virtual bool gate (function *) { return flag_tree_fre != 0; }
6015 virtual unsigned int execute (function *);
6016
6017 }; // class pass_fre
6018
6019 unsigned int
6020 pass_fre::execute (function *)
6021 {
6022 unsigned int todo = 0;
6023
6024 run_scc_vn (VN_WALKREWRITE);
6025
6026 /* Remove all the redundant expressions. */
6027 todo |= vn_eliminate (NULL);
6028
6029 scc_vn_restore_ssa_info ();
6030 free_scc_vn ();
6031
6032 return todo;
6033 }
6034
6035 } // anon namespace
6036
6037 gimple_opt_pass *
6038 make_pass_fre (gcc::context *ctxt)
6039 {
6040 return new pass_fre (ctxt);
6041 }