gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-loop-niter.h"
73 #include "builtins.h"
74 #include "tree-ssa-sccvn.h"
75
76 /* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight-line code, it is equivalent to a regular hash-based value
80 numbering that is performed in reverse postorder.
81
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
85
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
90
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
95
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
99
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
104
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
108
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
111
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
117
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
123
124 1. We can iterate only the changing portions of the SCCs, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies.
131 */
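
/* Illustrative example (a sketch, not part of the implementation).
   For straight-line code like

     int
     foo (int a, int b)
     {
       int x = a + b;
       int y = a + b;
       return x * y;
     }

   a hash-based walk in reverse postorder hashes a + b once, gives x and y
   the same value number and allows the second computation to be eliminated.
   For values flowing around a loop through PHI nodes the optimistic SCC
   iteration described above is what makes the value numbers converge.  */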
132
133 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134 #define BB_EXECUTABLE BB_VISITED
135
136 static vn_lookup_kind default_vn_walk_kind;
137
138 /* vn_nary_op hashtable helpers. */
139
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 {
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145 };
146
147 /* Return the computed hashcode for nary operation VNO1. */
148
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 {
152 return vno1->hashcode;
153 }
154
155 /* Compare nary operations VNO1 and VNO2 and return true if they are
156 equivalent. */
157
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 {
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162 }
163
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166
167
168 /* vn_phi hashtable helpers. */
169
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 {
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 };
178
179 /* Return the computed hashcode for phi operation VP1. */
180
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 {
184 return vp1->hashcode;
185 }
186
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 {
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193 }
194
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197
198
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
201
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
204 {
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
217 }
218
219 /* Free a reference operation structure VR. */
220
221 static inline void
222 free_reference (vn_reference_s *vr)
223 {
224 vr->operands.release ();
225 }
226
227
228 /* vn_reference hashtable helpers. */
229
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 {
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 };
235
236 /* Return the hashcode for a given reference operation VR1. */
237
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 {
241 return vr1->hashcode;
242 }
243
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 {
247 return v == c || vn_reference_eq (v, c);
248 }
249
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252
253
254 /* The set of VN hashtables. */
255
256 typedef struct vn_tables_s
257 {
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
262
263
264 /* vn_constant hashtable helpers. */
265
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
267 {
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
270 };
271
272 /* Hash table hash function for vn_constant_t. */
273
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
276 {
277 return vc1->hashcode;
278 }
279
280 /* Hash table equality function for vn_constant_t. */
281
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
284 {
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
287
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289 }
290
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
293
294
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
299
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
303
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
307
308
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
312
313
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
316
317 tree VN_TOP;
318
319 /* Unique counter for our value ids. */
320
321 static unsigned int next_value_id;
322
323
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
327
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
329 {
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
338 };
339
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
342 {
343 return SSA_NAME_VERSION (entry->name);
344 }
345
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
348 {
349 return name == entry->name;
350 }
351
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
355
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
365 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
366 (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
367
368 /* Return whether there is value numbering information for a given SSA name. */
369
370 bool
371 has_VN_INFO (tree name)
372 {
373 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
374 }
375
376 vn_ssa_aux_t
377 VN_INFO (tree name)
378 {
379 vn_ssa_aux_t *res
380 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
381 INSERT);
382 if (*res != NULL)
383 return *res;
384
385 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
386 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
387 newinfo->name = name;
388 newinfo->valnum = VN_TOP;
389 /* We are using the visited flag to handle uses with defs not within the
390 region being value-numbered. */
391 newinfo->visited = false;
392
393 /* Given we create the VN_INFOs on-demand now, we have to do initialization
394 different from just assigning VN_TOP here. */
395 if (SSA_NAME_IS_DEFAULT_DEF (name))
396 switch (TREE_CODE (SSA_NAME_VAR (name)))
397 {
398 case VAR_DECL:
399 /* All undefined vars are VARYING. */
400 newinfo->valnum = name;
401 newinfo->visited = true;
402 break;
403
404 case PARM_DECL:
405 /* Parameters are VARYING but we can record a condition
406 if we know it is a non-NULL pointer. */
407 newinfo->visited = true;
408 newinfo->valnum = name;
409 if (POINTER_TYPE_P (TREE_TYPE (name))
410 && nonnull_arg_p (SSA_NAME_VAR (name)))
411 {
412 tree ops[2];
413 ops[0] = name;
414 ops[1] = build_int_cst (TREE_TYPE (name), 0);
415 vn_nary_op_t nary;
416 /* Allocate from non-unwinding stack. */
417 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
418 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
419 boolean_type_node, ops);
420 nary->predicated_values = 0;
421 nary->u.result = boolean_true_node;
422 vn_nary_op_insert_into (nary, valid_info->nary, true);
423 gcc_assert (nary->unwind_to == NULL);
424 /* Also do not link it into the undo chain. */
425 last_inserted_nary = nary->next;
426 nary->next = (vn_nary_op_t)(void *)-1;
427 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
428 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
429 boolean_type_node, ops);
430 nary->predicated_values = 0;
431 nary->u.result = boolean_false_node;
432 vn_nary_op_insert_into (nary, valid_info->nary, true);
433 gcc_assert (nary->unwind_to == NULL);
434 last_inserted_nary = nary->next;
435 nary->next = (vn_nary_op_t)(void *)-1;
436 if (dump_file && (dump_flags & TDF_DETAILS))
437 {
438 fprintf (dump_file, "Recording ");
439 print_generic_expr (dump_file, name, TDF_SLIM);
440 fprintf (dump_file, " != 0\n");
441 }
442 }
443 break;
444
445 case RESULT_DECL:
446 /* If the result is passed by invisible reference the default
447 def is initialized, otherwise it's uninitialized. Either
448 way it is VARYING. */
449 newinfo->visited = true;
450 newinfo->valnum = name;
451 break;
452
453 default:
454 gcc_unreachable ();
455 }
456 return newinfo;
457 }
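
/* Illustrative example (a sketch, not part of the implementation): for a
   function declared

     extern void bar (int *p) __attribute__ ((nonnull (1)));

   nonnull_arg_p is true for the parameter inside bar's body, so VN_INFO
   above records the predicates p != 0 -> true and p == 0 -> false in
   valid_info->nary and later comparisons of the parameter against zero
   can be valueized to a constant.  */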
458
459 /* Return the SSA value of X. */
460
461 inline tree
462 SSA_VAL (tree x, bool *visited = NULL)
463 {
464 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
465 if (visited)
466 *visited = tem && tem->visited;
467 return tem && tem->visited ? tem->valnum : x;
468 }
469
470 /* Return the SSA value of the VUSE x, supporting released VDEFs
471 during elimination which will value-number the VDEF to the
472 associated VUSE (but not substitute in the whole lattice). */
473
474 static inline tree
475 vuse_ssa_val (tree x)
476 {
477 if (!x)
478 return NULL_TREE;
479
480 do
481 {
482 x = SSA_VAL (x);
483 gcc_assert (x != VN_TOP);
484 }
485 while (SSA_NAME_IN_FREE_LIST (x));
486
487 return x;
488 }
489
490 /* Similar to the above but used as callback for walk_non_aliased_vuses
491 and thus should stop at an unvisited VUSE to not walk across region
492 boundaries. */
493
494 static tree
495 vuse_valueize (tree vuse)
496 {
497 do
498 {
499 bool visited;
500 vuse = SSA_VAL (vuse, &visited);
501 if (!visited)
502 return NULL_TREE;
503 gcc_assert (vuse != VN_TOP);
504 }
505 while (SSA_NAME_IN_FREE_LIST (vuse));
506 return vuse;
507 }
508
509
510 /* Return the vn_kind the expression computed by the stmt should be
511 associated with. */
512
513 enum vn_kind
514 vn_get_stmt_kind (gimple *stmt)
515 {
516 switch (gimple_code (stmt))
517 {
518 case GIMPLE_CALL:
519 return VN_REFERENCE;
520 case GIMPLE_PHI:
521 return VN_PHI;
522 case GIMPLE_ASSIGN:
523 {
524 enum tree_code code = gimple_assign_rhs_code (stmt);
525 tree rhs1 = gimple_assign_rhs1 (stmt);
526 switch (get_gimple_rhs_class (code))
527 {
528 case GIMPLE_UNARY_RHS:
529 case GIMPLE_BINARY_RHS:
530 case GIMPLE_TERNARY_RHS:
531 return VN_NARY;
532 case GIMPLE_SINGLE_RHS:
533 switch (TREE_CODE_CLASS (code))
534 {
535 case tcc_reference:
536 /* VOP-less references can go through unary case. */
537 if ((code == REALPART_EXPR
538 || code == IMAGPART_EXPR
539 || code == VIEW_CONVERT_EXPR
540 || code == BIT_FIELD_REF)
541 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
542 return VN_NARY;
543
544 /* Fallthrough. */
545 case tcc_declaration:
546 return VN_REFERENCE;
547
548 case tcc_constant:
549 return VN_CONSTANT;
550
551 default:
552 if (code == ADDR_EXPR)
553 return (is_gimple_min_invariant (rhs1)
554 ? VN_CONSTANT : VN_REFERENCE);
555 else if (code == CONSTRUCTOR)
556 return VN_NARY;
557 return VN_NONE;
558 }
559 default:
560 return VN_NONE;
561 }
562 }
563 default:
564 return VN_NONE;
565 }
566 }
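
/* Illustrative mapping for vn_get_stmt_kind (a sketch, not exhaustive):

     x_1 = a_2 + b_3;        -> VN_NARY
     x_1 = ~a_2;             -> VN_NARY
     x_1 = *p_2;             -> VN_REFERENCE
     x_1 = foo (a_2);        -> VN_REFERENCE (calls)
     x_1 = PHI <a_2, b_3>;   -> VN_PHI
     x_1 = 42;               -> VN_CONSTANT  */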
567
568 /* Lookup a value id for CONSTANT and return it. If it does not
569 exist, return 0. */
570
571 unsigned int
572 get_constant_value_id (tree constant)
573 {
574 vn_constant_s **slot;
575 struct vn_constant_s vc;
576
577 vc.hashcode = vn_hash_constant_with_type (constant);
578 vc.constant = constant;
579 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
580 if (slot)
581 return (*slot)->value_id;
582 return 0;
583 }
584
585 /* Lookup a value id for CONSTANT; if it does not exist, create a
586 new one. Either way, return the value id. */
587
588 unsigned int
589 get_or_alloc_constant_value_id (tree constant)
590 {
591 vn_constant_s **slot;
592 struct vn_constant_s vc;
593 vn_constant_t vcp;
594
595 /* If the hashtable isn't initialized we're not running from PRE and thus
596 do not need value-ids. */
597 if (!constant_to_value_id)
598 return 0;
599
600 vc.hashcode = vn_hash_constant_with_type (constant);
601 vc.constant = constant;
602 slot = constant_to_value_id->find_slot (&vc, INSERT);
603 if (*slot)
604 return (*slot)->value_id;
605
606 vcp = XNEW (struct vn_constant_s);
607 vcp->hashcode = vc.hashcode;
608 vcp->constant = constant;
609 vcp->value_id = get_next_value_id ();
610 *slot = vcp;
611 bitmap_set_bit (constant_value_ids, vcp->value_id);
612 return vcp->value_id;
613 }
614
615 /* Return true if V is a value id for a constant. */
616
617 bool
618 value_id_constant_p (unsigned int v)
619 {
620 return bitmap_bit_p (constant_value_ids, v);
621 }
622
623 /* Compute the hash for a reference operand VRO1. */
624
625 static void
626 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
627 {
628 hstate.add_int (vro1->opcode);
629 if (vro1->op0)
630 inchash::add_expr (vro1->op0, hstate);
631 if (vro1->op1)
632 inchash::add_expr (vro1->op1, hstate);
633 if (vro1->op2)
634 inchash::add_expr (vro1->op2, hstate);
635 }
636
637 /* Compute a hash for the reference operation VR1 and return it. */
638
639 static hashval_t
640 vn_reference_compute_hash (const vn_reference_t vr1)
641 {
642 inchash::hash hstate;
643 hashval_t result;
644 int i;
645 vn_reference_op_t vro;
646 poly_int64 off = -1;
647 bool deref = false;
648
649 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
650 {
651 if (vro->opcode == MEM_REF)
652 deref = true;
653 else if (vro->opcode != ADDR_EXPR)
654 deref = false;
655 if (maybe_ne (vro->off, -1))
656 {
657 if (known_eq (off, -1))
658 off = 0;
659 off += vro->off;
660 }
661 else
662 {
663 if (maybe_ne (off, -1)
664 && maybe_ne (off, 0))
665 hstate.add_poly_int (off);
666 off = -1;
667 if (deref
668 && vro->opcode == ADDR_EXPR)
669 {
670 if (vro->op0)
671 {
672 tree op = TREE_OPERAND (vro->op0, 0);
673 hstate.add_int (TREE_CODE (op));
674 inchash::add_expr (op, hstate);
675 }
676 }
677 else
678 vn_reference_op_compute_hash (vro, hstate);
679 }
680 }
681 result = hstate.end ();
682 /* ??? We would ICE later if we hash instead of adding that in. */
683 if (vr1->vuse)
684 result += SSA_NAME_VERSION (vr1->vuse);
685
686 return result;
687 }
688
689 /* Return true if reference operations VR1 and VR2 are equivalent. This
690 means they have the same set of operands and vuses. */
691
692 bool
693 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
694 {
695 unsigned i, j;
696
697 /* Early out if this is not a hash collision. */
698 if (vr1->hashcode != vr2->hashcode)
699 return false;
700
701 /* The VOP needs to be the same. */
702 if (vr1->vuse != vr2->vuse)
703 return false;
704
705 /* If the operands are the same we are done. */
706 if (vr1->operands == vr2->operands)
707 return true;
708
709 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
710 return false;
711
712 if (INTEGRAL_TYPE_P (vr1->type)
713 && INTEGRAL_TYPE_P (vr2->type))
714 {
715 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
716 return false;
717 }
718 else if (INTEGRAL_TYPE_P (vr1->type)
719 && (TYPE_PRECISION (vr1->type)
720 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
721 return false;
722 else if (INTEGRAL_TYPE_P (vr2->type)
723 && (TYPE_PRECISION (vr2->type)
724 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
725 return false;
726
727 i = 0;
728 j = 0;
729 do
730 {
731 poly_int64 off1 = 0, off2 = 0;
732 vn_reference_op_t vro1, vro2;
733 vn_reference_op_s tem1, tem2;
734 bool deref1 = false, deref2 = false;
735 for (; vr1->operands.iterate (i, &vro1); i++)
736 {
737 if (vro1->opcode == MEM_REF)
738 deref1 = true;
739 /* Do not look through a storage order barrier. */
740 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
741 return false;
742 if (known_eq (vro1->off, -1))
743 break;
744 off1 += vro1->off;
745 }
746 for (; vr2->operands.iterate (j, &vro2); j++)
747 {
748 if (vro2->opcode == MEM_REF)
749 deref2 = true;
750 /* Do not look through a storage order barrier. */
751 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
752 return false;
753 if (known_eq (vro2->off, -1))
754 break;
755 off2 += vro2->off;
756 }
757 if (maybe_ne (off1, off2))
758 return false;
759 if (deref1 && vro1->opcode == ADDR_EXPR)
760 {
761 memset (&tem1, 0, sizeof (tem1));
762 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
763 tem1.type = TREE_TYPE (tem1.op0);
764 tem1.opcode = TREE_CODE (tem1.op0);
765 vro1 = &tem1;
766 deref1 = false;
767 }
768 if (deref2 && vro2->opcode == ADDR_EXPR)
769 {
770 memset (&tem2, 0, sizeof (tem2));
771 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
772 tem2.type = TREE_TYPE (tem2.op0);
773 tem2.opcode = TREE_CODE (tem2.op0);
774 vro2 = &tem2;
775 deref2 = false;
776 }
777 if (deref1 != deref2)
778 return false;
779 if (!vn_reference_op_eq (vro1, vro2))
780 return false;
781 ++j;
782 ++i;
783 }
784 while (vr1->operands.length () != i
785 || vr2->operands.length () != j);
786
787 return true;
788 }
789
790 /* Copy the operations present in load/store REF into RESULT, a vector of
791 vn_reference_op_s's. */
792
793 static void
794 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
795 {
796 /* For non-calls, store the information that makes up the address. */
797 tree orig = ref;
798 while (ref)
799 {
800 vn_reference_op_s temp;
801
802 memset (&temp, 0, sizeof (temp));
803 temp.type = TREE_TYPE (ref);
804 temp.opcode = TREE_CODE (ref);
805 temp.off = -1;
806
807 switch (temp.opcode)
808 {
809 case MODIFY_EXPR:
810 temp.op0 = TREE_OPERAND (ref, 1);
811 break;
812 case WITH_SIZE_EXPR:
813 temp.op0 = TREE_OPERAND (ref, 1);
814 temp.off = 0;
815 break;
816 case MEM_REF:
817 /* The base address gets its own vn_reference_op_s structure. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 if (!mem_ref_offset (ref).to_shwi (&temp.off))
820 temp.off = -1;
821 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
822 temp.base = MR_DEPENDENCE_BASE (ref);
823 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
824 break;
825 case TARGET_MEM_REF:
826 /* The base address gets its own vn_reference_op_s structure. */
827 temp.op0 = TMR_INDEX (ref);
828 temp.op1 = TMR_STEP (ref);
829 temp.op2 = TMR_OFFSET (ref);
830 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
831 temp.base = MR_DEPENDENCE_BASE (ref);
832 result->safe_push (temp);
833 memset (&temp, 0, sizeof (temp));
834 temp.type = NULL_TREE;
835 temp.opcode = ERROR_MARK;
836 temp.op0 = TMR_INDEX2 (ref);
837 temp.off = -1;
838 break;
839 case BIT_FIELD_REF:
840 /* Record bits, position and storage order. */
841 temp.op0 = TREE_OPERAND (ref, 1);
842 temp.op1 = TREE_OPERAND (ref, 2);
843 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
844 temp.off = -1;
845 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
846 break;
847 case COMPONENT_REF:
848 /* The field decl is enough to unambiguously specify the field,
849 a matching type is not necessary and a mismatching type
850 is always a spurious difference. */
851 temp.type = NULL_TREE;
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
854 {
855 tree this_offset = component_ref_field_offset (ref);
856 if (this_offset
857 && poly_int_tree_p (this_offset))
858 {
859 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
860 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
861 {
862 poly_offset_int off
863 = (wi::to_poly_offset (this_offset)
864 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
865 /* Prohibit value-numbering zero offset components
866 of addresses the same before the pass folding
867 __builtin_object_size had a chance to run
868 (checking cfun->after_inlining does the
869 trick here). */
870 if (TREE_CODE (orig) != ADDR_EXPR
871 || maybe_ne (off, 0)
872 || cfun->after_inlining)
873 off.to_shwi (&temp.off);
874 }
875 }
876 }
877 break;
878 case ARRAY_RANGE_REF:
879 case ARRAY_REF:
880 {
881 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
882 /* Record index as operand. */
883 temp.op0 = TREE_OPERAND (ref, 1);
884 /* Always record lower bounds and element size. */
885 temp.op1 = array_ref_low_bound (ref);
886 /* But record element size in units of the type alignment. */
887 temp.op2 = TREE_OPERAND (ref, 3);
888 temp.align = eltype->type_common.align;
889 if (! temp.op2)
890 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
891 size_int (TYPE_ALIGN_UNIT (eltype)));
892 if (poly_int_tree_p (temp.op0)
893 && poly_int_tree_p (temp.op1)
894 && TREE_CODE (temp.op2) == INTEGER_CST)
895 {
896 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
897 - wi::to_poly_offset (temp.op1))
898 * wi::to_offset (temp.op2)
899 * vn_ref_op_align_unit (&temp));
900 off.to_shwi (&temp.off);
901 }
902 }
903 break;
904 case VAR_DECL:
905 if (DECL_HARD_REGISTER (ref))
906 {
907 temp.op0 = ref;
908 break;
909 }
910 /* Fallthru. */
911 case PARM_DECL:
912 case CONST_DECL:
913 case RESULT_DECL:
914 /* Canonicalize decls to MEM[&decl] which is what we end up with
915 when valueizing MEM[ptr] with ptr = &decl. */
916 temp.opcode = MEM_REF;
917 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
918 temp.off = 0;
919 result->safe_push (temp);
920 temp.opcode = ADDR_EXPR;
921 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
922 temp.type = TREE_TYPE (temp.op0);
923 temp.off = -1;
924 break;
925 case STRING_CST:
926 case INTEGER_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
929 case REAL_CST:
930 case FIXED_CST:
931 case CONSTRUCTOR:
932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
937 {
938 temp.op0 = ref;
939 break;
940 }
941 break;
942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
944 ref in the chain of references (i.e. they require an
945 operand), so we don't have to put anything
946 for op* as it will be handled by the iteration. */
947 case REALPART_EXPR:
948 temp.off = 0;
949 break;
950 case VIEW_CONVERT_EXPR:
951 temp.off = 0;
952 temp.reverse = storage_order_barrier_p (ref);
953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
957 break;
958 default:
959 gcc_unreachable ();
960 }
961 result->safe_push (temp);
962
963 if (REFERENCE_CLASS_P (ref)
964 || TREE_CODE (ref) == MODIFY_EXPR
965 || TREE_CODE (ref) == WITH_SIZE_EXPR
966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
971 }
972 }
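
/* Illustrative example (a sketch, not part of the implementation): given

     struct S { int a[4]; } s;

   the reference s.a[i_1] is decomposed by the function above, outermost
   first, into roughly

     { ARRAY_REF, op0 = i_1, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL a }
     { MEM_REF, op0 = 0 }
     { ADDR_EXPR, op0 = &s }

   where the last two entries are the MEM[&s] canonicalization of the plain
   decl and constant per-operand offsets are filled in where known.  */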
973
974 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET and the reference type TYPE.
976 Return true if something useful was produced. */
977
978 bool
979 ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, tree type,
981 vec<vn_reference_op_s> ops)
982 {
983 vn_reference_op_t op;
984 unsigned i;
985 tree base = NULL_TREE;
986 tree *op0_p = &base;
987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
990 tree size_tree = NULL_TREE;
991 alias_set_type base_alias_set = -1;
992
993 /* First get the final access size from just the outermost expression. */
994 op = &ops[0];
995 if (op->opcode == COMPONENT_REF)
996 size_tree = DECL_SIZE (op->op0);
997 else if (op->opcode == BIT_FIELD_REF)
998 size_tree = op->op0;
999 else
1000 {
1001 machine_mode mode = TYPE_MODE (type);
1002 if (mode == BLKmode)
1003 size_tree = TYPE_SIZE (type);
1004 else
1005 size = GET_MODE_BITSIZE (mode);
1006 }
1007 if (size_tree != NULL_TREE
1008 && poly_int_tree_p (size_tree))
1009 size = wi::to_poly_offset (size_tree);
1010
1011 /* Initially, maxsize is the same as the accessed element size.
1012 In the following it will only grow (or become -1). */
1013 max_size = size;
1014
1015 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1016 and find the ultimate containing object. */
1017 FOR_EACH_VEC_ELT (ops, i, op)
1018 {
1019 switch (op->opcode)
1020 {
1021 /* These may be in the reference ops, but we cannot do anything
1022 sensible with them here. */
1023 case ADDR_EXPR:
1024 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1025 if (base != NULL_TREE
1026 && TREE_CODE (base) == MEM_REF
1027 && op->op0
1028 && DECL_P (TREE_OPERAND (op->op0, 0)))
1029 {
1030 vn_reference_op_t pop = &ops[i-1];
1031 base = TREE_OPERAND (op->op0, 0);
1032 if (known_eq (pop->off, -1))
1033 {
1034 max_size = -1;
1035 offset = 0;
1036 }
1037 else
1038 offset += pop->off * BITS_PER_UNIT;
1039 op0_p = NULL;
1040 break;
1041 }
1042 /* Fallthru. */
1043 case CALL_EXPR:
1044 return false;
1045
1046 /* Record the base objects. */
1047 case MEM_REF:
1048 base_alias_set = get_deref_alias_set (op->op0);
1049 *op0_p = build2 (MEM_REF, op->type,
1050 NULL_TREE, op->op0);
1051 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1052 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1053 op0_p = &TREE_OPERAND (*op0_p, 0);
1054 break;
1055
1056 case VAR_DECL:
1057 case PARM_DECL:
1058 case RESULT_DECL:
1059 case SSA_NAME:
1060 *op0_p = op->op0;
1061 op0_p = NULL;
1062 break;
1063
1064 /* And now the usual component-reference style ops. */
1065 case BIT_FIELD_REF:
1066 offset += wi::to_poly_offset (op->op1);
1067 break;
1068
1069 case COMPONENT_REF:
1070 {
1071 tree field = op->op0;
1072 /* We do not have a complete COMPONENT_REF tree here so we
1073 cannot use component_ref_field_offset. Do the interesting
1074 parts manually. */
1075 tree this_offset = DECL_FIELD_OFFSET (field);
1076
1077 if (op->op1 || !poly_int_tree_p (this_offset))
1078 max_size = -1;
1079 else
1080 {
1081 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1082 << LOG2_BITS_PER_UNIT);
1083 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1084 offset += woffset;
1085 }
1086 break;
1087 }
1088
1089 case ARRAY_RANGE_REF:
1090 case ARRAY_REF:
1091 /* We recorded the lower bound and the element size. */
1092 if (!poly_int_tree_p (op->op0)
1093 || !poly_int_tree_p (op->op1)
1094 || TREE_CODE (op->op2) != INTEGER_CST)
1095 max_size = -1;
1096 else
1097 {
1098 poly_offset_int woffset
1099 = wi::sext (wi::to_poly_offset (op->op0)
1100 - wi::to_poly_offset (op->op1),
1101 TYPE_PRECISION (TREE_TYPE (op->op0)));
1102 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1103 woffset <<= LOG2_BITS_PER_UNIT;
1104 offset += woffset;
1105 }
1106 break;
1107
1108 case REALPART_EXPR:
1109 break;
1110
1111 case IMAGPART_EXPR:
1112 offset += size;
1113 break;
1114
1115 case VIEW_CONVERT_EXPR:
1116 break;
1117
1118 case STRING_CST:
1119 case INTEGER_CST:
1120 case COMPLEX_CST:
1121 case VECTOR_CST:
1122 case REAL_CST:
1123 case CONSTRUCTOR:
1124 case CONST_DECL:
1125 return false;
1126
1127 default:
1128 return false;
1129 }
1130 }
1131
1132 if (base == NULL_TREE)
1133 return false;
1134
1135 ref->ref = NULL_TREE;
1136 ref->base = base;
1137 ref->ref_alias_set = set;
1138 if (base_alias_set != -1)
1139 ref->base_alias_set = base_alias_set;
1140 else
1141 ref->base_alias_set = get_alias_set (base);
1142 /* We discount volatiles from value-numbering elsewhere. */
1143 ref->volatile_p = false;
1144
1145 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1146 {
1147 ref->offset = 0;
1148 ref->size = -1;
1149 ref->max_size = -1;
1150 return true;
1151 }
1152
1153 if (!offset.to_shwi (&ref->offset))
1154 {
1155 ref->offset = 0;
1156 ref->max_size = -1;
1157 return true;
1158 }
1159
1160 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1161 ref->max_size = -1;
1162
1163 return true;
1164 }
1165
1166 /* Copy the operations present in the call CALL into RESULT, a vector of
1167 vn_reference_op_s's. */
1168
1169 static void
1170 copy_reference_ops_from_call (gcall *call,
1171 vec<vn_reference_op_s> *result)
1172 {
1173 vn_reference_op_s temp;
1174 unsigned i;
1175 tree lhs = gimple_call_lhs (call);
1176 int lr;
1177
1178 /* If two calls have a different non-SSA lhs, their vdef value numbers should
1179 be different. By adding the lhs to the vector here, we ensure that the
1180 hashcode is different, guaranteeing a different value number. */
1181 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1182 {
1183 memset (&temp, 0, sizeof (temp));
1184 temp.opcode = MODIFY_EXPR;
1185 temp.type = TREE_TYPE (lhs);
1186 temp.op0 = lhs;
1187 temp.off = -1;
1188 result->safe_push (temp);
1189 }
1190
1191 /* Copy the type, opcode, function, static chain and EH region, if any. */
1192 memset (&temp, 0, sizeof (temp));
1193 temp.type = gimple_call_fntype (call);
1194 temp.opcode = CALL_EXPR;
1195 temp.op0 = gimple_call_fn (call);
1196 temp.op1 = gimple_call_chain (call);
1197 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1198 temp.op2 = size_int (lr);
1199 temp.off = -1;
1200 result->safe_push (temp);
1201
1202 /* Copy the call arguments. As they can be references as well,
1203 just chain them together. */
1204 for (i = 0; i < gimple_call_num_args (call); ++i)
1205 {
1206 tree callarg = gimple_call_arg (call, i);
1207 copy_reference_ops_from_ref (callarg, result);
1208 }
1209 }
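
/* Illustrative example (a sketch, not part of the implementation): for a
   call

     s = make_struct (x_1);

   with a non-SSA lhs s the vector starts with a MODIFY_EXPR entry for s
   (so two such calls storing to different objects never hash the same),
   followed by the CALL_EXPR entry carrying fntype, callee, static chain
   and EH region, followed by the decomposed operands of each argument.  */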
1210
1211 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
1213 static bool
1214 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1215 unsigned int *i_p)
1216 {
1217 unsigned int i = *i_p;
1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
1220 tree addr_base;
1221 poly_int64 addr_offset = 0;
1222
1223 /* The only thing we have to do is add the offset of .foo.bar within
1224 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1225 address with &OBJ. */
1226 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1227 &addr_offset);
1228 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1229 if (addr_base != TREE_OPERAND (op->op0, 0))
1230 {
1231 poly_offset_int off
1232 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1233 SIGNED)
1234 + addr_offset);
1235 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1236 op->op0 = build_fold_addr_expr (addr_base);
1237 if (tree_fits_shwi_p (mem_op->op0))
1238 mem_op->off = tree_to_shwi (mem_op->op0);
1239 else
1240 mem_op->off = -1;
1241 return true;
1242 }
1243 return false;
1244 }
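
/* Illustrative example (a sketch, not part of the implementation): if s.b
   lives at byte offset 8, then for the operands of MEM[&s.b, 4] the
   function above rewrites the ADDR_EXPR to &s and folds the 8 into the
   preceding MEM_REF offset, yielding the operands of MEM[&s, 12].  */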
1245
1246 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1247 *I_P to point to the last element of the replacement. */
1248 static bool
1249 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1250 unsigned int *i_p)
1251 {
1252 bool changed = false;
1253 vn_reference_op_t op;
1254
1255 do
1256 {
1257 unsigned int i = *i_p;
1258 op = &(*ops)[i];
1259 vn_reference_op_t mem_op = &(*ops)[i - 1];
1260 gimple *def_stmt;
1261 enum tree_code code;
1262 poly_offset_int off;
1263
1264 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1265 if (!is_gimple_assign (def_stmt))
1266 return changed;
1267
1268 code = gimple_assign_rhs_code (def_stmt);
1269 if (code != ADDR_EXPR
1270 && code != POINTER_PLUS_EXPR)
1271 return changed;
1272
1273 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1274
1275 /* The only thing we have to do is add the offset of .foo.bar within
1276 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1277 address with &OBJ. */
1278 if (code == ADDR_EXPR)
1279 {
1280 tree addr, addr_base;
1281 poly_int64 addr_offset;
1282
1283 addr = gimple_assign_rhs1 (def_stmt);
1284 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1285 &addr_offset);
1286 /* If that didn't work because the address isn't invariant, propagate
1287 the reference tree from the address operation in case the current
1288 dereference has no offset. */
1289 if (!addr_base
1290 && *i_p == ops->length () - 1
1291 && known_eq (off, 0)
1292 /* This makes us disable this transform for PRE where the
1293 reference ops might be also used for code insertion which
1294 is invalid. */
1295 && default_vn_walk_kind == VN_WALKREWRITE)
1296 {
1297 auto_vec<vn_reference_op_s, 32> tem;
1298 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1299 /* Make sure to preserve TBAA info. The only objects not
1300 wrapped in MEM_REFs that can have their address taken are
1301 STRING_CSTs. */
1302 if (tem.length () >= 2
1303 && tem[tem.length () - 2].opcode == MEM_REF)
1304 {
1305 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1306 new_mem_op->op0
1307 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1308 wi::to_poly_wide (new_mem_op->op0));
1309 }
1310 else
1311 gcc_assert (tem.last ().opcode == STRING_CST);
1312 ops->pop ();
1313 ops->pop ();
1314 ops->safe_splice (tem);
1315 --*i_p;
1316 return true;
1317 }
1318 if (!addr_base
1319 || TREE_CODE (addr_base) != MEM_REF
1320 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1321 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1322 0))))
1323 return changed;
1324
1325 off += addr_offset;
1326 off += mem_ref_offset (addr_base);
1327 op->op0 = TREE_OPERAND (addr_base, 0);
1328 }
1329 else
1330 {
1331 tree ptr, ptroff;
1332 ptr = gimple_assign_rhs1 (def_stmt);
1333 ptroff = gimple_assign_rhs2 (def_stmt);
1334 if (TREE_CODE (ptr) != SSA_NAME
1335 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1336 /* Make sure to not endlessly recurse.
1337 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1338 happen when we value-number a PHI to its backedge value. */
1339 || SSA_VAL (ptr) == op->op0
1340 || !poly_int_tree_p (ptroff))
1341 return changed;
1342
1343 off += wi::to_poly_offset (ptroff);
1344 op->op0 = ptr;
1345 }
1346
1347 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1348 if (tree_fits_shwi_p (mem_op->op0))
1349 mem_op->off = tree_to_shwi (mem_op->op0);
1350 else
1351 mem_op->off = -1;
1352 /* ??? Can end up with endless recursion here!?
1353 gcc.c-torture/execute/strcmp-1.c */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 op->op0 = SSA_VAL (op->op0);
1356 if (TREE_CODE (op->op0) != SSA_NAME)
1357 op->opcode = TREE_CODE (op->op0);
1358
1359 changed = true;
1360 }
1361 /* Tail-recurse. */
1362 while (TREE_CODE (op->op0) == SSA_NAME);
1363
1364 /* Fold a remaining *&. */
1365 if (TREE_CODE (op->op0) == ADDR_EXPR)
1366 vn_reference_fold_indirect (ops, i_p);
1367
1368 return changed;
1369 }
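
/* Illustrative example (a sketch, not part of the implementation): when
   the address operand is defined by

     p_1 = q_2 + 16;   (POINTER_PLUS_EXPR)

   the operands of MEM[p_1, 4] are rewritten to those of MEM[q_2, 20];
   when instead p_1 = &a the remaining *& is folded by
   vn_reference_fold_indirect above.  */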
1370
1371 /* Optimize the reference REF to a constant if possible or return
1372 NULL_TREE if not. */
1373
1374 tree
1375 fully_constant_vn_reference_p (vn_reference_t ref)
1376 {
1377 vec<vn_reference_op_s> operands = ref->operands;
1378 vn_reference_op_t op;
1379
1380 /* Try to simplify the translated expression if it is
1381 a call to a builtin function with at most two arguments. */
1382 op = &operands[0];
1383 if (op->opcode == CALL_EXPR
1384 && TREE_CODE (op->op0) == ADDR_EXPR
1385 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1386 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1387 && operands.length () >= 2
1388 && operands.length () <= 3)
1389 {
1390 vn_reference_op_t arg0, arg1 = NULL;
1391 bool anyconst = false;
1392 arg0 = &operands[1];
1393 if (operands.length () > 2)
1394 arg1 = &operands[2];
1395 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1396 || (arg0->opcode == ADDR_EXPR
1397 && is_gimple_min_invariant (arg0->op0)))
1398 anyconst = true;
1399 if (arg1
1400 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1401 || (arg1->opcode == ADDR_EXPR
1402 && is_gimple_min_invariant (arg1->op0))))
1403 anyconst = true;
1404 if (anyconst)
1405 {
1406 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1407 arg1 ? 2 : 1,
1408 arg0->op0,
1409 arg1 ? arg1->op0 : NULL);
1410 if (folded
1411 && TREE_CODE (folded) == NOP_EXPR)
1412 folded = TREE_OPERAND (folded, 0);
1413 if (folded
1414 && is_gimple_min_invariant (folded))
1415 return folded;
1416 }
1417 }
1418
1419 /* Simplify reads from constants or constant initializers. */
1420 else if (BITS_PER_UNIT == 8
1421 && COMPLETE_TYPE_P (ref->type)
1422 && is_gimple_reg_type (ref->type))
1423 {
1424 poly_int64 off = 0;
1425 HOST_WIDE_INT size;
1426 if (INTEGRAL_TYPE_P (ref->type))
1427 size = TYPE_PRECISION (ref->type);
1428 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1429 size = tree_to_shwi (TYPE_SIZE (ref->type));
1430 else
1431 return NULL_TREE;
1432 if (size % BITS_PER_UNIT != 0
1433 || size > MAX_BITSIZE_MODE_ANY_MODE)
1434 return NULL_TREE;
1435 size /= BITS_PER_UNIT;
1436 unsigned i;
1437 for (i = 0; i < operands.length (); ++i)
1438 {
1439 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1440 {
1441 ++i;
1442 break;
1443 }
1444 if (known_eq (operands[i].off, -1))
1445 return NULL_TREE;
1446 off += operands[i].off;
1447 if (operands[i].opcode == MEM_REF)
1448 {
1449 ++i;
1450 break;
1451 }
1452 }
1453 vn_reference_op_t base = &operands[--i];
1454 tree ctor = error_mark_node;
1455 tree decl = NULL_TREE;
1456 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1457 ctor = base->op0;
1458 else if (base->opcode == MEM_REF
1459 && base[1].opcode == ADDR_EXPR
1460 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1461 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1462 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1463 {
1464 decl = TREE_OPERAND (base[1].op0, 0);
1465 if (TREE_CODE (decl) == STRING_CST)
1466 ctor = decl;
1467 else
1468 ctor = ctor_for_folding (decl);
1469 }
1470 if (ctor == NULL_TREE)
1471 return build_zero_cst (ref->type);
1472 else if (ctor != error_mark_node)
1473 {
1474 HOST_WIDE_INT const_off;
1475 if (decl)
1476 {
1477 tree res = fold_ctor_reference (ref->type, ctor,
1478 off * BITS_PER_UNIT,
1479 size * BITS_PER_UNIT, decl);
1480 if (res)
1481 {
1482 STRIP_USELESS_TYPE_CONVERSION (res);
1483 if (is_gimple_min_invariant (res))
1484 return res;
1485 }
1486 }
1487 else if (off.is_constant (&const_off))
1488 {
1489 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1490 int len = native_encode_expr (ctor, buf, size, const_off);
1491 if (len > 0)
1492 return native_interpret_expr (ref->type, buf, len);
1493 }
1494 }
1495 }
1496
1497 return NULL_TREE;
1498 }
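
/* Illustrative example (a sketch, not part of the implementation): given

     static const int tbl[4] = { 1, 2, 4, 8 };

   a load from tbl[2] is a fully constant reference and folds to 4 via the
   variable's constant initializer, while a load from a static variable
   whose initializer is known to be zero folds to build_zero_cst of the
   access type.  */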
1499
1500 /* Return true if OPS contain a storage order barrier. */
1501
1502 static bool
1503 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1504 {
1505 vn_reference_op_t op;
1506 unsigned i;
1507
1508 FOR_EACH_VEC_ELT (ops, i, op)
1509 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1510 return true;
1511
1512 return false;
1513 }
1514
1515 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1516 structures into their value numbers. This is done in-place, and
1517 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1518 whether any operands were valueized. */
1519
1520 static vec<vn_reference_op_s>
1521 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1522 bool with_avail = false)
1523 {
1524 vn_reference_op_t vro;
1525 unsigned int i;
1526
1527 *valueized_anything = false;
1528
1529 FOR_EACH_VEC_ELT (orig, i, vro)
1530 {
1531 if (vro->opcode == SSA_NAME
1532 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1533 {
1534 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1535 if (tem != vro->op0)
1536 {
1537 *valueized_anything = true;
1538 vro->op0 = tem;
1539 }
1540 /* If it transforms from an SSA_NAME to a constant, update
1541 the opcode. */
1542 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1543 vro->opcode = TREE_CODE (vro->op0);
1544 }
1545 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1546 {
1547 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1548 if (tem != vro->op1)
1549 {
1550 *valueized_anything = true;
1551 vro->op1 = tem;
1552 }
1553 }
1554 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1555 {
1556 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1557 if (tem != vro->op2)
1558 {
1559 *valueized_anything = true;
1560 vro->op2 = tem;
1561 }
1562 }
1563 /* If it transforms from an SSA_NAME to an address, fold with
1564 a preceding indirect reference. */
1565 if (i > 0
1566 && vro->op0
1567 && TREE_CODE (vro->op0) == ADDR_EXPR
1568 && orig[i - 1].opcode == MEM_REF)
1569 {
1570 if (vn_reference_fold_indirect (&orig, &i))
1571 *valueized_anything = true;
1572 }
1573 else if (i > 0
1574 && vro->opcode == SSA_NAME
1575 && orig[i - 1].opcode == MEM_REF)
1576 {
1577 if (vn_reference_maybe_forwprop_address (&orig, &i))
1578 *valueized_anything = true;
1579 }
1580 /* If it transforms a non-constant ARRAY_REF into a constant
1581 one, adjust the constant offset. */
1582 else if (vro->opcode == ARRAY_REF
1583 && known_eq (vro->off, -1)
1584 && poly_int_tree_p (vro->op0)
1585 && poly_int_tree_p (vro->op1)
1586 && TREE_CODE (vro->op2) == INTEGER_CST)
1587 {
1588 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1589 - wi::to_poly_offset (vro->op1))
1590 * wi::to_offset (vro->op2)
1591 * vn_ref_op_align_unit (vro));
1592 off.to_shwi (&vro->off);
1593 }
1594 }
1595
1596 return orig;
1597 }
1598
1599 static vec<vn_reference_op_s>
1600 valueize_refs (vec<vn_reference_op_s> orig)
1601 {
1602 bool tem;
1603 return valueize_refs_1 (orig, &tem);
1604 }
1605
1606 static vec<vn_reference_op_s> shared_lookup_references;
1607
1608 /* Create a vector of vn_reference_op_s structures from REF, a
1609 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1610 this function. *VALUEIZED_ANYTHING will specify whether any
1611 operands were valueized. */
1612
1613 static vec<vn_reference_op_s>
1614 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1615 {
1616 if (!ref)
1617 return vNULL;
1618 shared_lookup_references.truncate (0);
1619 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1620 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1621 valueized_anything);
1622 return shared_lookup_references;
1623 }
1624
1625 /* Create a vector of vn_reference_op_s structures from CALL, a
1626 call statement. The vector is shared among all callers of
1627 this function. */
1628
1629 static vec<vn_reference_op_s>
1630 valueize_shared_reference_ops_from_call (gcall *call)
1631 {
1632 if (!call)
1633 return vNULL;
1634 shared_lookup_references.truncate (0);
1635 copy_reference_ops_from_call (call, &shared_lookup_references);
1636 shared_lookup_references = valueize_refs (shared_lookup_references);
1637 return shared_lookup_references;
1638 }
1639
1640 /* Lookup an SCCVN reference operation VR in the current hash table.
1641 Returns the resulting value number if it exists in the hash table,
1642 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1643 vn_reference_t stored in the hashtable if something is found. */
1644
1645 static tree
1646 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1647 {
1648 vn_reference_s **slot;
1649 hashval_t hash;
1650
1651 hash = vr->hashcode;
1652 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1653 if (slot)
1654 {
1655 if (vnresult)
1656 *vnresult = (vn_reference_t)*slot;
1657 return ((vn_reference_t)*slot)->result;
1658 }
1659
1660 return NULL_TREE;
1661 }
1662
1663
1664 /* Partial definition tracking support. */
1665
1666 struct pd_range
1667 {
1668 HOST_WIDE_INT offset;
1669 HOST_WIDE_INT size;
1670 };
1671
1672 struct pd_data
1673 {
1674 tree rhs;
1675 HOST_WIDE_INT offset;
1676 HOST_WIDE_INT size;
1677 };
1678
1679 /* Context for alias walking. */
1680
1681 struct vn_walk_cb_data
1682 {
1683 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1684 vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
1685 : vr (vr_), last_vuse_ptr (last_vuse_ptr_),
1686 vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_), known_ranges (NULL)
1687 {
1688 ao_ref_init (&orig_ref, orig_ref_);
1689 }
1690 ~vn_walk_cb_data ();
1691 void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
1692
1693 vn_reference_t vr;
1694 ao_ref orig_ref;
1695 tree *last_vuse_ptr;
1696 vn_lookup_kind vn_walk_kind;
1697 bool tbaa_p;
1698
1699 /* The VDEFs of partial defs we come along. */
1700 auto_vec<pd_data, 2> partial_defs;
1701 /* The first def's range, to avoid splay tree setup in most cases. */
1702 pd_range first_range;
1703 tree first_vuse;
1704 splay_tree known_ranges;
1705 obstack ranges_obstack;
1706 };
1707
1708 vn_walk_cb_data::~vn_walk_cb_data ()
1709 {
1710 if (known_ranges)
1711 {
1712 splay_tree_delete (known_ranges);
1713 obstack_free (&ranges_obstack, NULL);
1714 }
1715 }
1716
1717 /* pd_range splay-tree helpers. */
1718
1719 static int
1720 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1721 {
1722 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1723 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1724 if (offset1 < offset2)
1725 return -1;
1726 else if (offset1 > offset2)
1727 return 1;
1728 return 0;
1729 }
1730
1731 static void *
1732 pd_tree_alloc (int size, void *data_)
1733 {
1734 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1735 return obstack_alloc (&data->ranges_obstack, size);
1736 }
1737
1738 static void
1739 pd_tree_dealloc (void *, void *)
1740 {
1741 }
1742
1743 /* Push PD to the vector of partial definitions. Return a
1744 value when we are ready to combine things with VUSE and MAXSIZEI,
1745 NULL when we want to continue looking for partial defs, or
1746 (void *)-1 on failure. */
1747
1748 void *
1749 vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
1750 HOST_WIDE_INT maxsizei)
1751 {
1752 if (partial_defs.is_empty ())
1753 {
1754 partial_defs.safe_push (pd);
1755 first_range.offset = pd.offset;
1756 first_range.size = pd.size;
1757 first_vuse = vuse;
1758 last_vuse_ptr = NULL;
1759 /* Continue looking for partial defs. */
1760 return NULL;
1761 }
1762
1763 if (!known_ranges)
1764 {
1765 /* ??? Optimize the case where the 2nd partial def completes things. */
1766 gcc_obstack_init (&ranges_obstack);
1767 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1768 pd_tree_alloc,
1769 pd_tree_dealloc, this);
1770 splay_tree_insert (known_ranges,
1771 (splay_tree_key)&first_range.offset,
1772 (splay_tree_value)&first_range);
1773 }
1774
1775 pd_range newr = { pd.offset, pd.size };
1776 splay_tree_node n;
1777 pd_range *r;
1778 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1779 HOST_WIDE_INT loffset = newr.offset + 1;
1780 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1781 && ((r = (pd_range *)n->value), true)
1782 && ranges_known_overlap_p (r->offset, r->size + 1,
1783 newr.offset, newr.size))
1784 {
1785 /* Ignore partial defs already covered. */
1786 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1787 return NULL;
1788 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1789 }
1790 else
1791 {
1792 /* newr.offset wasn't covered yet, insert the range. */
1793 r = XOBNEW (&ranges_obstack, pd_range);
1794 *r = newr;
1795 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1796 (splay_tree_value)r);
1797 }
1798 /* Merge r, which now contains newr and is a member of the splay tree, with
1799 adjacent overlapping ranges. */
1800 pd_range *rafter;
1801 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1802 && ((rafter = (pd_range *)n->value), true)
1803 && ranges_known_overlap_p (r->offset, r->size + 1,
1804 rafter->offset, rafter->size))
1805 {
1806 r->size = MAX (r->offset + r->size,
1807 rafter->offset + rafter->size) - r->offset;
1808 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1809 }
1810 partial_defs.safe_push (pd);
1811
1812 /* Now we have merged newr into the range tree. When we have covered
1813 [offseti, sizei] then the tree will contain exactly one node which has
1814 the desired properties and it will be 'r'. */
1815 if (!known_subrange_p (0, maxsizei / BITS_PER_UNIT, r->offset, r->size))
1816 /* Continue looking for partial defs. */
1817 return NULL;
1818
1819 /* Now simply native encode all partial defs in reverse order. */
1820 unsigned ndefs = partial_defs.length ();
1821 /* We support up to 512-bit values (for V8DFmode). */
1822 unsigned char buffer[64];
1823 int len;
1824
1825 while (!partial_defs.is_empty ())
1826 {
1827 pd_data pd = partial_defs.pop ();
1828 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1829 /* Empty CONSTRUCTOR. */
1830 memset (buffer + MAX (0, pd.offset),
1831 0, MIN ((HOST_WIDE_INT)sizeof (buffer) - MAX (0, pd.offset),
1832 pd.size + MIN (0, pd.offset)));
1833 else
1834 {
1835 unsigned pad = 0;
1836 if (BYTES_BIG_ENDIAN
1837 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (pd.rhs))))
1838 {
1839 /* On big-endian the padding is at the 'front' so just skip
1840 the initial bytes. */
1841 fixed_size_mode mode
1842 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (pd.rhs)));
1843 pad = GET_MODE_SIZE (mode) - pd.size;
1844 }
1845 len = native_encode_expr (pd.rhs, buffer + MAX (0, pd.offset),
1846 sizeof (buffer) - MAX (0, pd.offset),
1847 MAX (0, -pd.offset) + pad);
1848 if (len <= 0 || len < (pd.size - MAX (0, -pd.offset)))
1849 {
1850 if (dump_file && (dump_flags & TDF_DETAILS))
1851 fprintf (dump_file, "Failed to encode %u "
1852 "partial definitions\n", ndefs);
1853 return (void *)-1;
1854 }
1855 }
1856 }
1857
1858 tree type = vr->type;
1859 /* Make sure to interpret in a type that has a range covering the whole
1860 access size. */
1861 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
1862 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
1863 tree val = native_interpret_expr (type, buffer, maxsizei / BITS_PER_UNIT);
1864 /* If we chop off bits because the type's precision doesn't match the memory
1865 access size, this is ok when optimizing reads but not when called from
1866 the DSE code during elimination. */
1867 if (val && type != vr->type)
1868 {
1869 if (! int_fits_type_p (val, vr->type))
1870 val = NULL_TREE;
1871 else
1872 val = fold_convert (vr->type, val);
1873 }
1874
1875 if (val)
1876 {
1877 if (dump_file && (dump_flags & TDF_DETAILS))
1878 fprintf (dump_file,
1879 "Successfully combined %u partial definitions\n", ndefs);
1880 return vn_reference_lookup_or_insert_for_pieces
1881 (first_vuse, vr->set, vr->type, vr->operands, val);
1882 }
1883 else
1884 {
1885 if (dump_file && (dump_flags & TDF_DETAILS))
1886 fprintf (dump_file,
1887 "Failed to interpret %u encoded partial definitions\n", ndefs);
1888 return (void *)-1;
1889 }
1890 }
1891
1892 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1893 with the current VUSE and performs the expression lookup. */
1894
1895 static void *
1896 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
1897 {
1898 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1899 vn_reference_t vr = data->vr;
1900 vn_reference_s **slot;
1901 hashval_t hash;
1902
1903 /* If we have partial definitions recorded we have to go through
1904 vn_reference_lookup_3. */
1905 if (!data->partial_defs.is_empty ())
1906 return NULL;
1907
1908 if (data->last_vuse_ptr)
1909 *data->last_vuse_ptr = vuse;
1910
1911 /* Fixup vuse and hash. */
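/* The hashcode includes the vuse's SSA_NAME_VERSION additively, so it can
   be adjusted incrementally here rather than recomputed.  */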
1912 if (vr->vuse)
1913 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1914 vr->vuse = vuse_ssa_val (vuse);
1915 if (vr->vuse)
1916 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1917
1918 hash = vr->hashcode;
1919 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1920 if (slot)
1921 return *slot;
1922
1923 return NULL;
1924 }
1925
1926 /* Lookup an existing or insert a new vn_reference entry into the
1927 value table for the VUSE, SET, TYPE, OPERANDS reference which
1928 has the value VALUE which is either a constant or an SSA name. */
1929
1930 static vn_reference_t
1931 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1932 alias_set_type set,
1933 tree type,
1934 vec<vn_reference_op_s,
1935 va_heap> operands,
1936 tree value)
1937 {
1938 vn_reference_s vr1;
1939 vn_reference_t result;
1940 unsigned value_id;
1941 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1942 vr1.operands = operands;
1943 vr1.type = type;
1944 vr1.set = set;
1945 vr1.hashcode = vn_reference_compute_hash (&vr1);
1946 if (vn_reference_lookup_1 (&vr1, &result))
1947 return result;
1948 if (TREE_CODE (value) == SSA_NAME)
1949 value_id = VN_INFO (value)->value_id;
1950 else
1951 value_id = get_or_alloc_constant_value_id (value);
1952 return vn_reference_insert_pieces (vuse, set, type,
1953 operands.copy (), value, value_id);
1954 }
1955
1956 /* Return a value-number for the operation described by RES_OP, either by
1957 looking up an existing value-number for the simplified result or by
1958 inserting the operation if INSERT is true. */
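/* For instance (illustrative), when RES_OP describes _1 + 0 the
   resimplification below yields the gimple value _1, which is valueized
   and returned without inserting anything new.  */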
1959
1960 static tree
1961 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1962 {
1963 tree result = NULL_TREE;
1964 /* We will be creating a value number for
1965 RCODE (OPS...).
1966 So first simplify and lookup this expression to see if it
1967 is already available. */
1968 /* For simplification valueize. */
1969 unsigned i;
1970 for (i = 0; i < res_op->num_ops; ++i)
1971 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
1972 {
1973 tree tem = vn_valueize (res_op->ops[i]);
1974 if (!tem)
1975 break;
1976 res_op->ops[i] = tem;
1977 }
1978 /* If valueization of an operand fails (it is not available), skip
1979 simplification. */
1980 bool res = false;
1981 if (i == res_op->num_ops)
1982 {
1983 mprts_hook = vn_lookup_simplify_result;
1984 res = res_op->resimplify (NULL, vn_valueize);
1985 mprts_hook = NULL;
1986 }
1987 gimple *new_stmt = NULL;
1988 if (res
1989 && gimple_simplified_result_is_gimple_val (res_op))
1990 {
1991 /* The expression is already available. */
1992 result = res_op->ops[0];
1993 /* Valueize it, simplification returns something in AVAIL only. */
1994 if (TREE_CODE (result) == SSA_NAME)
1995 result = SSA_VAL (result);
1996 }
1997 else
1998 {
1999 tree val = vn_lookup_simplify_result (res_op);
2000 if (!val && insert)
2001 {
2002 gimple_seq stmts = NULL;
2003 result = maybe_push_res_to_seq (res_op, &stmts);
2004 if (result)
2005 {
2006 gcc_assert (gimple_seq_singleton_p (stmts));
2007 new_stmt = gimple_seq_first_stmt (stmts);
2008 }
2009 }
2010 else
2011 /* The expression is already available. */
2012 result = val;
2013 }
2014 if (new_stmt)
2015 {
2016 /* The expression is not yet available, value-number lhs to
2017 the new SSA_NAME we created. */
2018 /* Initialize value-number information properly. */
2019 vn_ssa_aux_t result_info = VN_INFO (result);
2020 result_info->valnum = result;
2021 result_info->value_id = get_next_value_id ();
2022 result_info->visited = 1;
2023 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2024 new_stmt);
2025 result_info->needs_insertion = true;
2026 /* ??? PRE phi-translation inserts NARYs without corresponding
2027 SSA name result. Re-use those but set their result according
2028 to the stmt we just built. */
2029 vn_nary_op_t nary = NULL;
2030 vn_nary_op_lookup_stmt (new_stmt, &nary);
2031 if (nary)
2032 {
2033 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2034 nary->u.result = gimple_assign_lhs (new_stmt);
2035 }
2036 /* As all "inserted" statements are singleton SCCs, insert
2037 to the valid table. This is strictly needed to
2038 avoid re-generating new value SSA_NAMEs for the same
2039 expression during SCC iteration over and over (the
2040 optimistic table gets cleared after each iteration).
2041 We do not need to insert into the optimistic table, as
2042 lookups there will fall back to the valid table. */
2043 else
2044 {
2045 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2046 vn_nary_op_t vno1
2047 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2048 vno1->value_id = result_info->value_id;
2049 vno1->length = length;
2050 vno1->predicated_values = 0;
2051 vno1->u.result = result;
2052 init_vn_nary_op_from_stmt (vno1, new_stmt);
2053 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2054 /* Also do not link it into the undo chain. */
2055 last_inserted_nary = vno1->next;
2056 vno1->next = (vn_nary_op_t)(void *)-1;
2057 }
2058 if (dump_file && (dump_flags & TDF_DETAILS))
2059 {
2060 fprintf (dump_file, "Inserting name ");
2061 print_generic_expr (dump_file, result);
2062 fprintf (dump_file, " for expression ");
2063 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2064 fprintf (dump_file, "\n");
2065 }
2066 }
2067 return result;
2068 }
2069
2070 /* Return a value-number for the operation described by RES_OP, either by looking
2071 up an existing value-number for the simplified result or by inserting it. */
2072
2073 static tree
2074 vn_nary_build_or_lookup (gimple_match_op *res_op)
2075 {
2076 return vn_nary_build_or_lookup_1 (res_op, true);
2077 }
2078
2079 /* Try to simplify the n-ary operation NARY and return its value
2080 if present. */
2081
2082 tree
2083 vn_nary_simplify (vn_nary_op_t nary)
2084 {
2085 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2086 return NULL_TREE;
2087 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2088 nary->type, nary->length);
2089 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2090 return vn_nary_build_or_lookup_1 (&op, false);
2091 }
2092
2093 /* Elimination engine. */
2094
2095 class eliminate_dom_walker : public dom_walker
2096 {
2097 public:
2098 eliminate_dom_walker (cdi_direction, bitmap);
2099 ~eliminate_dom_walker ();
2100
2101 virtual edge before_dom_children (basic_block);
2102 virtual void after_dom_children (basic_block);
2103
2104 virtual tree eliminate_avail (basic_block, tree op);
2105 virtual void eliminate_push_avail (basic_block, tree op);
2106 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2107
2108 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2109
2110 unsigned eliminate_cleanup (bool region_p = false);
2111
2112 bool do_pre;
2113 unsigned int el_todo;
2114 unsigned int eliminations;
2115 unsigned int insertions;
2116
2117 /* SSA names that had their defs inserted by PRE if do_pre. */
2118 bitmap inserted_exprs;
2119
2120 /* Blocks with statements that have had their EH properties changed. */
2121 bitmap need_eh_cleanup;
2122
2123 /* Blocks with statements that have had their AB properties changed. */
2124 bitmap need_ab_cleanup;
2125
2126 /* Local state for the eliminate domwalk. */
2127 auto_vec<gimple *> to_remove;
2128 auto_vec<gimple *> to_fixup;
2129 auto_vec<tree> avail;
2130 auto_vec<tree> avail_stack;
2131 };
2132
2133 /* Adaptor to the elimination engine using RPO availability. */
2134
2135 class rpo_elim : public eliminate_dom_walker
2136 {
2137 public:
2138 rpo_elim(basic_block entry_)
2139 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2140 m_avail_freelist (NULL) {}
2141
2142 virtual tree eliminate_avail (basic_block, tree op);
2143
2144 virtual void eliminate_push_avail (basic_block, tree);
2145
2146 basic_block entry;
2147 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2148 obstack. */
2149 vn_avail *m_avail_freelist;
2150 };
2151
2152 /* Global RPO state for access from hooks. */
2153 static rpo_elim *rpo_avail;
2154 basic_block vn_context_bb;
2155
2156 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2157 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2158 Otherwise return false. */
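/* For example (illustrative): for MEM_REF[p_1 + 4] and MEM_REF[p_1 + 16]
   the bases are both adjusted to p_1 while *OFFSET1 and *OFFSET2 are
   increased by 4 and 16 units' worth of bits, respectively.  */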
2159
2160 static bool
2161 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2162 tree base2, poly_int64 *offset2)
2163 {
2164 poly_int64 soff;
2165 if (TREE_CODE (base1) == MEM_REF
2166 && TREE_CODE (base2) == MEM_REF)
2167 {
2168 if (mem_ref_offset (base1).to_shwi (&soff))
2169 {
2170 base1 = TREE_OPERAND (base1, 0);
2171 *offset1 += soff * BITS_PER_UNIT;
2172 }
2173 if (mem_ref_offset (base2).to_shwi (&soff))
2174 {
2175 base2 = TREE_OPERAND (base2, 0);
2176 *offset2 += soff * BITS_PER_UNIT;
2177 }
2178 return operand_equal_p (base1, base2, 0);
2179 }
2180 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2181 }
2182
2183 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2184 from the statement defining VUSE and if not successful tries to
2185 translate *REF and *VR through an aggregate copy at the definition
2186 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2187 of *REF and *VR. If only disambiguation was performed then
2188 *DISAMBIGUATE_ONLY is set to true. */
2189
2190 static void *
2191 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2192 bool *disambiguate_only)
2193 {
2194 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2195 vn_reference_t vr = data->vr;
2196 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2197 tree base = ao_ref_base (ref);
2198 HOST_WIDE_INT offseti, maxsizei;
2199 static vec<vn_reference_op_s> lhs_ops;
2200 ao_ref lhs_ref;
2201 bool lhs_ref_ok = false;
2202 poly_int64 copy_size;
2203
2204 /* First try to disambiguate after value-replacing in the definitions LHS. */
2205 if (is_gimple_assign (def_stmt))
2206 {
2207 tree lhs = gimple_assign_lhs (def_stmt);
2208 bool valueized_anything = false;
2209 /* Avoid re-allocation overhead. */
2210 lhs_ops.truncate (0);
2211 basic_block saved_rpo_bb = vn_context_bb;
2212 vn_context_bb = gimple_bb (def_stmt);
2213 copy_reference_ops_from_ref (lhs, &lhs_ops);
2214 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2215 vn_context_bb = saved_rpo_bb;
2216 if (valueized_anything)
2217 {
2218 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
2219 get_alias_set (lhs),
2220 TREE_TYPE (lhs), lhs_ops);
2221 if (lhs_ref_ok
2222 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2223 {
2224 *disambiguate_only = true;
2225 return NULL;
2226 }
2227 }
2228 else
2229 {
2230 ao_ref_init (&lhs_ref, lhs);
2231 lhs_ref_ok = true;
2232 }
2233
2234 /* Besides valueizing the LHS we can also use access-path based
2235 disambiguation on the original non-valueized ref. */
2236 if (!ref->ref
2237 && lhs_ref_ok
2238 && data->orig_ref.ref)
2239 {
2240 /* We want to use the non-valueized LHS for this, but avoid redundant
2241 work. */
2242 ao_ref *lref = &lhs_ref;
2243 ao_ref lref_alt;
2244 if (valueized_anything)
2245 {
2246 ao_ref_init (&lref_alt, lhs);
2247 lref = &lref_alt;
2248 }
2249 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2250 {
2251 *disambiguate_only = true;
2252 return NULL;
2253 }
2254 }
2255
2256 /* If we reach a clobbering statement try to skip it and see if
2257 we find a VN result with exactly the same value as the
2258 possible clobber. In this case we can ignore the clobber
2259 and return the found value. */
2260 if (is_gimple_reg_type (TREE_TYPE (lhs))
2261 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2262 && ref->ref)
2263 {
2264 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2265 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2266 data->last_vuse_ptr = NULL;
2267 tree saved_vuse = vr->vuse;
2268 hashval_t saved_hashcode = vr->hashcode;
2269 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2270 /* Need to restore vr->vuse and vr->hashcode. */
2271 vr->vuse = saved_vuse;
2272 vr->hashcode = saved_hashcode;
2273 data->last_vuse_ptr = saved_last_vuse_ptr;
2274 if (res && res != (void *)-1)
2275 {
2276 vn_reference_t vnresult = (vn_reference_t) res;
2277 tree rhs = gimple_assign_rhs1 (def_stmt);
2278 if (TREE_CODE (rhs) == SSA_NAME)
2279 rhs = SSA_VAL (rhs);
2280 if (vnresult->result
2281 && operand_equal_p (vnresult->result, rhs, 0)
2282 /* We have to honor our promise about union type punning
2283 and also support arbitrary overlaps with
2284 -fno-strict-aliasing. So simply resort to alignment to
2285 rule out overlaps. Do this check last because it is
2286 quite expensive compared to the hash-lookup above. */
2287 && multiple_p (get_object_alignment (ref->ref), ref->size)
2288 && multiple_p (get_object_alignment (lhs), ref->size))
2289 return res;
2290 }
2291 }
2292 }
2293 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2294 && gimple_call_num_args (def_stmt) <= 4)
2295 {
2296 /* For builtin calls valueize its arguments and call the
2297 alias oracle again. Valueization may improve points-to
2298 info of pointers and constify size and position arguments.
2299 Originally this was motivated by PR61034 which has
2300 conditional calls to free falsely clobbering ref because
2301 of imprecise points-to info of the argument. */
2302 tree oldargs[4];
2303 bool valueized_anything = false;
2304 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2305 {
2306 oldargs[i] = gimple_call_arg (def_stmt, i);
2307 tree val = vn_valueize (oldargs[i]);
2308 if (val != oldargs[i])
2309 {
2310 gimple_call_set_arg (def_stmt, i, val);
2311 valueized_anything = true;
2312 }
2313 }
2314 if (valueized_anything)
2315 {
2316 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2317 ref);
2318 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2319 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2320 if (!res)
2321 {
2322 *disambiguate_only = true;
2323 return NULL;
2324 }
2325 }
2326 }
2327
2328 /* If we are looking for redundant stores do not create new hashtable
2329 entries from aliasing defs with made up alias-sets. */
2330 if (*disambiguate_only || !data->tbaa_p)
2331 return (void *)-1;
2332
2333 /* If we cannot constrain the size of the reference we cannot
2334 test if anything kills it. */
2335 if (!ref->max_size_known_p ())
2336 return (void *)-1;
2337
2338 poly_int64 offset = ref->offset;
2339 poly_int64 maxsize = ref->max_size;
2340
2341 /* We can't deduce anything useful from clobbers. */
2342 if (gimple_clobber_p (def_stmt))
2343 return (void *)-1;
2344
2345 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2346 from that definition.
2347 1) Memset. */
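/* Illustrative example: for
     memset (&a, 0, sizeof (a));
     ...
     x_1 = a.f;
   a read fully covered by the memset destination can be value-numbered to
   zero (or, for a nonzero byte value, to a value synthesized below).  */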
2348 if (is_gimple_reg_type (vr->type)
2349 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2350 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2351 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2352 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2353 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2354 && offset.is_constant (&offseti)
2355 && offseti % BITS_PER_UNIT == 0))
2356 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2357 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2358 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2359 {
2360 tree base2;
2361 poly_int64 offset2, size2, maxsize2;
2362 bool reverse;
2363 tree ref2 = gimple_call_arg (def_stmt, 0);
2364 if (TREE_CODE (ref2) == SSA_NAME)
2365 {
2366 ref2 = SSA_VAL (ref2);
2367 if (TREE_CODE (ref2) == SSA_NAME
2368 && (TREE_CODE (base) != MEM_REF
2369 || TREE_OPERAND (base, 0) != ref2))
2370 {
2371 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2372 if (gimple_assign_single_p (def_stmt)
2373 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2374 ref2 = gimple_assign_rhs1 (def_stmt);
2375 }
2376 }
2377 if (TREE_CODE (ref2) == ADDR_EXPR)
2378 {
2379 ref2 = TREE_OPERAND (ref2, 0);
2380 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2381 &reverse);
2382 if (!known_size_p (maxsize2)
2383 || !known_eq (maxsize2, size2)
2384 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2385 return (void *)-1;
2386 }
2387 else if (TREE_CODE (ref2) == SSA_NAME)
2388 {
2389 poly_int64 soff;
2390 if (TREE_CODE (base) != MEM_REF
2391 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2392 return (void *)-1;
2393 offset += soff;
2394 offset2 = 0;
2395 if (TREE_OPERAND (base, 0) != ref2)
2396 {
2397 gimple *def = SSA_NAME_DEF_STMT (ref2);
2398 if (is_gimple_assign (def)
2399 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2400 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2401 && poly_int_tree_p (gimple_assign_rhs2 (def))
2402 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2403 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2404 {
2405 ref2 = gimple_assign_rhs1 (def);
2406 if (TREE_CODE (ref2) == SSA_NAME)
2407 ref2 = SSA_VAL (ref2);
2408 }
2409 else
2410 return (void *)-1;
2411 }
2412 }
2413 else
2414 return (void *)-1;
2415 tree len = gimple_call_arg (def_stmt, 2);
2416 HOST_WIDE_INT leni, offset2i, offseti;
2417 if (data->partial_defs.is_empty ()
2418 && known_subrange_p (offset, maxsize, offset2,
2419 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2420 {
2421 tree val;
2422 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2423 val = build_zero_cst (vr->type);
2424 else if (INTEGRAL_TYPE_P (vr->type)
2425 && known_eq (ref->size, 8))
2426 {
2427 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2428 vr->type, gimple_call_arg (def_stmt, 1));
2429 val = vn_nary_build_or_lookup (&res_op);
2430 if (!val
2431 || (TREE_CODE (val) == SSA_NAME
2432 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2433 return (void *)-1;
2434 }
2435 else
2436 {
2437 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2438 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2439 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2440 len);
2441 val = native_interpret_expr (vr->type, buf, len);
2442 if (!val)
2443 return (void *)-1;
2444 }
2445 return vn_reference_lookup_or_insert_for_pieces
2446 (vuse, vr->set, vr->type, vr->operands, val);
2447 }
2448 /* For now handle clearing memory with partial defs. */
2449 else if (known_eq (ref->size, maxsize)
2450 && integer_zerop (gimple_call_arg (def_stmt, 1))
2451 && tree_to_poly_int64 (len).is_constant (&leni)
2452 && offset.is_constant (&offseti)
2453 && offset2.is_constant (&offset2i)
2454 && maxsize.is_constant (&maxsizei))
2455 {
2456 pd_data pd;
2457 pd.rhs = build_constructor (NULL_TREE, NULL);
2458 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2459 pd.size = leni;
2460 return data->push_partial_def (pd, vuse, maxsizei);
2461 }
2462 }
2463
2464 /* 2) Assignment from an empty CONSTRUCTOR. */
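/* Illustrative example: after the aggregate clearing
     a = {};
   a read such as x_1 = a.f that falls within the cleared object can be
   value-numbered to zero, either directly or as a partial def.  */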
2465 else if (is_gimple_reg_type (vr->type)
2466 && gimple_assign_single_p (def_stmt)
2467 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2468 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2469 {
2470 tree lhs = gimple_assign_lhs (def_stmt);
2471 tree base2;
2472 poly_int64 offset2, size2, maxsize2;
2473 HOST_WIDE_INT offset2i, size2i;
2474 bool reverse;
2475 if (lhs_ref_ok)
2476 {
2477 base2 = ao_ref_base (&lhs_ref);
2478 offset2 = lhs_ref.offset;
2479 size2 = lhs_ref.size;
2480 maxsize2 = lhs_ref.max_size;
2481 reverse = reverse_storage_order_for_component_p (lhs);
2482 }
2483 else
2484 base2 = get_ref_base_and_extent (lhs,
2485 &offset2, &size2, &maxsize2, &reverse);
2486 if (known_size_p (maxsize2)
2487 && known_eq (maxsize2, size2)
2488 && adjust_offsets_for_equal_base_address (base, &offset,
2489 base2, &offset2))
2490 {
2491 if (data->partial_defs.is_empty ()
2492 && known_subrange_p (offset, maxsize, offset2, size2))
2493 {
2494 tree val = build_zero_cst (vr->type);
2495 return vn_reference_lookup_or_insert_for_pieces
2496 (vuse, vr->set, vr->type, vr->operands, val);
2497 }
2498 else if (known_eq (ref->size, maxsize)
2499 && maxsize.is_constant (&maxsizei)
2500 && maxsizei % BITS_PER_UNIT == 0
2501 && offset.is_constant (&offseti)
2502 && offseti % BITS_PER_UNIT == 0
2503 && offset2.is_constant (&offset2i)
2504 && offset2i % BITS_PER_UNIT == 0
2505 && size2.is_constant (&size2i)
2506 && size2i % BITS_PER_UNIT == 0)
2507 {
2508 pd_data pd;
2509 pd.rhs = gimple_assign_rhs1 (def_stmt);
2510 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2511 pd.size = size2i / BITS_PER_UNIT;
2512 return data->push_partial_def (pd, vuse, maxsizei);
2513 }
2514 }
2515 }
2516
2517 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2518 routines to extract the assigned bits. */
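/* Illustrative example: after
     a.i = 0x11223344;
   a byte-aligned read of part of a.i can be computed by natively encoding
   the constant into a byte buffer and re-interpreting the accessed bytes
   in the type of the read.  */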
2519 else if (known_eq (ref->size, maxsize)
2520 && is_gimple_reg_type (vr->type)
2521 && !contains_storage_order_barrier_p (vr->operands)
2522 && gimple_assign_single_p (def_stmt)
2523 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2524 /* native_encode and native_decode operate on arrays of bytes
2525 and so fundamentally need a compile-time size and offset. */
2526 && maxsize.is_constant (&maxsizei)
2527 && maxsizei % BITS_PER_UNIT == 0
2528 && offset.is_constant (&offseti)
2529 && offseti % BITS_PER_UNIT == 0
2530 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2531 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2532 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2533 {
2534 tree lhs = gimple_assign_lhs (def_stmt);
2535 tree base2;
2536 poly_int64 offset2, size2, maxsize2;
2537 HOST_WIDE_INT offset2i, size2i;
2538 bool reverse;
2539 if (lhs_ref_ok)
2540 {
2541 base2 = ao_ref_base (&lhs_ref);
2542 offset2 = lhs_ref.offset;
2543 size2 = lhs_ref.size;
2544 maxsize2 = lhs_ref.max_size;
2545 reverse = reverse_storage_order_for_component_p (lhs);
2546 }
2547 else
2548 base2 = get_ref_base_and_extent (lhs,
2549 &offset2, &size2, &maxsize2, &reverse);
2550 if (base2
2551 && !reverse
2552 && known_eq (maxsize2, size2)
2553 && multiple_p (size2, BITS_PER_UNIT)
2554 && multiple_p (offset2, BITS_PER_UNIT)
2555 && adjust_offsets_for_equal_base_address (base, &offset,
2556 base2, &offset2)
2557 && offset.is_constant (&offseti)
2558 && offset2.is_constant (&offset2i)
2559 && size2.is_constant (&size2i))
2560 {
2561 if (data->partial_defs.is_empty ()
2562 && known_subrange_p (offseti, maxsizei, offset2, size2))
2563 {
2564 /* We support up to 512-bit values (for V8DFmode). */
2565 unsigned char buffer[64];
2566 int len;
2567
2568 tree rhs = gimple_assign_rhs1 (def_stmt);
2569 if (TREE_CODE (rhs) == SSA_NAME)
2570 rhs = SSA_VAL (rhs);
2571 unsigned pad = 0;
2572 if (BYTES_BIG_ENDIAN
2573 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2574 {
2575 /* On big-endian the padding is at the 'front' so
2576 just skip the initial bytes. */
2577 fixed_size_mode mode
2578 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2579 pad = GET_MODE_SIZE (mode) - size2i / BITS_PER_UNIT;
2580 }
2581 len = native_encode_expr (rhs,
2582 buffer, sizeof (buffer),
2583 ((offseti - offset2i) / BITS_PER_UNIT
2584 + pad));
2585 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2586 {
2587 tree type = vr->type;
2588 /* Make sure to interpret in a type that has a range
2589 covering the whole access size. */
2590 if (INTEGRAL_TYPE_P (vr->type)
2591 && maxsizei != TYPE_PRECISION (vr->type))
2592 type = build_nonstandard_integer_type (maxsizei,
2593 TYPE_UNSIGNED (type));
2594 tree val = native_interpret_expr (type, buffer,
2595 maxsizei / BITS_PER_UNIT);
2596 /* If we chop off bits because the type's precision doesn't
2597 match the memory access size this is OK when optimizing
2598 reads but not when called from the DSE code during
2599 elimination. */
2600 if (val
2601 && type != vr->type)
2602 {
2603 if (! int_fits_type_p (val, vr->type))
2604 val = NULL_TREE;
2605 else
2606 val = fold_convert (vr->type, val);
2607 }
2608
2609 if (val)
2610 return vn_reference_lookup_or_insert_for_pieces
2611 (vuse, vr->set, vr->type, vr->operands, val);
2612 }
2613 }
2614 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
2615 {
2616 pd_data pd;
2617 tree rhs = gimple_assign_rhs1 (def_stmt);
2618 if (TREE_CODE (rhs) == SSA_NAME)
2619 rhs = SSA_VAL (rhs);
2620 pd.rhs = rhs;
2621 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2622 pd.size = size2i / BITS_PER_UNIT;
2623 return data->push_partial_def (pd, vuse, maxsizei);
2624 }
2625 }
2626 }
2627
2628 /* 4) Assignment from an SSA name whose definition we may be able
2629 to access pieces from. */
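/* Illustrative example: after storing a whole SSA value
     a = x_1;   (e.g. a complex- or vector-typed store)
   a read of a piece of 'a' can be expressed as
     BIT_FIELD_REF <x_1, size, position>
   and value-numbered through the n-ary machinery below.  */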
2630 else if (known_eq (ref->size, maxsize)
2631 && is_gimple_reg_type (vr->type)
2632 && !contains_storage_order_barrier_p (vr->operands)
2633 && gimple_assign_single_p (def_stmt)
2634 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2635 /* A subset of partial defs from non-constants can be handled
2636 by for example inserting a CONSTRUCTOR, a COMPLEX_EXPR or
2637 even a (series of) BIT_INSERT_EXPR hoping for simplifications
2638 downstream, not so much for actually doing the insertion. */
2639 && data->partial_defs.is_empty ())
2640 {
2641 tree lhs = gimple_assign_lhs (def_stmt);
2642 tree base2;
2643 poly_int64 offset2, size2, maxsize2;
2644 bool reverse;
2645 if (lhs_ref_ok)
2646 {
2647 base2 = ao_ref_base (&lhs_ref);
2648 offset2 = lhs_ref.offset;
2649 size2 = lhs_ref.size;
2650 maxsize2 = lhs_ref.max_size;
2651 reverse = reverse_storage_order_for_component_p (lhs);
2652 }
2653 else
2654 base2 = get_ref_base_and_extent (lhs,
2655 &offset2, &size2, &maxsize2, &reverse);
2656 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2657 if (!reverse
2658 && known_size_p (maxsize2)
2659 && known_eq (maxsize2, size2)
2660 && adjust_offsets_for_equal_base_address (base, &offset,
2661 base2, &offset2)
2662 && known_subrange_p (offset, maxsize, offset2, size2)
2663 /* ??? We can't handle bitfield precision extracts without
2664 either using an alternate type for the BIT_FIELD_REF and
2665 then doing a conversion or possibly adjusting the offset
2666 according to endianness. */
2667 && (! INTEGRAL_TYPE_P (vr->type)
2668 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2669 && multiple_p (ref->size, BITS_PER_UNIT)
2670 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2671 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2672 {
2673 gimple_match_op op (gimple_match_cond::UNCOND,
2674 BIT_FIELD_REF, vr->type,
2675 vn_valueize (def_rhs),
2676 bitsize_int (ref->size),
2677 bitsize_int (offset - offset2));
2678 tree val = vn_nary_build_or_lookup (&op);
2679 if (val
2680 && (TREE_CODE (val) != SSA_NAME
2681 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2682 {
2683 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2684 (vuse, vr->set, vr->type, vr->operands, val);
2685 return res;
2686 }
2687 }
2688 }
2689
2690 /* 5) For aggregate copies translate the reference through them if
2691 the copy kills ref. */
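/* Illustrative example: for
     *p_1 = *q_2;
     ...
     x_3 = p_1->f;
   when the copy kills the read, the lookup continues as if it were
     x_3 = q_2->f;
   by rewriting the reference onto the copy's right-hand side.  */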
2692 else if (data->vn_walk_kind == VN_WALKREWRITE
2693 && gimple_assign_single_p (def_stmt)
2694 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2695 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2696 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2697 {
2698 tree base2;
2699 int i, j, k;
2700 auto_vec<vn_reference_op_s> rhs;
2701 vn_reference_op_t vro;
2702 ao_ref r;
2703
2704 if (!lhs_ref_ok)
2705 return (void *)-1;
2706
2707 /* See if the assignment kills REF. */
2708 base2 = ao_ref_base (&lhs_ref);
2709 if (!lhs_ref.max_size_known_p ()
2710 || (base != base2
2711 && (TREE_CODE (base) != MEM_REF
2712 || TREE_CODE (base2) != MEM_REF
2713 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2714 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2715 TREE_OPERAND (base2, 1))))
2716 || !stmt_kills_ref_p (def_stmt, ref))
2717 return (void *)-1;
2718
2719 /* Find the common base of ref and the lhs. lhs_ops already
2720 contains valueized operands for the lhs. */
2721 i = vr->operands.length () - 1;
2722 j = lhs_ops.length () - 1;
2723 while (j >= 0 && i >= 0
2724 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2725 {
2726 i--;
2727 j--;
2728 }
2729
2730 /* ??? The innermost op should always be a MEM_REF and we already
2731 checked that the assignment to the lhs kills vr. Thus for
2732 aggregate copies using char[] types the vn_reference_op_eq
2733 may fail when comparing types for compatibility. But we really
2734 don't care here - further lookups with the rewritten operands
2735 will simply fail if we messed up types too badly. */
2736 poly_int64 extra_off = 0;
2737 if (j == 0 && i >= 0
2738 && lhs_ops[0].opcode == MEM_REF
2739 && maybe_ne (lhs_ops[0].off, -1))
2740 {
2741 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2742 i--, j--;
2743 else if (vr->operands[i].opcode == MEM_REF
2744 && maybe_ne (vr->operands[i].off, -1))
2745 {
2746 extra_off = vr->operands[i].off - lhs_ops[0].off;
2747 i--, j--;
2748 }
2749 }
2750
2751 /* i now points to the first additional op.
2752 ??? LHS may not be completely contained in VR, one or more
2753 VIEW_CONVERT_EXPRs could be in its way. We could at least
2754 try handling outermost VIEW_CONVERT_EXPRs. */
2755 if (j != -1)
2756 return (void *)-1;
2757
2758 /* Punt if the additional ops contain a storage order barrier. */
2759 for (k = i; k >= 0; k--)
2760 {
2761 vro = &vr->operands[k];
2762 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2763 return (void *)-1;
2764 }
2765
2766 /* Now re-write REF to be based on the rhs of the assignment. */
2767 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2768
2769 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2770 if (maybe_ne (extra_off, 0))
2771 {
2772 if (rhs.length () < 2)
2773 return (void *)-1;
2774 int ix = rhs.length () - 2;
2775 if (rhs[ix].opcode != MEM_REF
2776 || known_eq (rhs[ix].off, -1))
2777 return (void *)-1;
2778 rhs[ix].off += extra_off;
2779 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2780 build_int_cst (TREE_TYPE (rhs[ix].op0),
2781 extra_off));
2782 }
2783
2784 /* We need to pre-pend vr->operands[0..i] to rhs. */
2785 vec<vn_reference_op_s> old = vr->operands;
2786 if (i + 1 + rhs.length () > vr->operands.length ())
2787 vr->operands.safe_grow (i + 1 + rhs.length ());
2788 else
2789 vr->operands.truncate (i + 1 + rhs.length ());
2790 FOR_EACH_VEC_ELT (rhs, j, vro)
2791 vr->operands[i + 1 + j] = *vro;
2792 vr->operands = valueize_refs (vr->operands);
2793 if (old == shared_lookup_references)
2794 shared_lookup_references = vr->operands;
2795 vr->hashcode = vn_reference_compute_hash (vr);
2796
2797 /* Try folding the new reference to a constant. */
2798 tree val = fully_constant_vn_reference_p (vr);
2799 if (val)
2800 {
2801 if (data->partial_defs.is_empty ())
2802 return vn_reference_lookup_or_insert_for_pieces
2803 (vuse, vr->set, vr->type, vr->operands, val);
2804 /* This is the only interesting case for partial-def handling
2805 coming from targets that like to gimplify init-ctors as
2806 aggregate copies from constant data like aarch64 for
2807 PR83518. */
2808 if (maxsize.is_constant (&maxsizei)
2809 && known_eq (ref->size, maxsize))
2810 {
2811 pd_data pd;
2812 pd.rhs = val;
2813 pd.offset = 0;
2814 pd.size = maxsizei / BITS_PER_UNIT;
2815 return data->push_partial_def (pd, vuse, maxsizei);
2816 }
2817 }
2818
2819 /* Continuing with partial defs isn't easily possible here, we
2820 have to find a full def from further lookups from here. Probably
2821 not worth the special-casing everywhere. */
2822 if (!data->partial_defs.is_empty ())
2823 return (void *)-1;
2824
2825 /* Adjust *ref from the new operands. */
2826 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2827 return (void *)-1;
2828 /* This can happen with bitfields. */
2829 if (maybe_ne (ref->size, r.size))
2830 return (void *)-1;
2831 *ref = r;
2832
2833 /* Do not update last seen VUSE after translating. */
2834 data->last_vuse_ptr = NULL;
2835 /* Invalidate the original access path since it now contains
2836 the wrong base. */
2837 data->orig_ref.ref = NULL_TREE;
2838
2839 /* Keep looking for the adjusted *REF / VR pair. */
2840 return NULL;
2841 }
2842
2843 /* 6) For memcpy copies translate the reference through them if
2844 the copy kills ref. */
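/* Illustrative example: for
     memcpy (&a, &b, sizeof (a));
     ...
     x_1 = a.f;
   when the read is contained in the copied region the reference is
   rewritten to read the corresponding bytes of 'b' instead.  */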
2845 else if (data->vn_walk_kind == VN_WALKREWRITE
2846 && is_gimple_reg_type (vr->type)
2847 /* ??? Handle BCOPY as well. */
2848 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2849 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2850 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2851 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2852 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2853 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2854 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2855 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
2856 /* Handling this is more complicated, give up for now. */
2857 && data->partial_defs.is_empty ())
2858 {
2859 tree lhs, rhs;
2860 ao_ref r;
2861 poly_int64 rhs_offset, lhs_offset;
2862 vn_reference_op_s op;
2863 poly_uint64 mem_offset;
2864 poly_int64 at, byte_maxsize;
2865
2866 /* Only handle non-variable, addressable refs. */
2867 if (maybe_ne (ref->size, maxsize)
2868 || !multiple_p (offset, BITS_PER_UNIT, &at)
2869 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2870 return (void *)-1;
2871
2872 /* Extract a pointer base and an offset for the destination. */
2873 lhs = gimple_call_arg (def_stmt, 0);
2874 lhs_offset = 0;
2875 if (TREE_CODE (lhs) == SSA_NAME)
2876 {
2877 lhs = vn_valueize (lhs);
2878 if (TREE_CODE (lhs) == SSA_NAME)
2879 {
2880 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2881 if (gimple_assign_single_p (def_stmt)
2882 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2883 lhs = gimple_assign_rhs1 (def_stmt);
2884 }
2885 }
2886 if (TREE_CODE (lhs) == ADDR_EXPR)
2887 {
2888 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2889 &lhs_offset);
2890 if (!tem)
2891 return (void *)-1;
2892 if (TREE_CODE (tem) == MEM_REF
2893 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2894 {
2895 lhs = TREE_OPERAND (tem, 0);
2896 if (TREE_CODE (lhs) == SSA_NAME)
2897 lhs = vn_valueize (lhs);
2898 lhs_offset += mem_offset;
2899 }
2900 else if (DECL_P (tem))
2901 lhs = build_fold_addr_expr (tem);
2902 else
2903 return (void *)-1;
2904 }
2905 if (TREE_CODE (lhs) != SSA_NAME
2906 && TREE_CODE (lhs) != ADDR_EXPR)
2907 return (void *)-1;
2908
2909 /* Extract a pointer base and an offset for the source. */
2910 rhs = gimple_call_arg (def_stmt, 1);
2911 rhs_offset = 0;
2912 if (TREE_CODE (rhs) == SSA_NAME)
2913 rhs = vn_valueize (rhs);
2914 if (TREE_CODE (rhs) == ADDR_EXPR)
2915 {
2916 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2917 &rhs_offset);
2918 if (!tem)
2919 return (void *)-1;
2920 if (TREE_CODE (tem) == MEM_REF
2921 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2922 {
2923 rhs = TREE_OPERAND (tem, 0);
2924 rhs_offset += mem_offset;
2925 }
2926 else if (DECL_P (tem)
2927 || TREE_CODE (tem) == STRING_CST)
2928 rhs = build_fold_addr_expr (tem);
2929 else
2930 return (void *)-1;
2931 }
2932 if (TREE_CODE (rhs) != SSA_NAME
2933 && TREE_CODE (rhs) != ADDR_EXPR)
2934 return (void *)-1;
2935
2936 /* The bases of the destination and the references have to agree. */
2937 if (TREE_CODE (base) == MEM_REF)
2938 {
2939 if (TREE_OPERAND (base, 0) != lhs
2940 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2941 return (void *) -1;
2942 at += mem_offset;
2943 }
2944 else if (!DECL_P (base)
2945 || TREE_CODE (lhs) != ADDR_EXPR
2946 || TREE_OPERAND (lhs, 0) != base)
2947 return (void *)-1;
2948
2949 /* If the access is completely outside of the memcpy destination
2950 area there is no aliasing. */
2951 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2952 return NULL;
2953 /* And the access has to be contained within the memcpy destination. */
2954 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2955 return (void *)-1;
2956
2957 /* Make room for 2 operands in the new reference. */
2958 if (vr->operands.length () < 2)
2959 {
2960 vec<vn_reference_op_s> old = vr->operands;
2961 vr->operands.safe_grow_cleared (2);
2962 if (old == shared_lookup_references)
2963 shared_lookup_references = vr->operands;
2964 }
2965 else
2966 vr->operands.truncate (2);
2967
2968 /* The looked-through reference is a simple MEM_REF. */
2969 memset (&op, 0, sizeof (op));
2970 op.type = vr->type;
2971 op.opcode = MEM_REF;
2972 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2973 op.off = at - lhs_offset + rhs_offset;
2974 vr->operands[0] = op;
2975 op.type = TREE_TYPE (rhs);
2976 op.opcode = TREE_CODE (rhs);
2977 op.op0 = rhs;
2978 op.off = -1;
2979 vr->operands[1] = op;
2980 vr->hashcode = vn_reference_compute_hash (vr);
2981
2982 /* Try folding the new reference to a constant. */
2983 tree val = fully_constant_vn_reference_p (vr);
2984 if (val)
2985 return vn_reference_lookup_or_insert_for_pieces
2986 (vuse, vr->set, vr->type, vr->operands, val);
2987
2988 /* Adjust *ref from the new operands. */
2989 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2990 return (void *)-1;
2991 /* This can happen with bitfields. */
2992 if (maybe_ne (ref->size, r.size))
2993 return (void *)-1;
2994 *ref = r;
2995
2996 /* Do not update last seen VUSE after translating. */
2997 data->last_vuse_ptr = NULL;
2998 /* Invalidate the original access path since it now contains
2999 the wrong base. */
3000 data->orig_ref.ref = NULL_TREE;
3001
3002 /* Keep looking for the adjusted *REF / VR pair. */
3003 return NULL;
3004 }
3005
3006 /* Bail out and stop walking. */
3007 return (void *)-1;
3008 }
3009
3010 /* Return a reference op vector from OP that can be used for
3011 vn_reference_lookup_pieces. The caller is responsible for releasing
3012 the vector. */
3013
3014 vec<vn_reference_op_s>
3015 vn_reference_operands_for_lookup (tree op)
3016 {
3017 bool valueized;
3018 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3019 }
3020
3021 /* Lookup a reference operation by its parts, in the current hash table.
3022 Returns the resulting value number if it exists in the hash table,
3023 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3024 vn_reference_t stored in the hashtable if something is found. */
3025
3026 tree
3027 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
3028 vec<vn_reference_op_s> operands,
3029 vn_reference_t *vnresult, vn_lookup_kind kind)
3030 {
3031 struct vn_reference_s vr1;
3032 vn_reference_t tmp;
3033 tree cst;
3034
3035 if (!vnresult)
3036 vnresult = &tmp;
3037 *vnresult = NULL;
3038
3039 vr1.vuse = vuse_ssa_val (vuse);
3040 shared_lookup_references.truncate (0);
3041 shared_lookup_references.safe_grow (operands.length ());
3042 memcpy (shared_lookup_references.address (),
3043 operands.address (),
3044 sizeof (vn_reference_op_s)
3045 * operands.length ());
3046 vr1.operands = operands = shared_lookup_references
3047 = valueize_refs (shared_lookup_references);
3048 vr1.type = type;
3049 vr1.set = set;
3050 vr1.hashcode = vn_reference_compute_hash (&vr1);
3051 if ((cst = fully_constant_vn_reference_p (&vr1)))
3052 return cst;
3053
3054 vn_reference_lookup_1 (&vr1, vnresult);
3055 if (!*vnresult
3056 && kind != VN_NOWALK
3057 && vr1.vuse)
3058 {
3059 ao_ref r;
3060 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3061 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
3062 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
3063 *vnresult =
3064 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
3065 vn_reference_lookup_2,
3066 vn_reference_lookup_3,
3067 vuse_valueize, limit, &data);
3068 gcc_checking_assert (vr1.operands == shared_lookup_references);
3069 }
3070
3071 if (*vnresult)
3072 return (*vnresult)->result;
3073
3074 return NULL_TREE;
3075 }
3076
3077 /* Lookup OP in the current hash table, and return the resulting value
3078 number if it exists in the hash table. Return NULL_TREE if it does
3079 not exist in the hash table or if the result field of the structure
3080 was NULL. VNRESULT will be filled in with the vn_reference_t
3081 stored in the hashtable if one exists. When TBAA_P is false assume
3082 we are looking up a store and treat it as having alias-set zero.
3083 *LAST_VUSE_PTR will be updated with the VUSE at which the value lookup succeeded. */
3084
3085 tree
3086 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3087 vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
3088 {
3089 vec<vn_reference_op_s> operands;
3090 struct vn_reference_s vr1;
3091 tree cst;
3092 bool valueized_anything;
3093
3094 if (vnresult)
3095 *vnresult = NULL;
3096
3097 vr1.vuse = vuse_ssa_val (vuse);
3098 vr1.operands = operands
3099 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3100 vr1.type = TREE_TYPE (op);
3101 vr1.set = get_alias_set (op);
3102 vr1.hashcode = vn_reference_compute_hash (&vr1);
3103 if ((cst = fully_constant_vn_reference_p (&vr1)))
3104 return cst;
3105
3106 if (kind != VN_NOWALK
3107 && vr1.vuse)
3108 {
3109 vn_reference_t wvnresult;
3110 ao_ref r;
3111 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3112 /* Make sure to use a valueized reference if we valueized anything.
3113 Otherwise preserve the full reference for advanced TBAA. */
3114 if (!valueized_anything
3115 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
3116 vr1.operands))
3117 ao_ref_init (&r, op);
3118 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3119 last_vuse_ptr, kind, tbaa_p);
3120 wvnresult =
3121 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
3122 vn_reference_lookup_2,
3123 vn_reference_lookup_3,
3124 vuse_valueize, limit, &data);
3125 gcc_checking_assert (vr1.operands == shared_lookup_references);
3126 if (wvnresult)
3127 {
3128 if (vnresult)
3129 *vnresult = wvnresult;
3130 return wvnresult->result;
3131 }
3132
3133 return NULL_TREE;
3134 }
3135
3136 return vn_reference_lookup_1 (&vr1, vnresult);
3137 }
3138
3139 /* Lookup CALL in the current hash table and return the entry in
3140 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3141
3142 void
3143 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3144 vn_reference_t vr)
3145 {
3146 if (vnresult)
3147 *vnresult = NULL;
3148
3149 tree vuse = gimple_vuse (call);
3150
3151 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3152 vr->operands = valueize_shared_reference_ops_from_call (call);
3153 vr->type = gimple_expr_type (call);
3154 vr->set = 0;
3155 vr->hashcode = vn_reference_compute_hash (vr);
3156 vn_reference_lookup_1 (vr, vnresult);
3157 }
3158
3159 /* Insert OP into the current hash table with a value number of RESULT. */
3160
3161 static void
3162 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3163 {
3164 vn_reference_s **slot;
3165 vn_reference_t vr1;
3166 bool tem;
3167
3168 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3169 if (TREE_CODE (result) == SSA_NAME)
3170 vr1->value_id = VN_INFO (result)->value_id;
3171 else
3172 vr1->value_id = get_or_alloc_constant_value_id (result);
3173 vr1->vuse = vuse_ssa_val (vuse);
3174 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3175 vr1->type = TREE_TYPE (op);
3176 vr1->set = get_alias_set (op);
3177 vr1->hashcode = vn_reference_compute_hash (vr1);
3178 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3179 vr1->result_vdef = vdef;
3180
3181 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3182 INSERT);
3183
3184 /* Because IL walking on reference lookup can end up visiting
3185 a def that is only to be visited later in iteration order
3186 when we are about to make an irreducible region reducible,
3187 the def can effectively be processed and its ref inserted
3188 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
3189 but instead save a lookup if we deal with already inserted refs here. */
3190 if (*slot)
3191 {
3192 /* We cannot assert that we have the same value either because
3193 when disentangling an irreducible region we may end up visiting
3194 a use before the corresponding def. That's a missed optimization
3195 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3196 if (dump_file && (dump_flags & TDF_DETAILS)
3197 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3198 {
3199 fprintf (dump_file, "Keeping old value ");
3200 print_generic_expr (dump_file, (*slot)->result);
3201 fprintf (dump_file, " because of collision\n");
3202 }
3203 free_reference (vr1);
3204 obstack_free (&vn_tables_obstack, vr1);
3205 return;
3206 }
3207
3208 *slot = vr1;
3209 vr1->next = last_inserted_ref;
3210 last_inserted_ref = vr1;
3211 }
3212
3213 /* Insert a reference by its pieces into the current hash table with
3214 a value number of RESULT. Return the resulting reference
3215 structure we created. */
3216
3217 vn_reference_t
3218 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
3219 vec<vn_reference_op_s> operands,
3220 tree result, unsigned int value_id)
3221
3222 {
3223 vn_reference_s **slot;
3224 vn_reference_t vr1;
3225
3226 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3227 vr1->value_id = value_id;
3228 vr1->vuse = vuse_ssa_val (vuse);
3229 vr1->operands = valueize_refs (operands);
3230 vr1->type = type;
3231 vr1->set = set;
3232 vr1->hashcode = vn_reference_compute_hash (vr1);
3233 if (result && TREE_CODE (result) == SSA_NAME)
3234 result = SSA_VAL (result);
3235 vr1->result = result;
3236
3237 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3238 INSERT);
3239
3240 /* At this point we should have all the things inserted that we have
3241 seen before, and we should never try inserting something that
3242 already exists. */
3243 gcc_assert (!*slot);
3244
3245 *slot = vr1;
3246 vr1->next = last_inserted_ref;
3247 last_inserted_ref = vr1;
3248 return vr1;
3249 }
3250
3251 /* Compute and return the hash value for nary operation VNO1. */
3252
3253 static hashval_t
3254 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3255 {
3256 inchash::hash hstate;
3257 unsigned i;
3258
3259 for (i = 0; i < vno1->length; ++i)
3260 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3261 vno1->op[i] = SSA_VAL (vno1->op[i]);
3262
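/* Canonicalize the operand order so that, for example, a_1 + b_2 and
   b_2 + a_1 (or a_1 < b_2 and b_2 > a_1) receive the same hash.  */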
3263 if (((vno1->length == 2
3264 && commutative_tree_code (vno1->opcode))
3265 || (vno1->length == 3
3266 && commutative_ternary_tree_code (vno1->opcode)))
3267 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3268 std::swap (vno1->op[0], vno1->op[1]);
3269 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3270 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3271 {
3272 std::swap (vno1->op[0], vno1->op[1]);
3273 vno1->opcode = swap_tree_comparison (vno1->opcode);
3274 }
3275
3276 hstate.add_int (vno1->opcode);
3277 for (i = 0; i < vno1->length; ++i)
3278 inchash::add_expr (vno1->op[i], hstate);
3279
3280 return hstate.end ();
3281 }
3282
3283 /* Compare nary operations VNO1 and VNO2 and return true if they are
3284 equivalent. */
3285
3286 bool
3287 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3288 {
3289 unsigned i;
3290
3291 if (vno1->hashcode != vno2->hashcode)
3292 return false;
3293
3294 if (vno1->length != vno2->length)
3295 return false;
3296
3297 if (vno1->opcode != vno2->opcode
3298 || !types_compatible_p (vno1->type, vno2->type))
3299 return false;
3300
3301 for (i = 0; i < vno1->length; ++i)
3302 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3303 return false;
3304
3305 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3306 of op1. We need to check that they are the same. */
3307 if (vno1->opcode == BIT_INSERT_EXPR
3308 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3309 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3310 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3311 return false;
3312
3313 return true;
3314 }
3315
3316 /* Initialize VNO from the pieces provided. */
3317
3318 static void
3319 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3320 enum tree_code code, tree type, tree *ops)
3321 {
3322 vno->opcode = code;
3323 vno->length = length;
3324 vno->type = type;
3325 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3326 }
3327
3328 /* Initialize VNO from OP. */
3329
3330 static void
3331 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
3332 {
3333 unsigned i;
3334
3335 vno->opcode = TREE_CODE (op);
3336 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
3337 vno->type = TREE_TYPE (op);
3338 for (i = 0; i < vno->length; ++i)
3339 vno->op[i] = TREE_OPERAND (op, i);
3340 }
3341
3342 /* Return the number of operands for a vn_nary ops structure from STMT. */
3343
3344 static unsigned int
3345 vn_nary_length_from_stmt (gimple *stmt)
3346 {
3347 switch (gimple_assign_rhs_code (stmt))
3348 {
3349 case REALPART_EXPR:
3350 case IMAGPART_EXPR:
3351 case VIEW_CONVERT_EXPR:
3352 return 1;
3353
3354 case BIT_FIELD_REF:
3355 return 3;
3356
3357 case CONSTRUCTOR:
3358 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3359
3360 default:
3361 return gimple_num_ops (stmt) - 1;
3362 }
3363 }
3364
3365 /* Initialize VNO from STMT. */
3366
3367 static void
3368 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3369 {
3370 unsigned i;
3371
3372 vno->opcode = gimple_assign_rhs_code (stmt);
3373 vno->type = gimple_expr_type (stmt);
3374 switch (vno->opcode)
3375 {
3376 case REALPART_EXPR:
3377 case IMAGPART_EXPR:
3378 case VIEW_CONVERT_EXPR:
3379 vno->length = 1;
3380 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3381 break;
3382
3383 case BIT_FIELD_REF:
3384 vno->length = 3;
3385 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3386 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3387 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3388 break;
3389
3390 case CONSTRUCTOR:
3391 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3392 for (i = 0; i < vno->length; ++i)
3393 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3394 break;
3395
3396 default:
3397 gcc_checking_assert (!gimple_assign_single_p (stmt));
3398 vno->length = gimple_num_ops (stmt) - 1;
3399 for (i = 0; i < vno->length; ++i)
3400 vno->op[i] = gimple_op (stmt, i + 1);
3401 }
3402 }
3403
3404 /* Compute the hashcode for VNO and look for it in the hash table;
3405 return the resulting value number if it exists in the hash table.
3406 Return NULL_TREE if it does not exist in the hash table or if the
3407 result field of the operation is NULL. VNRESULT will contain the
3408 vn_nary_op_t from the hashtable if it exists. */
3409
3410 static tree
3411 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3412 {
3413 vn_nary_op_s **slot;
3414
3415 if (vnresult)
3416 *vnresult = NULL;
3417
3418 vno->hashcode = vn_nary_op_compute_hash (vno);
3419 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3420 if (!slot)
3421 return NULL_TREE;
3422 if (vnresult)
3423 *vnresult = *slot;
3424 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3425 }
3426
3427 /* Lookup an n-ary operation by its pieces and return the resulting value
3428 number if it exists in the hash table. Return NULL_TREE if it does
3429 not exist in the hash table or if the result field of the operation
3430 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3431 if it exists. */
3432
3433 tree
3434 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3435 tree type, tree *ops, vn_nary_op_t *vnresult)
3436 {
3437 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3438 sizeof_vn_nary_op (length));
3439 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3440 return vn_nary_op_lookup_1 (vno1, vnresult);
3441 }
3442
3443 /* Lookup OP in the current hash table, and return the resulting value
3444 number if it exists in the hash table. Return NULL_TREE if it does
3445 not exist in the hash table or if the result field of the operation
3446 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3447 if it exists. */
3448
3449 tree
3450 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
3451 {
3452 vn_nary_op_t vno1
3453 = XALLOCAVAR (struct vn_nary_op_s,
3454 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3455 init_vn_nary_op_from_op (vno1, op);
3456 return vn_nary_op_lookup_1 (vno1, vnresult);
3457 }
3458
3459 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3460 value number if it exists in the hash table. Return NULL_TREE if
3461 it does not exist in the hash table. VNRESULT will contain the
3462 vn_nary_op_t from the hashtable if it exists. */
3463
3464 tree
3465 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3466 {
3467 vn_nary_op_t vno1
3468 = XALLOCAVAR (struct vn_nary_op_s,
3469 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3470 init_vn_nary_op_from_stmt (vno1, stmt);
3471 return vn_nary_op_lookup_1 (vno1, vnresult);
3472 }
3473
3474 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3475
3476 static vn_nary_op_t
3477 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3478 {
3479 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3480 }
3481
3482 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3483 obstack. */
3484
3485 static vn_nary_op_t
3486 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3487 {
3488 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3489
3490 vno1->value_id = value_id;
3491 vno1->length = length;
3492 vno1->predicated_values = 0;
3493 vno1->u.result = result;
3494
3495 return vno1;
3496 }
3497
3498 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3499 VNO->HASHCODE first. */
3500
3501 static vn_nary_op_t
3502 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3503 bool compute_hash)
3504 {
3505 vn_nary_op_s **slot;
3506
3507 if (compute_hash)
3508 {
3509 vno->hashcode = vn_nary_op_compute_hash (vno);
3510 gcc_assert (! vno->predicated_values
3511 || (! vno->u.values->next
3512 && vno->u.values->n == 1));
3513 }
3514
3515 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3516 vno->unwind_to = *slot;
3517 if (*slot)
3518 {
3519 /* Prefer non-predicated values.
3520 ??? Only if those are constant, otherwise, with constant predicated
3521 value, turn them into predicated values with entry-block validity
3522 (??? but we always find the first valid result currently). */
3523 if ((*slot)->predicated_values
3524 && ! vno->predicated_values)
3525 {
3526 /* ??? We cannot remove *slot from the unwind stack list.
3527 For the moment we deal with this by skipping not found
3528 entries but this isn't ideal ... */
3529 *slot = vno;
3530 /* ??? Maintain a stack of states we can unwind in
3531 vn_nary_op_s? But how far do we unwind? In reality
3532 we need to push change records somewhere... Or not
3533 unwind vn_nary_op_s and linking them but instead
3534 unwind the results "list", linking that, which also
3535 doesn't move on hashtable resize. */
3536 /* We can also have a ->unwind_to recording *slot there.
3537 That way we can make u.values a fixed size array with
3538 recording the number of entries but of course we then
3539 have always N copies for each unwind_to-state. Or we
3540 make sure to only ever append and each unwinding will
3541 pop off one entry (but how to deal with predicated
3542 replaced with non-predicated here?) */
3543 vno->next = last_inserted_nary;
3544 last_inserted_nary = vno;
3545 return vno;
3546 }
3547 else if (vno->predicated_values
3548 && ! (*slot)->predicated_values)
3549 return *slot;
3550 else if (vno->predicated_values
3551 && (*slot)->predicated_values)
3552 {
3553 /* ??? Factor this all into an insert_single_predicated_value
3554 routine. */
3555 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3556 basic_block vno_bb
3557 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3558 vn_pval *nval = vno->u.values;
3559 vn_pval **next = &vno->u.values;
3560 bool found = false;
3561 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3562 {
3563 if (expressions_equal_p (val->result, vno->u.values->result))
3564 {
3565 found = true;
3566 for (unsigned i = 0; i < val->n; ++i)
3567 {
3568 basic_block val_bb
3569 = BASIC_BLOCK_FOR_FN (cfun,
3570 val->valid_dominated_by_p[i]);
3571 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3572 /* Value registered with more generic predicate. */
3573 return *slot;
3574 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3575 /* Shouldn't happen, we insert in RPO order. */
3576 gcc_unreachable ();
3577 }
3578 /* Append value. */
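/* vn_pval ends in a one-element valid_dominated_by_p[] array (see the
copy below which allocates val->n - 1 extra ints for val->n entries),
so sizeof (vn_pval) plus room for val->n more ints gives space for
val->n + 1 block indices. */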
3579 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3580 sizeof (vn_pval)
3581 + val->n * sizeof (int));
3582 (*next)->next = NULL;
3583 (*next)->result = val->result;
3584 (*next)->n = val->n + 1;
3585 memcpy ((*next)->valid_dominated_by_p,
3586 val->valid_dominated_by_p,
3587 val->n * sizeof (int));
3588 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3589 next = &(*next)->next;
3590 if (dump_file && (dump_flags & TDF_DETAILS))
3591 fprintf (dump_file, "Appending predicate to value.\n");
3592 continue;
3593 }
3594 /* Copy other predicated values. */
3595 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3596 sizeof (vn_pval)
3597 + (val->n-1) * sizeof (int));
3598 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3599 (*next)->next = NULL;
3600 next = &(*next)->next;
3601 }
3602 if (!found)
3603 *next = nval;
3604
3605 *slot = vno;
3606 vno->next = last_inserted_nary;
3607 last_inserted_nary = vno;
3608 return vno;
3609 }
3610
3611 /* While we do not want to insert things twice it's awkward to
3612 avoid it in the case where visit_nary_op pattern-matches stuff
3613 and ends up simplifying the replacement to itself. We then
3614 get two inserts, one from visit_nary_op and one from
3615 vn_nary_build_or_lookup.
3616 So allow inserts with the same value number. */
3617 if ((*slot)->u.result == vno->u.result)
3618 return *slot;
3619 }
3620
3621 /* ??? There's also optimistic vs. previous committed state merging
3622 that is problematic for the case of unwinding. */
3623
3624 /* ??? We should return NULL if we do not use 'vno' and have the
3625 caller release it. */
3626 gcc_assert (!*slot);
3627
3628 *slot = vno;
3629 vno->next = last_inserted_nary;
3630 last_inserted_nary = vno;
3631 return vno;
3632 }
3633
3634 /* Insert an n-ary operation into the current hash table using its
3635 pieces. Return the vn_nary_op_t structure we created and put in
3636 the hashtable. */
3637
3638 vn_nary_op_t
3639 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3640 tree type, tree *ops,
3641 tree result, unsigned int value_id)
3642 {
3643 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3644 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3645 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3646 }
3647
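/* Like vn_nary_op_insert_pieces, but record RESULT as a predicated
value that is only valid in blocks dominated by the destination of
PRED_E. Returns NULL when the predicate cannot be tracked with the
current block-based scheme, see the checks below. E.g. (illustrative
names) for if (x_1 < y_2) the caller can record x_1 < y_2 == true
on the true edge and == false on the false edge. */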
3648 static vn_nary_op_t
3649 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3650 tree type, tree *ops,
3651 tree result, unsigned int value_id,
3652 edge pred_e)
3653 {
3654 /* ??? Currently tracking BBs. */
3655 if (! single_pred_p (pred_e->dest))
3656 {
3657 /* Never record for backedges. */
3658 if (pred_e->flags & EDGE_DFS_BACK)
3659 return NULL;
3660 edge_iterator ei;
3661 edge e;
3662 int cnt = 0;
3663 /* Ignore backedges. */
3664 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3665 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3666 cnt++;
3667 if (cnt != 1)
3668 return NULL;
3669 }
3670 if (dump_file && (dump_flags & TDF_DETAILS)
3671 /* ??? Fix dumping, but currently we only get comparisons. */
3672 && TREE_CODE_CLASS (code) == tcc_comparison)
3673 {
3674 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3675 pred_e->dest->index);
3676 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3677 fprintf (dump_file, " %s ", get_tree_code_name (code));
3678 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3679 fprintf (dump_file, " == %s\n",
3680 integer_zerop (result) ? "false" : "true");
3681 }
3682 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3683 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3684 vno1->predicated_values = 1;
3685 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3686 sizeof (vn_pval));
3687 vno1->u.values->next = NULL;
3688 vno1->u.values->result = result;
3689 vno1->u.values->n = 1;
3690 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3691 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3692 }
3693
3694 static bool
3695 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3696
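/* Return the result recorded in VNO that is valid in BB, i.e. whose
recorded block dominates BB (using dominated_by_p_w_unex, which also
accounts for non-executable edges), or NULL_TREE if there is none.
For a non-predicated VNO simply return its result. */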
3697 static tree
3698 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3699 {
3700 if (! vno->predicated_values)
3701 return vno->u.result;
3702 for (vn_pval *val = vno->u.values; val; val = val->next)
3703 for (unsigned i = 0; i < val->n; ++i)
3704 if (dominated_by_p_w_unex (bb,
3705 BASIC_BLOCK_FOR_FN
3706 (cfun, val->valid_dominated_by_p[i])))
3707 return val->result;
3708 return NULL_TREE;
3709 }
3710
3711 /* Insert OP into the current hash table with a value number of
3712 RESULT. Return the vn_nary_op_t structure we created and put in
3713 the hashtable. */
3714
3715 vn_nary_op_t
3716 vn_nary_op_insert (tree op, tree result)
3717 {
3718 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3719 vn_nary_op_t vno1;
3720
3721 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3722 init_vn_nary_op_from_op (vno1, op);
3723 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3724 }
3725
3726 /* Insert the rhs of STMT into the current hash table with a value number of
3727 RESULT. */
3728
3729 static vn_nary_op_t
3730 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3731 {
3732 vn_nary_op_t vno1
3733 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3734 result, VN_INFO (result)->value_id);
3735 init_vn_nary_op_from_stmt (vno1, stmt);
3736 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3737 }
3738
3739 /* Compute a hashcode for PHI operation VP1 and return it. */
3740
3741 static inline hashval_t
3742 vn_phi_compute_hash (vn_phi_t vp1)
3743 {
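/* PHIs with one or two predecessors may be CSEd across blocks (see
vn_phi_eq), so only mix the block index into the hash for PHIs with
more predecessors; otherwise seed with the predecessor count. */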
3744 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3745 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3746 tree phi1op;
3747 tree type;
3748 edge e;
3749 edge_iterator ei;
3750
3751 /* If all PHI arguments are constants we need to distinguish
3752 the PHI node via its type. */
3753 type = vp1->type;
3754 hstate.merge_hash (vn_hash_type (type));
3755
3756 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3757 {
3758 /* Don't hash backedge values; they need to be handled as VN_TOP
3759 for optimistic value-numbering. */
3760 if (e->flags & EDGE_DFS_BACK)
3761 continue;
3762
3763 phi1op = vp1->phiargs[e->dest_idx];
3764 if (phi1op == VN_TOP)
3765 continue;
3766 inchash::add_expr (phi1op, hstate);
3767 }
3768
3769 return hstate.end ();
3770 }
3771
3772
3773 /* Return true if COND1 and COND2 represent the same condition; set
3774 *INVERTED_P if one needs to be inverted to make it the same as
3775 the other. */
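/* E.g. (illustrative operands) a_1 < b_2 and b_2 > a_1 are the same
condition with LHS/RHS swapped, while a_1 < b_2 and a_1 >= b_2
compare equal with *INVERTED_P set, assuming NaNs do not need to
be honored. */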
3776
3777 static bool
3778 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3779 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3780 {
3781 enum tree_code code1 = gimple_cond_code (cond1);
3782 enum tree_code code2 = gimple_cond_code (cond2);
3783
3784 *inverted_p = false;
3785 if (code1 == code2)
3786 ;
3787 else if (code1 == swap_tree_comparison (code2))
3788 std::swap (lhs2, rhs2);
3789 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3790 *inverted_p = true;
3791 else if (code1 == invert_tree_comparison
3792 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3793 {
3794 std::swap (lhs2, rhs2);
3795 *inverted_p = true;
3796 }
3797 else
3798 return false;
3799
3800 return ((expressions_equal_p (lhs1, lhs2)
3801 && expressions_equal_p (rhs1, rhs2))
3802 || (commutative_tree_code (code1)
3803 && expressions_equal_p (lhs1, rhs2)
3804 && expressions_equal_p (rhs1, lhs2)));
3805 }
3806
3807 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3808
3809 static int
3810 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3811 {
3812 if (vp1->hashcode != vp2->hashcode)
3813 return false;
3814
3815 if (vp1->block != vp2->block)
3816 {
3817 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3818 return false;
3819
3820 switch (EDGE_COUNT (vp1->block->preds))
3821 {
3822 case 1:
3823 /* Single-arg PHIs are just copies. */
3824 break;
3825
3826 case 2:
3827 {
3828 /* Rule out backedges into the PHI. */
3829 if (vp1->block->loop_father->header == vp1->block
3830 || vp2->block->loop_father->header == vp2->block)
3831 return false;
3832
3833 /* If the PHI nodes do not have compatible types
3834 they are not the same. */
3835 if (!types_compatible_p (vp1->type, vp2->type))
3836 return false;
3837
3838 basic_block idom1
3839 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3840 basic_block idom2
3841 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3842 /* If the immediate dominators end in switch stmts, multiple
3843 values may end up in the same PHI arg via intermediate
3844 CFG merges. */
3845 if (EDGE_COUNT (idom1->succs) != 2
3846 || EDGE_COUNT (idom2->succs) != 2)
3847 return false;
3848
3849 /* Verify the controlling stmt is the same. */
3850 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3851 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3852 if (! last1 || ! last2)
3853 return false;
3854 bool inverted_p;
3855 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3856 last2, vp2->cclhs, vp2->ccrhs,
3857 &inverted_p))
3858 return false;
3859
3860 /* Get at true/false controlled edges into the PHI. */
3861 edge te1, te2, fe1, fe2;
3862 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3863 &te1, &fe1)
3864 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3865 &te2, &fe2))
3866 return false;
3867
3868 /* Swap edges if the second condition is the inverse of the
3869 first. */
3870 if (inverted_p)
3871 std::swap (te2, fe2);
3872
3873 /* ??? Handle VN_TOP specially. */
3874 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3875 vp2->phiargs[te2->dest_idx])
3876 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3877 vp2->phiargs[fe2->dest_idx]))
3878 return false;
3879
3880 return true;
3881 }
3882
3883 default:
3884 return false;
3885 }
3886 }
3887
3888 /* If the PHI nodes do not have compatible types
3889 they are not the same. */
3890 if (!types_compatible_p (vp1->type, vp2->type))
3891 return false;
3892
3893 /* Any phi in the same block will have its arguments in the
3894 same edge order, because of how we store phi nodes. */
3895 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3896 {
3897 tree phi1op = vp1->phiargs[i];
3898 tree phi2op = vp2->phiargs[i];
3899 if (phi1op == VN_TOP || phi2op == VN_TOP)
3900 continue;
3901 if (!expressions_equal_p (phi1op, phi2op))
3902 return false;
3903 }
3904
3905 return true;
3906 }
3907
3908 /* Lookup PHI in the current hash table, and return the resulting
3909 value number if it exists in the hash table. Return NULL_TREE if
3910 it does not exist in the hash table. */
3911
3912 static tree
3913 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3914 {
3915 vn_phi_s **slot;
3916 struct vn_phi_s *vp1;
3917 edge e;
3918 edge_iterator ei;
3919
3920 vp1 = XALLOCAVAR (struct vn_phi_s,
3921 sizeof (struct vn_phi_s)
3922 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3923
3924 /* Canonicalize the SSA_NAME's to their value number. */
3925 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3926 {
3927 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3928 if (TREE_CODE (def) == SSA_NAME
3929 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3930 def = SSA_VAL (def);
3931 vp1->phiargs[e->dest_idx] = def;
3932 }
3933 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3934 vp1->block = gimple_bb (phi);
3935 /* Extract values of the controlling condition. */
3936 vp1->cclhs = NULL_TREE;
3937 vp1->ccrhs = NULL_TREE;
3938 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3939 if (EDGE_COUNT (idom1->succs) == 2)
3940 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3941 {
3942 /* ??? We want to use SSA_VAL here. But possibly not
3943 allow VN_TOP. */
3944 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3945 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3946 }
3947 vp1->hashcode = vn_phi_compute_hash (vp1);
3948 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3949 if (!slot)
3950 return NULL_TREE;
3951 return (*slot)->result;
3952 }
3953
3954 /* Insert PHI into the current hash table with a value number of
3955 RESULT. */
3956
3957 static vn_phi_t
3958 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3959 {
3960 vn_phi_s **slot;
3961 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3962 sizeof (vn_phi_s)
3963 + ((gimple_phi_num_args (phi) - 1)
3964 * sizeof (tree)));
3965 edge e;
3966 edge_iterator ei;
3967
3968 /* Canonicalize the SSA_NAME's to their value number. */
3969 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3970 {
3971 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3972 if (TREE_CODE (def) == SSA_NAME
3973 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3974 def = SSA_VAL (def);
3975 vp1->phiargs[e->dest_idx] = def;
3976 }
3977 vp1->value_id = VN_INFO (result)->value_id;
3978 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3979 vp1->block = gimple_bb (phi);
3980 /* Extract values of the controlling condition. */
3981 vp1->cclhs = NULL_TREE;
3982 vp1->ccrhs = NULL_TREE;
3983 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3984 if (EDGE_COUNT (idom1->succs) == 2)
3985 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3986 {
3987 /* ??? We want to use SSA_VAL here. But possibly not
3988 allow VN_TOP. */
3989 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3990 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3991 }
3992 vp1->result = result;
3993 vp1->hashcode = vn_phi_compute_hash (vp1);
3994
3995 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3996 gcc_assert (!*slot);
3997
3998 *slot = vp1;
3999 vp1->next = last_inserted_phi;
4000 last_inserted_phi = vp1;
4001 return vp1;
4002 }
4003
4004
4005 /* Return true if BB1 is dominated by BB2 taking into account edges
4006 that are not executable. */
4007
4008 static bool
4009 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
4010 {
4011 edge_iterator ei;
4012 edge e;
4013
4014 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4015 return true;
4016
4017 /* Before iterating we'd like to know if there exists an
4018 (executable) path from bb2 to bb1 at all; if not, we can
4019 directly return false. For now simply iterate once. */
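/* E.g. (illustrative) if bb1 has two predecessors but only the edge
from bb3 is executable and bb2 dominates bb3, then stepping bb1 to
bb3 below lets us answer true even though bb2 does not dominate bb1
in the plain CFG. */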
4020
4021 /* Iterate to the single executable bb1 predecessor. */
4022 if (EDGE_COUNT (bb1->preds) > 1)
4023 {
4024 edge prede = NULL;
4025 FOR_EACH_EDGE (e, ei, bb1->preds)
4026 if (e->flags & EDGE_EXECUTABLE)
4027 {
4028 if (prede)
4029 {
4030 prede = NULL;
4031 break;
4032 }
4033 prede = e;
4034 }
4035 if (prede)
4036 {
4037 bb1 = prede->src;
4038
4039 /* Re-do the dominance check with changed bb1. */
4040 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4041 return true;
4042 }
4043 }
4044
4045 /* Iterate to the single executable bb2 successor. */
4046 edge succe = NULL;
4047 FOR_EACH_EDGE (e, ei, bb2->succs)
4048 if (e->flags & EDGE_EXECUTABLE)
4049 {
4050 if (succe)
4051 {
4052 succe = NULL;
4053 break;
4054 }
4055 succe = e;
4056 }
4057 if (succe)
4058 {
4059 /* Verify the reached block is only reached through succe.
4060 If there is only one edge we can spare us the dominator
4061 check and iterate directly. */
4062 if (EDGE_COUNT (succe->dest->preds) > 1)
4063 {
4064 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4065 if (e != succe
4066 && (e->flags & EDGE_EXECUTABLE))
4067 {
4068 succe = NULL;
4069 break;
4070 }
4071 }
4072 if (succe)
4073 {
4074 bb2 = succe->dest;
4075
4076 /* Re-do the dominance check with changed bb2. */
4077 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4078 return true;
4079 }
4080 }
4081
4082 /* We could now iterate updating bb1 / bb2. */
4083 return false;
4084 }
4085
4086 /* Set the value number of FROM to TO, return true if it has changed
4087 as a result. */
4088
4089 static inline bool
4090 set_ssa_val_to (tree from, tree to)
4091 {
4092 vn_ssa_aux_t from_info = VN_INFO (from);
4093 tree currval = from_info->valnum; // SSA_VAL (from)
4094 poly_int64 toff, coff;
4095
4096 /* The only things we allow as value numbers are ssa_names
4097 and invariants. So assert that here. We don't allow VN_TOP
4098 as visiting a stmt should produce a value-number other than
4099 that.
4100 ??? Still VN_TOP can happen for unreachable code, so force
4101 it to varying in that case. Not all code is prepared to
4102 get VN_TOP on valueization. */
4103 if (to == VN_TOP)
4104 {
4105 /* ??? When iterating and visiting PHI <undef, backedge-value>
4106 for the first time we rightfully get VN_TOP and we need to
4107 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4108 With SCCVN we were simply lucky we iterated the other PHI
4109 cycles first and thus visited the backedge-value DEF. */
4110 if (currval == VN_TOP)
4111 goto set_and_exit;
4112 if (dump_file && (dump_flags & TDF_DETAILS))
4113 fprintf (dump_file, "Forcing value number to varying on "
4114 "receiving VN_TOP\n");
4115 to = from;
4116 }
4117
4118 gcc_checking_assert (to != NULL_TREE
4119 && ((TREE_CODE (to) == SSA_NAME
4120 && (to == from || SSA_VAL (to) == to))
4121 || is_gimple_min_invariant (to)));
4122
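/* The checks below keep the lattice transitions monotone: a name
whose current value is neither VN_TOP, constant nor undefined is
not changed to a constant, and one that is neither VN_TOP nor
undefined is not changed to an undefined name; VARYING is forced
instead, which helps the iteration terminate (see also the PR82320
comment further down). */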
4123 if (from != to)
4124 {
4125 if (currval == from)
4126 {
4127 if (dump_file && (dump_flags & TDF_DETAILS))
4128 {
4129 fprintf (dump_file, "Not changing value number of ");
4130 print_generic_expr (dump_file, from);
4131 fprintf (dump_file, " from VARYING to ");
4132 print_generic_expr (dump_file, to);
4133 fprintf (dump_file, "\n");
4134 }
4135 return false;
4136 }
4137 bool curr_invariant = is_gimple_min_invariant (currval);
4138 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
4139 && ssa_undefined_value_p (currval, false));
4140 if (currval != VN_TOP
4141 && !curr_invariant
4142 && !curr_undefined
4143 && is_gimple_min_invariant (to))
4144 {
4145 if (dump_file && (dump_flags & TDF_DETAILS))
4146 {
4147 fprintf (dump_file, "Forcing VARYING instead of changing "
4148 "value number of ");
4149 print_generic_expr (dump_file, from);
4150 fprintf (dump_file, " from ");
4151 print_generic_expr (dump_file, currval);
4152 fprintf (dump_file, " (non-constant) to ");
4153 print_generic_expr (dump_file, to);
4154 fprintf (dump_file, " (constant)\n");
4155 }
4156 to = from;
4157 }
4158 else if (currval != VN_TOP
4159 && !curr_undefined
4160 && TREE_CODE (to) == SSA_NAME
4161 && ssa_undefined_value_p (to, false))
4162 {
4163 if (dump_file && (dump_flags & TDF_DETAILS))
4164 {
4165 fprintf (dump_file, "Forcing VARYING instead of changing "
4166 "value number of ");
4167 print_generic_expr (dump_file, from);
4168 fprintf (dump_file, " from ");
4169 print_generic_expr (dump_file, currval);
4170 fprintf (dump_file, " (non-undefined) to ");
4171 print_generic_expr (dump_file, to);
4172 fprintf (dump_file, " (undefined)\n");
4173 }
4174 to = from;
4175 }
4176 else if (TREE_CODE (to) == SSA_NAME
4177 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4178 to = from;
4179 }
4180
4181 set_and_exit:
4182 if (dump_file && (dump_flags & TDF_DETAILS))
4183 {
4184 fprintf (dump_file, "Setting value number of ");
4185 print_generic_expr (dump_file, from);
4186 fprintf (dump_file, " to ");
4187 print_generic_expr (dump_file, to);
4188 }
4189
4190 if (currval != to
4191 && !operand_equal_p (currval, to, 0)
4192 /* Different undefined SSA names are not actually different. See
4193 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4194 && !(TREE_CODE (currval) == SSA_NAME
4195 && TREE_CODE (to) == SSA_NAME
4196 && ssa_undefined_value_p (currval, false)
4197 && ssa_undefined_value_p (to, false))
4198 /* ??? For addresses involving volatile objects or types, operand_equal_p
4199 does not reliably detect ADDR_EXPRs as equal. We know we are only
4200 getting invariant gimple addresses here, so can use
4201 get_addr_base_and_unit_offset to do this comparison. */
4202 && !(TREE_CODE (currval) == ADDR_EXPR
4203 && TREE_CODE (to) == ADDR_EXPR
4204 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4205 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4206 && known_eq (coff, toff)))
4207 {
4208 if (dump_file && (dump_flags & TDF_DETAILS))
4209 fprintf (dump_file, " (changed)\n");
4210 from_info->valnum = to;
4211 return true;
4212 }
4213 if (dump_file && (dump_flags & TDF_DETAILS))
4214 fprintf (dump_file, "\n");
4215 return false;
4216 }
4217
4218 /* Set all definitions in STMT to value number to themselves.
4219 Return true if a value number changed. */
4220
4221 static bool
4222 defs_to_varying (gimple *stmt)
4223 {
4224 bool changed = false;
4225 ssa_op_iter iter;
4226 def_operand_p defp;
4227
4228 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4229 {
4230 tree def = DEF_FROM_PTR (defp);
4231 changed |= set_ssa_val_to (def, def);
4232 }
4233 return changed;
4234 }
4235
4236 /* Visit a copy between LHS and RHS, return true if the value number
4237 changed. */
4238
4239 static bool
4240 visit_copy (tree lhs, tree rhs)
4241 {
4242 /* Valueize. */
4243 rhs = SSA_VAL (rhs);
4244
4245 return set_ssa_val_to (lhs, rhs);
4246 }
4247
4248 /* Look up a value of type WIDE_TYPE that, converted to the type of OP,
4249 has the same value as OP. */
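/* E.g. (illustrative) given
     int i_2 = (int) l_3;
   with l_3 of type long, valueized_wider_op (long, i_2) returns l_3
   via the truncation case below; an INTEGER_CST OP is simply
   extended to WIDE_TYPE. */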
4250
4251 static tree
4252 valueized_wider_op (tree wide_type, tree op)
4253 {
4254 if (TREE_CODE (op) == SSA_NAME)
4255 op = vn_valueize (op);
4256
4257 /* Either we have the op widened available. */
4258 tree ops[3] = {};
4259 ops[0] = op;
4260 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4261 wide_type, ops, NULL);
4262 if (tem)
4263 return tem;
4264
4265 /* Or the op is truncated from some existing value. */
4266 if (TREE_CODE (op) == SSA_NAME)
4267 {
4268 gimple *def = SSA_NAME_DEF_STMT (op);
4269 if (is_gimple_assign (def)
4270 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4271 {
4272 tem = gimple_assign_rhs1 (def);
4273 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4274 {
4275 if (TREE_CODE (tem) == SSA_NAME)
4276 tem = vn_valueize (tem);
4277 return tem;
4278 }
4279 }
4280 }
4281
4282 /* For constants simply extend it. */
4283 if (TREE_CODE (op) == INTEGER_CST)
4284 return wide_int_to_tree (wide_type, wi::to_wide (op));
4285
4286 return NULL_TREE;
4287 }
4288
4289 /* Visit a nary operator RHS, value number it, and return true if the
4290 value number of LHS has changed as a result. */
4291
4292 static bool
4293 visit_nary_op (tree lhs, gassign *stmt)
4294 {
4295 vn_nary_op_t vnresult;
4296 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4297 if (! result && vnresult)
4298 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4299 if (result)
4300 return set_ssa_val_to (lhs, result);
4301
4302 /* Do some special pattern matching for redundancies of operations
4303 in different types. */
4304 enum tree_code code = gimple_assign_rhs_code (stmt);
4305 tree type = TREE_TYPE (lhs);
4306 tree rhs1 = gimple_assign_rhs1 (stmt);
4307 switch (code)
4308 {
4309 CASE_CONVERT:
4310 /* Match arithmetic done in a different type where we can easily
4311 substitute the result from some earlier sign-changed or widened
4312 operation. */
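/* A sketch of the zero-extension case handled here (illustrative
   names):
     unsigned short t_1 = a_2 + b_3;
     unsigned int x_4 = (unsigned int) t_1;
   If the wider w_5 = (unsigned int) a_2 + (unsigned int) b_3 is
   already available then x_4 is valued as w_5 & 0xffff; for a pure
   sign-change (equal precision) it is simply (type) w_5. */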
4313 if (INTEGRAL_TYPE_P (type)
4314 && TREE_CODE (rhs1) == SSA_NAME
4315 /* We only handle sign-changes or zero-extension -> & mask. */
4316 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4317 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4318 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4319 {
4320 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4321 if (def
4322 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4323 || gimple_assign_rhs_code (def) == MINUS_EXPR
4324 || gimple_assign_rhs_code (def) == MULT_EXPR))
4325 {
4326 tree ops[3] = {};
4327 /* Either we have the op widened available. */
4328 ops[0] = valueized_wider_op (type,
4329 gimple_assign_rhs1 (def));
4330 if (ops[0])
4331 ops[1] = valueized_wider_op (type,
4332 gimple_assign_rhs2 (def));
4333 if (ops[0] && ops[1])
4334 {
4335 ops[0] = vn_nary_op_lookup_pieces
4336 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4337 /* We have wider operation available. */
4338 if (ops[0]
4339 /* If the leader is a wrapping operation we can
4340 insert it for code hoisting w/o introducing
4341 undefined overflow. If it is not it has to
4342 be available. See PR86554. */
4343 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4344 || (rpo_avail && vn_context_bb
4345 && rpo_avail->eliminate_avail (vn_context_bb,
4346 ops[0]))))
4347 {
4348 unsigned lhs_prec = TYPE_PRECISION (type);
4349 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4350 if (lhs_prec == rhs_prec)
4351 {
4352 gimple_match_op match_op (gimple_match_cond::UNCOND,
4353 NOP_EXPR, type, ops[0]);
4354 result = vn_nary_build_or_lookup (&match_op);
4355 if (result)
4356 {
4357 bool changed = set_ssa_val_to (lhs, result);
4358 vn_nary_op_insert_stmt (stmt, result);
4359 return changed;
4360 }
4361 }
4362 else
4363 {
4364 tree mask = wide_int_to_tree
4365 (type, wi::mask (rhs_prec, false, lhs_prec));
4366 gimple_match_op match_op (gimple_match_cond::UNCOND,
4367 BIT_AND_EXPR,
4368 TREE_TYPE (lhs),
4369 ops[0], mask);
4370 result = vn_nary_build_or_lookup (&match_op);
4371 if (result)
4372 {
4373 bool changed = set_ssa_val_to (lhs, result);
4374 vn_nary_op_insert_stmt (stmt, result);
4375 return changed;
4376 }
4377 }
4378 }
4379 }
4380 }
4381 }
4382 default:;
4383 }
4384
4385 bool changed = set_ssa_val_to (lhs, lhs);
4386 vn_nary_op_insert_stmt (stmt, lhs);
4387 return changed;
4388 }
4389
4390 /* Visit a call STMT storing into LHS. Return true if the value number
4391 of the LHS has changed as a result. */
4392
4393 static bool
4394 visit_reference_op_call (tree lhs, gcall *stmt)
4395 {
4396 bool changed = false;
4397 struct vn_reference_s vr1;
4398 vn_reference_t vnresult = NULL;
4399 tree vdef = gimple_vdef (stmt);
4400
4401 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4402 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4403 lhs = NULL_TREE;
4404
4405 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4406 if (vnresult)
4407 {
4408 if (vnresult->result_vdef && vdef)
4409 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4410 else if (vdef)
4411 /* If the call was discovered to be pure or const reflect
4412 that as far as possible. */
4413 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4414
4415 if (!vnresult->result && lhs)
4416 vnresult->result = lhs;
4417
4418 if (vnresult->result && lhs)
4419 changed |= set_ssa_val_to (lhs, vnresult->result);
4420 }
4421 else
4422 {
4423 vn_reference_t vr2;
4424 vn_reference_s **slot;
4425 tree vdef_val = vdef;
4426 if (vdef)
4427 {
4428 /* If we value numbered an indirect call's function to
4429 one not clobbering memory, value number its VDEF to its
4430 VUSE. */
4431 tree fn = gimple_call_fn (stmt);
4432 if (fn && TREE_CODE (fn) == SSA_NAME)
4433 {
4434 fn = SSA_VAL (fn);
4435 if (TREE_CODE (fn) == ADDR_EXPR
4436 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4437 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4438 & (ECF_CONST | ECF_PURE)))
4439 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4440 }
4441 changed |= set_ssa_val_to (vdef, vdef_val);
4442 }
4443 if (lhs)
4444 changed |= set_ssa_val_to (lhs, lhs);
4445 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4446 vr2->vuse = vr1.vuse;
4447 /* As we are not walking the virtual operand chain we know the
4448 shared_lookup_references are still original so we can re-use
4449 them here. */
4450 vr2->operands = vr1.operands.copy ();
4451 vr2->type = vr1.type;
4452 vr2->set = vr1.set;
4453 vr2->hashcode = vr1.hashcode;
4454 vr2->result = lhs;
4455 vr2->result_vdef = vdef_val;
4456 vr2->value_id = 0;
4457 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4458 INSERT);
4459 gcc_assert (!*slot);
4460 *slot = vr2;
4461 vr2->next = last_inserted_ref;
4462 last_inserted_ref = vr2;
4463 }
4464
4465 return changed;
4466 }
4467
4468 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4469 and return true if the value number of the LHS has changed as a result. */
4470
4471 static bool
4472 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4473 {
4474 bool changed = false;
4475 tree last_vuse;
4476 tree result;
4477
4478 last_vuse = gimple_vuse (stmt);
4479 result = vn_reference_lookup (op, gimple_vuse (stmt),
4480 default_vn_walk_kind, NULL, true, &last_vuse);
4481
4482 /* We handle type-punning through unions by value-numbering based
4483 on offset and size of the access. Be prepared to handle a
4484 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
4485 if (result
4486 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4487 {
4488 /* We will be setting the value number of lhs to the value number
4489 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4490 So first simplify and lookup this expression to see if it
4491 is already available. */
4492 gimple_match_op res_op (gimple_match_cond::UNCOND,
4493 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4494 result = vn_nary_build_or_lookup (&res_op);
4495 /* When building the conversion fails, avoid inserting the reference
4496 again. */
4497 if (!result)
4498 return set_ssa_val_to (lhs, lhs);
4499 }
4500
4501 if (result)
4502 changed = set_ssa_val_to (lhs, result);
4503 else
4504 {
4505 changed = set_ssa_val_to (lhs, lhs);
4506 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4507 }
4508
4509 return changed;
4510 }
4511
4512
4513 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4514 and return true if the value number of the LHS has changed as a result. */
4515
4516 static bool
4517 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4518 {
4519 bool changed = false;
4520 vn_reference_t vnresult = NULL;
4521 tree assign;
4522 bool resultsame = false;
4523 tree vuse = gimple_vuse (stmt);
4524 tree vdef = gimple_vdef (stmt);
4525
4526 if (TREE_CODE (op) == SSA_NAME)
4527 op = SSA_VAL (op);
4528
4529 /* First we want to look up using the *vuses* from the store and see
4530 if the last store to this location with the same address
4531 had the same value.
4532
4533 The vuses represent the memory state before the store. If the
4534 memory state, address, and value of the store is the same as the
4535 last store to this location, then this store will produce the
4536 same memory state as that store.
4537
4538 In this case the vdef versions for this store are value numbered to those
4539 vuse versions, since they represent the same memory state after
4540 this store.
4541
4542 Otherwise, the vdefs for the store are used when inserting into
4543 the table, since the store generates a new memory state. */
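/* For example (illustrative GIMPLE, not from a particular testcase):

     # .MEM_2 = VDEF <.MEM_1>
     a.x = x_1;
     # .MEM_3 = VDEF <.MEM_2>
     a.x = x_1;

   the first store is recorded with vuse .MEM_2, the lookup for the
   second store with vuse .MEM_2 finds it with the same value, and
   .MEM_3 receives the value number of .MEM_2. */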
4544
4545 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4546 if (vnresult
4547 && vnresult->result)
4548 {
4549 tree result = vnresult->result;
4550 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4551 || result == SSA_VAL (result));
4552 resultsame = expressions_equal_p (result, op);
4553 if (resultsame)
4554 {
4555 /* If the TBAA state isn't compatible for downstream reads
4556 we cannot value-number the VDEFs the same. */
4557 alias_set_type set = get_alias_set (lhs);
4558 if (vnresult->set != set
4559 && ! alias_set_subset_of (set, vnresult->set))
4560 resultsame = false;
4561 }
4562 }
4563
4564 if (!resultsame)
4565 {
4566 /* Only perform the following when being called from PRE
4567 which embeds tail merging. */
4568 if (default_vn_walk_kind == VN_WALK)
4569 {
4570 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4571 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4572 if (vnresult)
4573 {
4574 VN_INFO (vdef)->visited = true;
4575 return set_ssa_val_to (vdef, vnresult->result_vdef);
4576 }
4577 }
4578
4579 if (dump_file && (dump_flags & TDF_DETAILS))
4580 {
4581 fprintf (dump_file, "No store match\n");
4582 fprintf (dump_file, "Value numbering store ");
4583 print_generic_expr (dump_file, lhs);
4584 fprintf (dump_file, " to ");
4585 print_generic_expr (dump_file, op);
4586 fprintf (dump_file, "\n");
4587 }
4588 /* Have to set value numbers before insert, since insert is
4589 going to valueize the references in-place. */
4590 if (vdef)
4591 changed |= set_ssa_val_to (vdef, vdef);
4592
4593 /* Do not insert structure copies into the tables. */
4594 if (is_gimple_min_invariant (op)
4595 || is_gimple_reg (op))
4596 vn_reference_insert (lhs, op, vdef, NULL);
4597
4598 /* Only perform the following when being called from PRE
4599 which embeds tail merging. */
4600 if (default_vn_walk_kind == VN_WALK)
4601 {
4602 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4603 vn_reference_insert (assign, lhs, vuse, vdef);
4604 }
4605 }
4606 else
4607 {
4608 /* We had a match, so value number the vdef to have the value
4609 number of the vuse it came from. */
4610
4611 if (dump_file && (dump_flags & TDF_DETAILS))
4612 fprintf (dump_file, "Store matched earlier value, "
4613 "value numbering store vdefs to matching vuses.\n");
4614
4615 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4616 }
4617
4618 return changed;
4619 }
4620
4621 /* Visit and value number PHI, return true if the value number
4622 changed. When BACKEDGES_VARYING_P is true then assume all
4623 backedge values are varying. When INSERTED is not NULL then
4624 this is just a look-ahead query for a possible iteration; set INSERTED
4625 to true if we'd insert into the hashtable. */
4626
4627 static bool
4628 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4629 {
4630 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4631 tree backedge_val = NULL_TREE;
4632 bool seen_non_backedge = false;
4633 tree sameval_base = NULL_TREE;
4634 poly_int64 soff, doff;
4635 unsigned n_executable = 0;
4636 edge_iterator ei;
4637 edge e;
4638
4639 /* TODO: We could check for this in initialization, and replace this
4640 with a gcc_assert. */
4641 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4642 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4643
4644 /* We track whether a PHI was CSEd to, to avoid excessive iterations
4645 that would be necessary only because the PHI changed arguments
4646 but not value. */
4647 if (!inserted)
4648 gimple_set_plf (phi, GF_PLF_1, false);
4649
4650 /* See if all non-TOP arguments have the same value. TOP is
4651 equivalent to everything, so we can ignore it. */
4652 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4653 if (e->flags & EDGE_EXECUTABLE)
4654 {
4655 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4656
4657 ++n_executable;
4658 if (TREE_CODE (def) == SSA_NAME)
4659 {
4660 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4661 def = SSA_VAL (def);
4662 if (e->flags & EDGE_DFS_BACK)
4663 backedge_val = def;
4664 }
4665 if (!(e->flags & EDGE_DFS_BACK))
4666 seen_non_backedge = true;
4667 if (def == VN_TOP)
4668 ;
4669 /* Ignore undefined defs for sameval but record one. */
4670 else if (TREE_CODE (def) == SSA_NAME
4671 && ! virtual_operand_p (def)
4672 && ssa_undefined_value_p (def, false))
4673 seen_undef = def;
4674 else if (sameval == VN_TOP)
4675 sameval = def;
4676 else if (!expressions_equal_p (def, sameval))
4677 {
4678 /* We know we're arriving only with invariant addresses here;
4679 try harder comparing them. We can do some caching here
4680 which we cannot do in expressions_equal_p. */
4681 if (TREE_CODE (def) == ADDR_EXPR
4682 && TREE_CODE (sameval) == ADDR_EXPR
4683 && sameval_base != (void *)-1)
4684 {
4685 if (!sameval_base)
4686 sameval_base = get_addr_base_and_unit_offset
4687 (TREE_OPERAND (sameval, 0), &soff);
4688 if (!sameval_base)
4689 sameval_base = (tree)(void *)-1;
4690 else if ((get_addr_base_and_unit_offset
4691 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4692 && known_eq (soff, doff))
4693 continue;
4694 }
4695 sameval = NULL_TREE;
4696 break;
4697 }
4698 }
4699
4700 /* If the value we want to use is flowing over the backedge and we
4701 should take it as VARYING but it has a non-VARYING value, drop to
4702 VARYING.
4703 If we value-number a virtual operand, never value-number to the
4704 value from the backedge as that confuses the alias-walking code.
4705 See gcc.dg/torture/pr87176.c. If the value is the same on a
4706 non-backedge everything is OK though. */
4707 bool visited_p;
4708 if ((backedge_val
4709 && !seen_non_backedge
4710 && TREE_CODE (backedge_val) == SSA_NAME
4711 && sameval == backedge_val
4712 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4713 || SSA_VAL (backedge_val) != backedge_val))
4714 /* Do not value-number a virtual operand to something not visited though,
4715 given that allows us to escape a region in alias walking. */
4716 || (sameval
4717 && TREE_CODE (sameval) == SSA_NAME
4718 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4719 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4720 && (SSA_VAL (sameval, &visited_p), !visited_p)))
4721 /* Note this just drops to VARYING without inserting the PHI into
4722 the hashes. */
4723 result = PHI_RESULT (phi);
4724 /* If none of the edges was executable keep the value-number at VN_TOP;
4725 if only a single edge is executable use its value. */
4726 else if (n_executable <= 1)
4727 result = seen_undef ? seen_undef : sameval;
4728 /* If we saw only undefined values and VN_TOP use one of the
4729 undefined values. */
4730 else if (sameval == VN_TOP)
4731 result = seen_undef ? seen_undef : sameval;
4732 /* First see if it is equivalent to a phi node in this block. We prefer
4733 this as it allows IV elimination - see PRs 66502 and 67167. */
4734 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4735 {
4736 if (!inserted
4737 && TREE_CODE (result) == SSA_NAME
4738 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4739 {
4740 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4741 if (dump_file && (dump_flags & TDF_DETAILS))
4742 {
4743 fprintf (dump_file, "Marking CSEd to PHI node ");
4744 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4745 0, TDF_SLIM);
4746 fprintf (dump_file, "\n");
4747 }
4748 }
4749 }
4750 /* If all values are the same use that, unless we've seen undefined
4751 values as well and the value isn't constant.
4752 CCP/copyprop have the same restriction to not remove uninit warnings. */
4753 else if (sameval
4754 && (! seen_undef || is_gimple_min_invariant (sameval)))
4755 result = sameval;
4756 else
4757 {
4758 result = PHI_RESULT (phi);
4759 /* Only insert PHIs that are varying; for constant value numbers
4760 we would mess up equivalences otherwise as we are only comparing
4761 the immediate controlling predicates. */
4762 vn_phi_insert (phi, result, backedges_varying_p);
4763 if (inserted)
4764 *inserted = true;
4765 }
4766
4767 return set_ssa_val_to (PHI_RESULT (phi), result);
4768 }
4769
4770 /* Try to simplify RHS using equivalences and constant folding. */
4771
4772 static tree
4773 try_to_simplify (gassign *stmt)
4774 {
4775 enum tree_code code = gimple_assign_rhs_code (stmt);
4776 tree tem;
4777
4778 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4779 in this case; there is no point in doing extra work. */
4780 if (code == SSA_NAME)
4781 return NULL_TREE;
4782
4783 /* First try constant folding based on our current lattice. */
4784 mprts_hook = vn_lookup_simplify_result;
4785 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4786 mprts_hook = NULL;
4787 if (tem
4788 && (TREE_CODE (tem) == SSA_NAME
4789 || is_gimple_min_invariant (tem)))
4790 return tem;
4791
4792 return NULL_TREE;
4793 }
4794
4795 /* Visit and value number STMT, return true if the value number
4796 changed. */
4797
4798 static bool
4799 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4800 {
4801 bool changed = false;
4802
4803 if (dump_file && (dump_flags & TDF_DETAILS))
4804 {
4805 fprintf (dump_file, "Value numbering stmt = ");
4806 print_gimple_stmt (dump_file, stmt, 0);
4807 }
4808
4809 if (gimple_code (stmt) == GIMPLE_PHI)
4810 changed = visit_phi (stmt, NULL, backedges_varying_p);
4811 else if (gimple_has_volatile_ops (stmt))
4812 changed = defs_to_varying (stmt);
4813 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4814 {
4815 enum tree_code code = gimple_assign_rhs_code (ass);
4816 tree lhs = gimple_assign_lhs (ass);
4817 tree rhs1 = gimple_assign_rhs1 (ass);
4818 tree simplified;
4819
4820 /* Shortcut for copies. Simplifying copies is pointless,
4821 since we copy the expression and value they represent. */
4822 if (code == SSA_NAME
4823 && TREE_CODE (lhs) == SSA_NAME)
4824 {
4825 changed = visit_copy (lhs, rhs1);
4826 goto done;
4827 }
4828 simplified = try_to_simplify (ass);
4829 if (simplified)
4830 {
4831 if (dump_file && (dump_flags & TDF_DETAILS))
4832 {
4833 fprintf (dump_file, "RHS ");
4834 print_gimple_expr (dump_file, ass, 0);
4835 fprintf (dump_file, " simplified to ");
4836 print_generic_expr (dump_file, simplified);
4837 fprintf (dump_file, "\n");
4838 }
4839 }
4840 /* Setting value numbers to constants will occasionally
4841 screw up phi congruence because constants are not
4842 uniquely associated with a single ssa name that can be
4843 looked up. */
4844 if (simplified
4845 && is_gimple_min_invariant (simplified)
4846 && TREE_CODE (lhs) == SSA_NAME)
4847 {
4848 changed = set_ssa_val_to (lhs, simplified);
4849 goto done;
4850 }
4851 else if (simplified
4852 && TREE_CODE (simplified) == SSA_NAME
4853 && TREE_CODE (lhs) == SSA_NAME)
4854 {
4855 changed = visit_copy (lhs, simplified);
4856 goto done;
4857 }
4858
4859 if ((TREE_CODE (lhs) == SSA_NAME
4860 /* We can substitute SSA_NAMEs that are live over
4861 abnormal edges with their constant value. */
4862 && !(gimple_assign_copy_p (ass)
4863 && is_gimple_min_invariant (rhs1))
4864 && !(simplified
4865 && is_gimple_min_invariant (simplified))
4866 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4867 /* Stores or copies from SSA_NAMEs that are live over
4868 abnormal edges are a problem. */
4869 || (code == SSA_NAME
4870 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4871 changed = defs_to_varying (ass);
4872 else if (REFERENCE_CLASS_P (lhs)
4873 || DECL_P (lhs))
4874 changed = visit_reference_op_store (lhs, rhs1, ass);
4875 else if (TREE_CODE (lhs) == SSA_NAME)
4876 {
4877 if ((gimple_assign_copy_p (ass)
4878 && is_gimple_min_invariant (rhs1))
4879 || (simplified
4880 && is_gimple_min_invariant (simplified)))
4881 {
4882 if (simplified)
4883 changed = set_ssa_val_to (lhs, simplified);
4884 else
4885 changed = set_ssa_val_to (lhs, rhs1);
4886 }
4887 else
4888 {
4889 /* Visit the original statement. */
4890 switch (vn_get_stmt_kind (ass))
4891 {
4892 case VN_NARY:
4893 changed = visit_nary_op (lhs, ass);
4894 break;
4895 case VN_REFERENCE:
4896 changed = visit_reference_op_load (lhs, rhs1, ass);
4897 break;
4898 default:
4899 changed = defs_to_varying (ass);
4900 break;
4901 }
4902 }
4903 }
4904 else
4905 changed = defs_to_varying (ass);
4906 }
4907 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4908 {
4909 tree lhs = gimple_call_lhs (call_stmt);
4910 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4911 {
4912 /* Try constant folding based on our current lattice. */
4913 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4914 vn_valueize);
4915 if (simplified)
4916 {
4917 if (dump_file && (dump_flags & TDF_DETAILS))
4918 {
4919 fprintf (dump_file, "call ");
4920 print_gimple_expr (dump_file, call_stmt, 0);
4921 fprintf (dump_file, " simplified to ");
4922 print_generic_expr (dump_file, simplified);
4923 fprintf (dump_file, "\n");
4924 }
4925 }
4926 /* Setting value numbers to constants will occasionally
4927 screw up phi congruence because constants are not
4928 uniquely associated with a single ssa name that can be
4929 looked up. */
4930 if (simplified
4931 && is_gimple_min_invariant (simplified))
4932 {
4933 changed = set_ssa_val_to (lhs, simplified);
4934 if (gimple_vdef (call_stmt))
4935 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4936 SSA_VAL (gimple_vuse (call_stmt)));
4937 goto done;
4938 }
4939 else if (simplified
4940 && TREE_CODE (simplified) == SSA_NAME)
4941 {
4942 changed = visit_copy (lhs, simplified);
4943 if (gimple_vdef (call_stmt))
4944 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4945 SSA_VAL (gimple_vuse (call_stmt)));
4946 goto done;
4947 }
4948 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4949 {
4950 changed = defs_to_varying (call_stmt);
4951 goto done;
4952 }
4953 }
4954
4955 /* Pick up flags from a devirtualization target. */
4956 tree fn = gimple_call_fn (stmt);
4957 int extra_fnflags = 0;
4958 if (fn && TREE_CODE (fn) == SSA_NAME)
4959 {
4960 fn = SSA_VAL (fn);
4961 if (TREE_CODE (fn) == ADDR_EXPR
4962 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4963 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4964 }
4965 if (!gimple_call_internal_p (call_stmt)
4966 && (/* Calls to the same function with the same vuse
4967 and the same operands do not necessarily return the same
4968 value, unless they're pure or const. */
4969 ((gimple_call_flags (call_stmt) | extra_fnflags)
4970 & (ECF_PURE | ECF_CONST))
4971 /* If calls have a vdef, subsequent calls won't have
4972 the same incoming vuse. So, if 2 calls with vdef have the
4973 same vuse, we know they're not subsequent.
4974 We can value number 2 calls to the same function with the
4975 same vuse and the same operands which are not subsequent
4976 the same, because there is no code in the program that can
4977 compare the 2 values... */
4978 || (gimple_vdef (call_stmt)
4979 /* ... unless the call returns a pointer which does
4980 not alias with anything else, in which case the
4981 information that the values are distinct is encoded
4982 in the IL. */
4983 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4984 /* Only perform the following when being called from PRE
4985 which embeds tail merging. */
4986 && default_vn_walk_kind == VN_WALK)))
4987 changed = visit_reference_op_call (lhs, call_stmt);
4988 else
4989 changed = defs_to_varying (call_stmt);
4990 }
4991 else
4992 changed = defs_to_varying (stmt);
4993 done:
4994 return changed;
4995 }
4996
4997
4998 /* Allocate a value number table. */
4999
5000 static void
5001 allocate_vn_table (vn_tables_t table, unsigned size)
5002 {
5003 table->phis = new vn_phi_table_type (size);
5004 table->nary = new vn_nary_op_table_type (size);
5005 table->references = new vn_reference_table_type (size);
5006 }
5007
5008 /* Free a value number table. */
5009
5010 static void
5011 free_vn_table (vn_tables_t table)
5012 {
5013 /* Walk over elements and release vectors. */
5014 vn_reference_iterator_type hir;
5015 vn_reference_t vr;
5016 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5017 vr->operands.release ();
5018 delete table->phis;
5019 table->phis = NULL;
5020 delete table->nary;
5021 table->nary = NULL;
5022 delete table->references;
5023 table->references = NULL;
5024 }
5025
5026 /* Set *ID according to RESULT. */
5027
5028 static void
5029 set_value_id_for_result (tree result, unsigned int *id)
5030 {
5031 if (result && TREE_CODE (result) == SSA_NAME)
5032 *id = VN_INFO (result)->value_id;
5033 else if (result && is_gimple_min_invariant (result))
5034 *id = get_or_alloc_constant_value_id (result);
5035 else
5036 *id = get_next_value_id ();
5037 }
5038
5039 /* Set the value ids in the valid hash tables. */
5040
5041 static void
5042 set_hashtable_value_ids (void)
5043 {
5044 vn_nary_op_iterator_type hin;
5045 vn_phi_iterator_type hip;
5046 vn_reference_iterator_type hir;
5047 vn_nary_op_t vno;
5048 vn_reference_t vr;
5049 vn_phi_t vp;
5050
5051 /* Now set the value ids of the things we had put in the hash
5052 table. */
5053
5054 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5055 if (! vno->predicated_values)
5056 set_value_id_for_result (vno->u.result, &vno->value_id);
5057
5058 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5059 set_value_id_for_result (vp->result, &vp->value_id);
5060
5061 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5062 hir)
5063 set_value_id_for_result (vr->result, &vr->value_id);
5064 }
5065
5066 /* Return the maximum value id we have ever seen. */
5067
5068 unsigned int
5069 get_max_value_id (void)
5070 {
5071 return next_value_id;
5072 }
5073
5074 /* Return the next unique value id. */
5075
5076 unsigned int
5077 get_next_value_id (void)
5078 {
5079 return next_value_id++;
5080 }
5081
5082
5083 /* Compare two expressions E1 and E2 and return true if they are equal. */
5084
5085 bool
5086 expressions_equal_p (tree e1, tree e2)
5087 {
5088 /* The obvious case. */
5089 if (e1 == e2)
5090 return true;
5091
5092 /* If either one is VN_TOP consider them equal. */
5093 if (e1 == VN_TOP || e2 == VN_TOP)
5094 return true;
5095
5096 /* If only one of them is null, they cannot be equal. */
5097 if (!e1 || !e2)
5098 return false;
5099
5100 /* Now perform the actual comparison. */
5101 if (TREE_CODE (e1) == TREE_CODE (e2)
5102 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5103 return true;
5104
5105 return false;
5106 }
5107
5108
5109 /* Return true if the nary operation NARY may trap. This is a copy
5110 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5111
5112 bool
5113 vn_nary_may_trap (vn_nary_op_t nary)
5114 {
5115 tree type;
5116 tree rhs2 = NULL_TREE;
5117 bool honor_nans = false;
5118 bool honor_snans = false;
5119 bool fp_operation = false;
5120 bool honor_trapv = false;
5121 bool handled, ret;
5122 unsigned i;
5123
5124 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5125 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5126 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5127 {
5128 type = nary->type;
5129 fp_operation = FLOAT_TYPE_P (type);
5130 if (fp_operation)
5131 {
5132 honor_nans = flag_trapping_math && !flag_finite_math_only;
5133 honor_snans = flag_signaling_nans != 0;
5134 }
5135 else if (INTEGRAL_TYPE_P (type)
5136 && TYPE_OVERFLOW_TRAPS (type))
5137 honor_trapv = true;
5138 }
5139 if (nary->length >= 2)
5140 rhs2 = nary->op[1];
5141 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5142 honor_trapv,
5143 honor_nans, honor_snans, rhs2,
5144 &handled);
5145 if (handled
5146 && ret)
5147 return true;
5148
5149 for (i = 0; i < nary->length; ++i)
5150 if (tree_could_trap_p (nary->op[i]))
5151 return true;
5152
5153 return false;
5154 }
5155
5156 /* Return true if the reference operation REF may trap. */
5157
5158 bool
5159 vn_reference_may_trap (vn_reference_t ref)
5160 {
5161 switch (ref->operands[0].opcode)
5162 {
5163 case MODIFY_EXPR:
5164 case CALL_EXPR:
5165 /* We do not handle calls. */
5166 case ADDR_EXPR:
5167 /* And toplevel address computations never trap. */
5168 return false;
5169 default:;
5170 }
5171
5172 vn_reference_op_t op;
5173 unsigned i;
5174 FOR_EACH_VEC_ELT (ref->operands, i, op)
5175 {
5176 switch (op->opcode)
5177 {
5178 case WITH_SIZE_EXPR:
5179 case TARGET_MEM_REF:
5180 /* Always variable. */
5181 return true;
5182 case COMPONENT_REF:
5183 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5184 return true;
5185 break;
5186 case ARRAY_RANGE_REF:
5187 case ARRAY_REF:
5188 if (TREE_CODE (op->op0) == SSA_NAME)
5189 return true;
5190 break;
5191 case MEM_REF:
5192 /* Nothing interesting in itself, the base is separate. */
5193 break;
5194 /* The following are the address bases. */
5195 case SSA_NAME:
5196 return true;
5197 case ADDR_EXPR:
5198 if (op->op0)
5199 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5200 return false;
5201 default:;
5202 }
5203 }
5204 return false;
5205 }
5206
5207 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5208 bitmap inserted_exprs_)
5209 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5210 el_todo (0), eliminations (0), insertions (0),
5211 inserted_exprs (inserted_exprs_)
5212 {
5213 need_eh_cleanup = BITMAP_ALLOC (NULL);
5214 need_ab_cleanup = BITMAP_ALLOC (NULL);
5215 }
5216
5217 eliminate_dom_walker::~eliminate_dom_walker ()
5218 {
5219 BITMAP_FREE (need_eh_cleanup);
5220 BITMAP_FREE (need_ab_cleanup);
5221 }
5222
5223 /* Return a leader for OP that is available at the current point of the
5224 eliminate domwalk. */
5225
5226 tree
5227 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5228 {
5229 tree valnum = VN_INFO (op)->valnum;
5230 if (TREE_CODE (valnum) == SSA_NAME)
5231 {
5232 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5233 return valnum;
5234 if (avail.length () > SSA_NAME_VERSION (valnum))
5235 return avail[SSA_NAME_VERSION (valnum)];
5236 }
5237 else if (is_gimple_min_invariant (valnum))
5238 return valnum;
5239 return NULL_TREE;
5240 }
5241
5242 /* At the current point of the eliminate domwalk make OP available. */
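/* Note the previous leader for the value, or OP itself when there
was none, is remembered on AVAIL_STACK so availability can be
unwound again when the domwalk leaves the dominated region. */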
5243
5244 void
5245 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5246 {
5247 tree valnum = VN_INFO (op)->valnum;
5248 if (TREE_CODE (valnum) == SSA_NAME)
5249 {
5250 if (avail.length () <= SSA_NAME_VERSION (valnum))
5251 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5252 tree pushop = op;
5253 if (avail[SSA_NAME_VERSION (valnum)])
5254 pushop = avail[SSA_NAME_VERSION (valnum)];
5255 avail_stack.safe_push (pushop);
5256 avail[SSA_NAME_VERSION (valnum)] = op;
5257 }
5258 }
5259
5260 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5261 the leader for the expression if insertion was successful. */
5262
5263 tree
5264 eliminate_dom_walker::eliminate_insert (basic_block bb,
5265 gimple_stmt_iterator *gsi, tree val)
5266 {
5267 /* We can insert a sequence with a single assignment only. */
5268 gimple_seq stmts = VN_INFO (val)->expr;
5269 if (!gimple_seq_singleton_p (stmts))
5270 return NULL_TREE;
5271 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5272 if (!stmt
5273 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5274 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5275 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5276 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5277 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5278 return NULL_TREE;
5279
5280 tree op = gimple_assign_rhs1 (stmt);
5281 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5282 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5283 op = TREE_OPERAND (op, 0);
5284 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5285 if (!leader)
5286 return NULL_TREE;
5287
5288 tree res;
5289 stmts = NULL;
5290 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5291 res = gimple_build (&stmts, BIT_FIELD_REF,
5292 TREE_TYPE (val), leader,
5293 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5294 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5295 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5296 res = gimple_build (&stmts, BIT_AND_EXPR,
5297 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5298 else
5299 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5300 TREE_TYPE (val), leader);
5301 if (TREE_CODE (res) != SSA_NAME
5302 || SSA_NAME_IS_DEFAULT_DEF (res)
5303 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5304 {
5305 gimple_seq_discard (stmts);
5306
5307 /* During propagation we have to treat SSA info conservatively
5308 and thus we can end up simplifying the inserted expression
5309 at elimination time to something not defined in stmts. */
5310 /* But then this is a redundancy we failed to detect, which means
5311 res now has two values. That doesn't play well with how
5312 we track availability here, so give up. */
5313 if (dump_file && (dump_flags & TDF_DETAILS))
5314 {
5315 if (TREE_CODE (res) == SSA_NAME)
5316 res = eliminate_avail (bb, res);
5317 if (res)
5318 {
5319 fprintf (dump_file, "Failed to insert expression for value ");
5320 print_generic_expr (dump_file, val);
5321 fprintf (dump_file, " which is really fully redundant to ");
5322 print_generic_expr (dump_file, res);
5323 fprintf (dump_file, "\n");
5324 }
5325 }
5326
5327 return NULL_TREE;
5328 }
5329 else
5330 {
5331 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5332 VN_INFO (res)->valnum = val;
5333 VN_INFO (res)->visited = true;
5334 }
5335
5336 insertions++;
5337 if (dump_file && (dump_flags & TDF_DETAILS))
5338 {
5339 fprintf (dump_file, "Inserted ");
5340 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5341 }
5342
5343 return res;
5344 }
5345
5346 void
5347 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5348 {
5349 tree sprime = NULL_TREE;
5350 gimple *stmt = gsi_stmt (*gsi);
5351 tree lhs = gimple_get_lhs (stmt);
5352 if (lhs && TREE_CODE (lhs) == SSA_NAME
5353 && !gimple_has_volatile_ops (stmt)
5354 /* See PR43491. Do not replace a global register variable when
5355          it is the RHS of an assignment.  Do replace local register
5356          variables since gcc does not guarantee a local variable will
5357          be allocated in a register.
5358 ??? The fix isn't effective here. This should instead
5359 be ensured by not value-numbering them the same but treating
5360 them like volatiles? */
5361 && !(gimple_assign_single_p (stmt)
5362 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5363 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5364 && is_global_var (gimple_assign_rhs1 (stmt)))))
5365 {
5366 sprime = eliminate_avail (b, lhs);
5367 if (!sprime)
5368 {
5369 /* If there is no existing usable leader but SCCVN thinks
5370 it has an expression it wants to use as replacement,
5371 insert that. */
5372 tree val = VN_INFO (lhs)->valnum;
5373 if (val != VN_TOP
5374 && TREE_CODE (val) == SSA_NAME
5375 && VN_INFO (val)->needs_insertion
5376 && VN_INFO (val)->expr != NULL
5377 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5378 eliminate_push_avail (b, sprime);
5379 }
5380
5381       /* If this now constitutes a copy, duplicate points-to
5382 and range info appropriately. This is especially
5383 important for inserted code. See tree-ssa-copy.c
5384 for similar code. */
5385 if (sprime
5386 && TREE_CODE (sprime) == SSA_NAME)
5387 {
5388 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5389 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5390 && SSA_NAME_PTR_INFO (lhs)
5391 && ! SSA_NAME_PTR_INFO (sprime))
5392 {
5393 duplicate_ssa_name_ptr_info (sprime,
5394 SSA_NAME_PTR_INFO (lhs));
5395 if (b != sprime_b)
5396 mark_ptr_info_alignment_unknown
5397 (SSA_NAME_PTR_INFO (sprime));
5398 }
5399 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5400 && SSA_NAME_RANGE_INFO (lhs)
5401 && ! SSA_NAME_RANGE_INFO (sprime)
5402 && b == sprime_b)
5403 duplicate_ssa_name_range_info (sprime,
5404 SSA_NAME_RANGE_TYPE (lhs),
5405 SSA_NAME_RANGE_INFO (lhs));
5406 }
5407
5408 /* Inhibit the use of an inserted PHI on a loop header when
5409 the address of the memory reference is a simple induction
5410 variable. In other cases the vectorizer won't do anything
5411 anyway (either it's loop invariant or a complicated
5412 expression). */
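      /* Illustrative sketch (hypothetical names): when PRE inserted a loop
	 header PHI for the value of a load like  _8 = a[i_1]  and i_1 is a
	 simple induction variable inside the loop, replacing the load with
	 that PHI would introduce a loop-carried dependence, so we drop
	 SPRIME and keep the load instead.  */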
5413 if (sprime
5414 && TREE_CODE (sprime) == SSA_NAME
5415 && do_pre
5416 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5417 && loop_outer (b->loop_father)
5418 && has_zero_uses (sprime)
5419 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5420 && gimple_assign_load_p (stmt))
5421 {
5422 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5423 basic_block def_bb = gimple_bb (def_stmt);
5424 if (gimple_code (def_stmt) == GIMPLE_PHI
5425 && def_bb->loop_father->header == def_bb)
5426 {
5427 loop_p loop = def_bb->loop_father;
5428 ssa_op_iter iter;
5429 tree op;
5430 bool found = false;
5431 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5432 {
5433 affine_iv iv;
5434 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5435 if (def_bb
5436 && flow_bb_inside_loop_p (loop, def_bb)
5437 && simple_iv (loop, loop, op, &iv, true))
5438 {
5439 found = true;
5440 break;
5441 }
5442 }
5443 if (found)
5444 {
5445 if (dump_file && (dump_flags & TDF_DETAILS))
5446 {
5447 fprintf (dump_file, "Not replacing ");
5448 print_gimple_expr (dump_file, stmt, 0);
5449 fprintf (dump_file, " with ");
5450 print_generic_expr (dump_file, sprime);
5451 fprintf (dump_file, " which would add a loop"
5452 " carried dependence to loop %d\n",
5453 loop->num);
5454 }
5455 /* Don't keep sprime available. */
5456 sprime = NULL_TREE;
5457 }
5458 }
5459 }
5460
5461 if (sprime)
5462 {
5463 /* If we can propagate the value computed for LHS into
5464 all uses don't bother doing anything with this stmt. */
5465 if (may_propagate_copy (lhs, sprime))
5466 {
5467 /* Mark it for removal. */
5468 to_remove.safe_push (stmt);
5469
5470 /* ??? Don't count copy/constant propagations. */
5471 if (gimple_assign_single_p (stmt)
5472 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5473 || gimple_assign_rhs1 (stmt) == sprime))
5474 return;
5475
5476 if (dump_file && (dump_flags & TDF_DETAILS))
5477 {
5478 fprintf (dump_file, "Replaced ");
5479 print_gimple_expr (dump_file, stmt, 0);
5480 fprintf (dump_file, " with ");
5481 print_generic_expr (dump_file, sprime);
5482 fprintf (dump_file, " in all uses of ");
5483 print_gimple_stmt (dump_file, stmt, 0);
5484 }
5485
5486 eliminations++;
5487 return;
5488 }
5489
5490 /* If this is an assignment from our leader (which
5491 happens in the case the value-number is a constant)
5492 then there is nothing to do. */
5493 if (gimple_assign_single_p (stmt)
5494 && sprime == gimple_assign_rhs1 (stmt))
5495 return;
5496
5497 /* Else replace its RHS. */
5498 if (dump_file && (dump_flags & TDF_DETAILS))
5499 {
5500 fprintf (dump_file, "Replaced ");
5501 print_gimple_expr (dump_file, stmt, 0);
5502 fprintf (dump_file, " with ");
5503 print_generic_expr (dump_file, sprime);
5504 fprintf (dump_file, " in ");
5505 print_gimple_stmt (dump_file, stmt, 0);
5506 }
5507 eliminations++;
5508
5509 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5510 && stmt_can_make_abnormal_goto (stmt));
5511 gimple *orig_stmt = stmt;
5512 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5513 TREE_TYPE (sprime)))
5514 {
5515 /* We preserve conversions to but not from function or method
5516 types. This asymmetry makes it necessary to re-instantiate
5517 conversions here. */
5518 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5519 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5520 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5521 else
5522 gcc_unreachable ();
5523 }
5524 tree vdef = gimple_vdef (stmt);
5525 tree vuse = gimple_vuse (stmt);
5526 propagate_tree_value_into_stmt (gsi, sprime);
5527 stmt = gsi_stmt (*gsi);
5528 update_stmt (stmt);
5529 /* In case the VDEF on the original stmt was released, value-number
5530 it to the VUSE. This is to make vuse_ssa_val able to skip
5531 released virtual operands. */
5532 if (vdef != gimple_vdef (stmt))
5533 {
5534 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5535 VN_INFO (vdef)->valnum = vuse;
5536 }
5537
5538 /* If we removed EH side-effects from the statement, clean
5539 its EH information. */
5540 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5541 {
5542 bitmap_set_bit (need_eh_cleanup,
5543 gimple_bb (stmt)->index);
5544 if (dump_file && (dump_flags & TDF_DETAILS))
5545 fprintf (dump_file, " Removed EH side-effects.\n");
5546 }
5547
5548 /* Likewise for AB side-effects. */
5549 if (can_make_abnormal_goto
5550 && !stmt_can_make_abnormal_goto (stmt))
5551 {
5552 bitmap_set_bit (need_ab_cleanup,
5553 gimple_bb (stmt)->index);
5554 if (dump_file && (dump_flags & TDF_DETAILS))
5555 fprintf (dump_file, " Removed AB side-effects.\n");
5556 }
5557
5558 return;
5559 }
5560 }
5561
5562 /* If the statement is a scalar store, see if the expression
5563 has the same value number as its rhs. If so, the store is
5564 dead. */
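  /* For example (hypothetical GIMPLE):
       x_1 = *p_2;
       ...
       *p_2 = x_1;
     the second statement stores the value the memory already holds, so it
     can be queued for removal, subject to the alias-set check below.  */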
5565 if (gimple_assign_single_p (stmt)
5566 && !gimple_has_volatile_ops (stmt)
5567 && !is_gimple_reg (gimple_assign_lhs (stmt))
5568 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5569 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5570 {
5571 tree val;
5572 tree rhs = gimple_assign_rhs1 (stmt);
5573 vn_reference_t vnresult;
5574 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5575 &vnresult, false);
5576 if (TREE_CODE (rhs) == SSA_NAME)
5577 rhs = VN_INFO (rhs)->valnum;
5578 if (val
5579 && operand_equal_p (val, rhs, 0))
5580 {
5581 /* We can only remove the later store if the former aliases
5582 at least all accesses the later one does or if the store
5583 was to readonly memory storing the same value. */
5584 alias_set_type set = get_alias_set (lhs);
5585 if (! vnresult
5586 || vnresult->set == set
5587 || alias_set_subset_of (set, vnresult->set))
5588 {
5589 if (dump_file && (dump_flags & TDF_DETAILS))
5590 {
5591 fprintf (dump_file, "Deleted redundant store ");
5592 print_gimple_stmt (dump_file, stmt, 0);
5593 }
5594
5595 /* Queue stmt for removal. */
5596 to_remove.safe_push (stmt);
5597 return;
5598 }
5599 }
5600 }
5601
5602   /* If this is a control statement for which value numbering left some
5603      outgoing edges not executable, force the condition in a way
5604      consistent with that.  */
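  /* E.g. (illustrative only): if value numbering proved the false edge of
       if (a_1 > 10)
     can never be executed, we rewrite the condition via gimple_cond_make_true
     so it reads  if (1 != 0)  and CFG cleanup can remove the dead edge.  */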
5605 if (gcond *cond = dyn_cast <gcond *> (stmt))
5606 {
5607 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5608 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5609 {
5610 if (dump_file && (dump_flags & TDF_DETAILS))
5611 {
5612 fprintf (dump_file, "Removing unexecutable edge from ");
5613 print_gimple_stmt (dump_file, stmt, 0);
5614 }
5615 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5616 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5617 gimple_cond_make_true (cond);
5618 else
5619 gimple_cond_make_false (cond);
5620 update_stmt (cond);
5621 el_todo |= TODO_cleanup_cfg;
5622 return;
5623 }
5624 }
5625
5626 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5627 bool was_noreturn = (is_gimple_call (stmt)
5628 && gimple_call_noreturn_p (stmt));
5629 tree vdef = gimple_vdef (stmt);
5630 tree vuse = gimple_vuse (stmt);
5631
5632 /* If we didn't replace the whole stmt (or propagate the result
5633 into all uses), replace all uses on this stmt with their
5634 leaders. */
5635 bool modified = false;
5636 use_operand_p use_p;
5637 ssa_op_iter iter;
5638 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5639 {
5640 tree use = USE_FROM_PTR (use_p);
5641 /* ??? The call code above leaves stmt operands un-updated. */
5642 if (TREE_CODE (use) != SSA_NAME)
5643 continue;
5644 tree sprime;
5645 if (SSA_NAME_IS_DEFAULT_DEF (use))
5646 /* ??? For default defs BB shouldn't matter, but we have to
5647 solve the inconsistency between rpo eliminate and
5648 dom eliminate avail valueization first. */
5649 sprime = eliminate_avail (b, use);
5650 else
5651 /* Look for sth available at the definition block of the argument.
5652 This avoids inconsistencies between availability there which
5653 decides if the stmt can be removed and availability at the
5654 use site. The SSA property ensures that things available
5655 at the definition are also available at uses. */
5656 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5657 if (sprime && sprime != use
5658 && may_propagate_copy (use, sprime)
5659 /* We substitute into debug stmts to avoid excessive
5660 debug temporaries created by removed stmts, but we need
5661 to avoid doing so for inserted sprimes as we never want
5662 to create debug temporaries for them. */
5663 && (!inserted_exprs
5664 || TREE_CODE (sprime) != SSA_NAME
5665 || !is_gimple_debug (stmt)
5666 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5667 {
5668 propagate_value (use_p, sprime);
5669 modified = true;
5670 }
5671 }
5672
5673 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
5674 into which is a requirement for the IPA devirt machinery. */
5675 gimple *old_stmt = stmt;
5676 if (modified)
5677 {
5678 /* If a formerly non-invariant ADDR_EXPR is turned into an
5679 invariant one it was on a separate stmt. */
5680 if (gimple_assign_single_p (stmt)
5681 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5682 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5683 gimple_stmt_iterator prev = *gsi;
5684 gsi_prev (&prev);
5685 if (fold_stmt (gsi))
5686 {
5687          /* fold_stmt may have created new stmts in between
5688 the previous stmt and the folded stmt. Mark
5689 all defs created there as varying to not confuse
5690 the SCCVN machinery as we're using that even during
5691 elimination. */
5692 if (gsi_end_p (prev))
5693 prev = gsi_start_bb (b);
5694 else
5695 gsi_next (&prev);
5696 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5697 do
5698 {
5699 tree def;
5700 ssa_op_iter dit;
5701 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5702 dit, SSA_OP_ALL_DEFS)
5703 /* As existing DEFs may move between stmts
5704 only process new ones. */
5705 if (! has_VN_INFO (def))
5706 {
5707 VN_INFO (def)->valnum = def;
5708 VN_INFO (def)->visited = true;
5709 }
5710 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5711 break;
5712 gsi_next (&prev);
5713 }
5714 while (1);
5715 }
5716 stmt = gsi_stmt (*gsi);
5717 /* In case we folded the stmt away schedule the NOP for removal. */
5718 if (gimple_nop_p (stmt))
5719 to_remove.safe_push (stmt);
5720 }
5721
5722 /* Visit indirect calls and turn them into direct calls if
5723 possible using the devirtualization machinery. Do this before
5724      checking for required EH/abnormal/noreturn cleanup as devirt
5725 may expose more of those. */
5726 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5727 {
5728 tree fn = gimple_call_fn (call_stmt);
5729 if (fn
5730 && flag_devirtualize
5731 && virtual_method_call_p (fn))
5732 {
5733 tree otr_type = obj_type_ref_class (fn);
5734 unsigned HOST_WIDE_INT otr_tok
5735 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5736 tree instance;
5737 ipa_polymorphic_call_context context (current_function_decl,
5738 fn, stmt, &instance);
5739 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5740 otr_type, stmt, NULL);
5741 bool final;
5742 vec <cgraph_node *> targets
5743 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5744 otr_tok, context, &final);
5745 if (dump_file)
5746 dump_possible_polymorphic_call_targets (dump_file,
5747 obj_type_ref_class (fn),
5748 otr_tok, context);
5749 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5750 {
5751 tree fn;
5752 if (targets.length () == 1)
5753 fn = targets[0]->decl;
5754 else
5755 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5756 if (dump_enabled_p ())
5757 {
5758 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5759 "converting indirect call to "
5760 "function %s\n",
5761 lang_hooks.decl_printable_name (fn, 2));
5762 }
5763 gimple_call_set_fndecl (call_stmt, fn);
5764 /* If changing the call to __builtin_unreachable
5765 or similar noreturn function, adjust gimple_call_fntype
5766 too. */
5767 if (gimple_call_noreturn_p (call_stmt)
5768 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5769 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5770 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5771 == void_type_node))
5772 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5773 maybe_remove_unused_call_args (cfun, call_stmt);
5774 modified = true;
5775 }
5776 }
5777 }
5778
5779 if (modified)
5780 {
5781 /* When changing a call into a noreturn call, cfg cleanup
5782 is needed to fix up the noreturn call. */
5783 if (!was_noreturn
5784 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5785 to_fixup.safe_push (stmt);
5786       /* When changing a condition or switch into one where we know which
5787          edge will be executed, schedule a cfg cleanup.  */
5788 if ((gimple_code (stmt) == GIMPLE_COND
5789 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5790 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5791 || (gimple_code (stmt) == GIMPLE_SWITCH
5792 && TREE_CODE (gimple_switch_index
5793 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5794 el_todo |= TODO_cleanup_cfg;
5795 /* If we removed EH side-effects from the statement, clean
5796 its EH information. */
5797 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5798 {
5799 bitmap_set_bit (need_eh_cleanup,
5800 gimple_bb (stmt)->index);
5801 if (dump_file && (dump_flags & TDF_DETAILS))
5802 fprintf (dump_file, " Removed EH side-effects.\n");
5803 }
5804 /* Likewise for AB side-effects. */
5805 if (can_make_abnormal_goto
5806 && !stmt_can_make_abnormal_goto (stmt))
5807 {
5808 bitmap_set_bit (need_ab_cleanup,
5809 gimple_bb (stmt)->index);
5810 if (dump_file && (dump_flags & TDF_DETAILS))
5811 fprintf (dump_file, " Removed AB side-effects.\n");
5812 }
5813 update_stmt (stmt);
5814 /* In case the VDEF on the original stmt was released, value-number
5815 it to the VUSE. This is to make vuse_ssa_val able to skip
5816 released virtual operands. */
5817 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5818 VN_INFO (vdef)->valnum = vuse;
5819 }
5820
5821 /* Make new values available - for fully redundant LHS we
5822 continue with the next stmt above and skip this. */
5823 def_operand_p defp;
5824 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5825 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5826 }
5827
5828 /* Perform elimination for the basic-block B during the domwalk. */
5829
5830 edge
5831 eliminate_dom_walker::before_dom_children (basic_block b)
5832 {
5833 /* Mark new bb. */
5834 avail_stack.safe_push (NULL_TREE);
5835
5836   /* Skip blocks marked unreachable during the SCCVN domwalk.  */
5837 if (!(b->flags & BB_EXECUTABLE))
5838 return NULL;
5839
5840 vn_context_bb = b;
5841
5842 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5843 {
5844 gphi *phi = gsi.phi ();
5845 tree res = PHI_RESULT (phi);
5846
5847 if (virtual_operand_p (res))
5848 {
5849 gsi_next (&gsi);
5850 continue;
5851 }
5852
5853 tree sprime = eliminate_avail (b, res);
5854 if (sprime
5855 && sprime != res)
5856 {
5857 if (dump_file && (dump_flags & TDF_DETAILS))
5858 {
5859 fprintf (dump_file, "Replaced redundant PHI node defining ");
5860 print_generic_expr (dump_file, res);
5861 fprintf (dump_file, " with ");
5862 print_generic_expr (dump_file, sprime);
5863 fprintf (dump_file, "\n");
5864 }
5865
5866          /* If we inserted this PHI node ourselves, it's not an elimination.  */
5867 if (! inserted_exprs
5868 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5869 eliminations++;
5870
5871 /* If we will propagate into all uses don't bother to do
5872 anything. */
5873 if (may_propagate_copy (res, sprime))
5874 {
5875 /* Mark the PHI for removal. */
5876 to_remove.safe_push (phi);
5877 gsi_next (&gsi);
5878 continue;
5879 }
5880
5881 remove_phi_node (&gsi, false);
5882
5883 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5884 sprime = fold_convert (TREE_TYPE (res), sprime);
5885 gimple *stmt = gimple_build_assign (res, sprime);
5886 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5887 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5888 continue;
5889 }
5890
5891 eliminate_push_avail (b, res);
5892 gsi_next (&gsi);
5893 }
5894
5895 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5896 !gsi_end_p (gsi);
5897 gsi_next (&gsi))
5898 eliminate_stmt (b, &gsi);
5899
5900 /* Replace destination PHI arguments. */
5901 edge_iterator ei;
5902 edge e;
5903 FOR_EACH_EDGE (e, ei, b->succs)
5904 if (e->flags & EDGE_EXECUTABLE)
5905 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5906 !gsi_end_p (gsi);
5907 gsi_next (&gsi))
5908 {
5909 gphi *phi = gsi.phi ();
5910 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5911 tree arg = USE_FROM_PTR (use_p);
5912 if (TREE_CODE (arg) != SSA_NAME
5913 || virtual_operand_p (arg))
5914 continue;
5915 tree sprime = eliminate_avail (b, arg);
5916 if (sprime && may_propagate_copy (arg, sprime))
5917 propagate_value (use_p, sprime);
5918 }
5919
5920 vn_context_bb = NULL;
5921
5922 return NULL;
5923 }
5924
5925 /* Make no longer available leaders no longer available. */
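   /* In other words: entries pushed by eliminate_push_avail are popped until
      the NULL_TREE block marker pushed in before_dom_children is reached,
      restoring for each value the leader that was visible before we entered
      the just-finished dominator subtree (or clearing it if there was
      none).  */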
5926
5927 void
5928 eliminate_dom_walker::after_dom_children (basic_block)
5929 {
5930 tree entry;
5931 while ((entry = avail_stack.pop ()) != NULL_TREE)
5932 {
5933 tree valnum = VN_INFO (entry)->valnum;
5934 tree old = avail[SSA_NAME_VERSION (valnum)];
5935 if (old == entry)
5936 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5937 else
5938 avail[SSA_NAME_VERSION (valnum)] = entry;
5939 }
5940 }
5941
5942 /* Remove queued stmts and perform delayed cleanups. */
5943
5944 unsigned
5945 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5946 {
5947 statistics_counter_event (cfun, "Eliminated", eliminations);
5948 statistics_counter_event (cfun, "Insertions", insertions);
5949
5950 /* We cannot remove stmts during BB walk, especially not release SSA
5951 names there as this confuses the VN machinery. The stmts ending
5952 up in to_remove are either stores or simple copies.
5953 Remove stmts in reverse order to make debug stmt creation possible. */
5954 while (!to_remove.is_empty ())
5955 {
5956 bool do_release_defs = true;
5957 gimple *stmt = to_remove.pop ();
5958
5959 /* When we are value-numbering a region we do not require exit PHIs to
5960 be present so we have to make sure to deal with uses outside of the
5961          region of stmts that we thought were eliminated.
5962 ??? Note we may be confused by uses in dead regions we didn't run
5963 elimination on. Rather than checking individual uses we accept
5964 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5965          contains such an example).  */
5966 if (region_p)
5967 {
5968 if (gphi *phi = dyn_cast <gphi *> (stmt))
5969 {
5970 tree lhs = gimple_phi_result (phi);
5971 if (!has_zero_uses (lhs))
5972 {
5973 if (dump_file && (dump_flags & TDF_DETAILS))
5974 fprintf (dump_file, "Keeping eliminated stmt live "
5975 "as copy because of out-of-region uses\n");
5976 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5977 gimple *copy = gimple_build_assign (lhs, sprime);
5978 gimple_stmt_iterator gsi
5979 = gsi_after_labels (gimple_bb (stmt));
5980 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5981 do_release_defs = false;
5982 }
5983 }
5984 else if (tree lhs = gimple_get_lhs (stmt))
5985 if (TREE_CODE (lhs) == SSA_NAME
5986 && !has_zero_uses (lhs))
5987 {
5988 if (dump_file && (dump_flags & TDF_DETAILS))
5989 fprintf (dump_file, "Keeping eliminated stmt live "
5990 "as copy because of out-of-region uses\n");
5991 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5992 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5993 if (is_gimple_assign (stmt))
5994 {
5995 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5996 stmt = gsi_stmt (gsi);
5997 update_stmt (stmt);
5998 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
5999 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6000 continue;
6001 }
6002 else
6003 {
6004 gimple *copy = gimple_build_assign (lhs, sprime);
6005 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6006 do_release_defs = false;
6007 }
6008 }
6009 }
6010
6011 if (dump_file && (dump_flags & TDF_DETAILS))
6012 {
6013 fprintf (dump_file, "Removing dead stmt ");
6014 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6015 }
6016
6017 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6018 if (gimple_code (stmt) == GIMPLE_PHI)
6019 remove_phi_node (&gsi, do_release_defs);
6020 else
6021 {
6022 basic_block bb = gimple_bb (stmt);
6023 unlink_stmt_vdef (stmt);
6024 if (gsi_remove (&gsi, true))
6025 bitmap_set_bit (need_eh_cleanup, bb->index);
6026 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6027 bitmap_set_bit (need_ab_cleanup, bb->index);
6028 if (do_release_defs)
6029 release_defs (stmt);
6030 }
6031
6032 /* Removing a stmt may expose a forwarder block. */
6033 el_todo |= TODO_cleanup_cfg;
6034 }
6035
6036 /* Fixup stmts that became noreturn calls. This may require splitting
6037 blocks and thus isn't possible during the dominator walk. Do this
6038      in reverse order so we don't inadvertently remove a stmt we want to
6039      fix up by visiting a dominating now-noreturn call first.  */
6040 while (!to_fixup.is_empty ())
6041 {
6042 gimple *stmt = to_fixup.pop ();
6043
6044 if (dump_file && (dump_flags & TDF_DETAILS))
6045 {
6046 fprintf (dump_file, "Fixing up noreturn call ");
6047 print_gimple_stmt (dump_file, stmt, 0);
6048 }
6049
6050 if (fixup_noreturn_call (stmt))
6051 el_todo |= TODO_cleanup_cfg;
6052 }
6053
6054 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6055 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6056
6057 if (do_eh_cleanup)
6058 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6059
6060 if (do_ab_cleanup)
6061 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6062
6063 if (do_eh_cleanup || do_ab_cleanup)
6064 el_todo |= TODO_cleanup_cfg;
6065
6066 return el_todo;
6067 }
6068
6069 /* Eliminate fully redundant computations. */
6070
6071 unsigned
6072 eliminate_with_rpo_vn (bitmap inserted_exprs)
6073 {
6074 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6075
6076 walker.walk (cfun->cfg->x_entry_block_ptr);
6077 return walker.eliminate_cleanup ();
6078 }
6079
6080 static unsigned
6081 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6082 bool iterate, bool eliminate);
6083
6084 void
6085 run_rpo_vn (vn_lookup_kind kind)
6086 {
6087 default_vn_walk_kind = kind;
6088 do_rpo_vn (cfun, NULL, NULL, true, false);
6089
6090 /* ??? Prune requirement of these. */
6091 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6092 constant_value_ids = BITMAP_ALLOC (NULL);
6093
6094 /* Initialize the value ids and prune out remaining VN_TOPs
6095 from dead code. */
6096 tree name;
6097 unsigned i;
6098 FOR_EACH_SSA_NAME (i, name, cfun)
6099 {
6100 vn_ssa_aux_t info = VN_INFO (name);
6101 if (!info->visited
6102 || info->valnum == VN_TOP)
6103 info->valnum = name;
6104 if (info->valnum == name)
6105 info->value_id = get_next_value_id ();
6106 else if (is_gimple_min_invariant (info->valnum))
6107 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6108 }
6109
6110 /* Propagate. */
6111 FOR_EACH_SSA_NAME (i, name, cfun)
6112 {
6113 vn_ssa_aux_t info = VN_INFO (name);
6114 if (TREE_CODE (info->valnum) == SSA_NAME
6115 && info->valnum != name
6116 && info->value_id != VN_INFO (info->valnum)->value_id)
6117 info->value_id = VN_INFO (info->valnum)->value_id;
6118 }
6119
6120 set_hashtable_value_ids ();
6121
6122 if (dump_file && (dump_flags & TDF_DETAILS))
6123 {
6124 fprintf (dump_file, "Value numbers:\n");
6125 FOR_EACH_SSA_NAME (i, name, cfun)
6126 {
6127 if (VN_INFO (name)->visited
6128 && SSA_VAL (name) != name)
6129 {
6130 print_generic_expr (dump_file, name);
6131 fprintf (dump_file, " = ");
6132 print_generic_expr (dump_file, SSA_VAL (name));
6133 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6134 }
6135 }
6136 }
6137 }
6138
6139 /* Free VN associated data structures. */
6140
6141 void
6142 free_rpo_vn (void)
6143 {
6144 free_vn_table (valid_info);
6145 XDELETE (valid_info);
6146 obstack_free (&vn_tables_obstack, NULL);
6147 obstack_free (&vn_tables_insert_obstack, NULL);
6148
6149 vn_ssa_aux_iterator_type it;
6150 vn_ssa_aux_t info;
6151 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6152 if (info->needs_insertion)
6153 release_ssa_name (info->name);
6154 obstack_free (&vn_ssa_aux_obstack, NULL);
6155 delete vn_ssa_aux_hash;
6156
6157 delete constant_to_value_id;
6158 constant_to_value_id = NULL;
6159 BITMAP_FREE (constant_value_ids);
6160 }
6161
6162 /* Hook for maybe_push_res_to_seq, look up the expression in the VN tables.  */
6163
6164 static tree
6165 vn_lookup_simplify_result (gimple_match_op *res_op)
6166 {
6167 if (!res_op->code.is_tree_code ())
6168 return NULL_TREE;
6169 tree *ops = res_op->ops;
6170 unsigned int length = res_op->num_ops;
6171 if (res_op->code == CONSTRUCTOR
6172       /* ??? We can arrive here with SCCVN's view (a decomposed CONSTRUCTOR)
6173          or with GIMPLE's / match-and-simplify's view (CONSTRUCTOR as a GENERIC tree).  */
6174 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6175 {
6176 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6177 ops = XALLOCAVEC (tree, length);
6178 for (unsigned i = 0; i < length; ++i)
6179 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6180 }
6181 vn_nary_op_t vnresult = NULL;
6182 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6183 res_op->type, ops, &vnresult);
6184 /* If this is used from expression simplification make sure to
6185 return an available expression. */
6186 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6187 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6188 return res;
6189 }
6190
6191 /* Return a leader for OPs value that is valid at BB. */
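   /* Roughly: each value keeps a chain of vn_avail records (basic-block
      index, SSA name version) built by eliminate_push_avail; we return the
      first recorded leader whose block is BB itself or dominates BB (modulo
      the not-executable-region handling below), or NULL_TREE if none
      does.  */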
6192
6193 tree
6194 rpo_elim::eliminate_avail (basic_block bb, tree op)
6195 {
6196 bool visited;
6197 tree valnum = SSA_VAL (op, &visited);
6198 /* If we didn't visit OP then it must be defined outside of the
6199 region we process and also dominate it. So it is available. */
6200 if (!visited)
6201 return op;
6202 if (TREE_CODE (valnum) == SSA_NAME)
6203 {
6204 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6205 return valnum;
6206 vn_avail *av = VN_INFO (valnum)->avail;
6207 if (!av)
6208 return NULL_TREE;
6209 if (av->location == bb->index)
6210 /* On tramp3d 90% of the cases are here. */
6211 return ssa_name (av->leader);
6212 do
6213 {
6214 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6215 /* ??? During elimination we have to use availability at the
6216 definition site of a use we try to replace. This
6217 is required to not run into inconsistencies because
6218 of dominated_by_p_w_unex behavior and removing a definition
6219 while not replacing all uses.
6220 ??? We could try to consistently walk dominators
6221 ignoring non-executable regions. The nearest common
6222 dominator of bb and abb is where we can stop walking. We
6223 may also be able to "pre-compute" (bits of) the next immediate
6224 (non-)dominator during the RPO walk when marking edges as
6225 executable. */
6226 if (dominated_by_p_w_unex (bb, abb))
6227 {
6228 tree leader = ssa_name (av->leader);
6229 /* Prevent eliminations that break loop-closed SSA. */
6230 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6231 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6232 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6233 (leader))->loop_father,
6234 bb))
6235 return NULL_TREE;
6236 if (dump_file && (dump_flags & TDF_DETAILS))
6237 {
6238 print_generic_expr (dump_file, leader);
6239 fprintf (dump_file, " is available for ");
6240 print_generic_expr (dump_file, valnum);
6241 fprintf (dump_file, "\n");
6242 }
6243 /* On tramp3d 99% of the _remaining_ cases succeed at
6244                 the first entry.  */
6245 return leader;
6246 }
6247 /* ??? Can we somehow skip to the immediate dominator
6248             RPO index (bb_to_rpo)?  Again, maybe not worth it; on
6249 tramp3d the worst number of elements in the vector is 9. */
6250 av = av->next;
6251 }
6252 while (av);
6253 }
6254 else if (valnum != VN_TOP)
6255     /* valnum satisfies is_gimple_min_invariant.  */
6256 return valnum;
6257 return NULL_TREE;
6258 }
6259
6260 /* Make LEADER a leader for its value at BB. */
6261
6262 void
6263 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6264 {
6265 tree valnum = VN_INFO (leader)->valnum;
6266 if (valnum == VN_TOP
6267 || is_gimple_min_invariant (valnum))
6268 return;
6269 if (dump_file && (dump_flags & TDF_DETAILS))
6270 {
6271 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6272 print_generic_expr (dump_file, leader);
6273 fprintf (dump_file, " for value ");
6274 print_generic_expr (dump_file, valnum);
6275 fprintf (dump_file, "\n");
6276 }
6277 vn_ssa_aux_t value = VN_INFO (valnum);
6278 vn_avail *av;
6279 if (m_avail_freelist)
6280 {
6281 av = m_avail_freelist;
6282 m_avail_freelist = m_avail_freelist->next;
6283 }
6284 else
6285 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6286 av->location = bb->index;
6287 av->leader = SSA_NAME_VERSION (leader);
6288 av->next = value->avail;
6289 value->avail = av;
6290 }
6291
6292 /* Valueization hook for RPO VN plus required state. */
6293
6294 tree
6295 rpo_vn_valueize (tree name)
6296 {
6297 if (TREE_CODE (name) == SSA_NAME)
6298 {
6299 vn_ssa_aux_t val = VN_INFO (name);
6300 if (val)
6301 {
6302 tree tem = val->valnum;
6303 if (tem != VN_TOP && tem != name)
6304 {
6305 if (TREE_CODE (tem) != SSA_NAME)
6306 return tem;
6307 /* For all values we only valueize to an available leader
6308 which means we can use SSA name info without restriction. */
6309 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6310 if (tem)
6311 return tem;
6312 }
6313 }
6314 }
6315 return name;
6316 }
6317
6318 /* Insert on PRED_E predicates derived from CODE OPS being true, besides the
6319    inverted condition.  */
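   /* For example, for  a_1 < b_2  recorded as true on PRED_E we additionally
      record  a_1 != b_2 == true,  a_1 <= b_2 == true,  a_1 > b_2 == false
      and  a_1 == b_2 == false  so later lookups of those conditions can
      simplify without iterating.  */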
6320
6321 static void
6322 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6323 {
6324 switch (code)
6325 {
6326 case LT_EXPR:
6327 /* a < b -> a {!,<}= b */
6328 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6329 ops, boolean_true_node, 0, pred_e);
6330 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6331 ops, boolean_true_node, 0, pred_e);
6332 /* a < b -> ! a {>,=} b */
6333 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6334 ops, boolean_false_node, 0, pred_e);
6335 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6336 ops, boolean_false_node, 0, pred_e);
6337 break;
6338 case GT_EXPR:
6339 /* a > b -> a {!,>}= b */
6340 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6341 ops, boolean_true_node, 0, pred_e);
6342 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6343 ops, boolean_true_node, 0, pred_e);
6344 /* a > b -> ! a {<,=} b */
6345 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6346 ops, boolean_false_node, 0, pred_e);
6347 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6348 ops, boolean_false_node, 0, pred_e);
6349 break;
6350 case EQ_EXPR:
6351 /* a == b -> ! a {<,>} b */
6352 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6353 ops, boolean_false_node, 0, pred_e);
6354 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6355 ops, boolean_false_node, 0, pred_e);
6356 break;
6357 case LE_EXPR:
6358 case GE_EXPR:
6359 case NE_EXPR:
6360 /* Nothing besides inverted condition. */
6361 break;
6362 default:;
6363 }
6364 }
6365
6366 /* Main stmt worker for RPO VN, process BB. */
6367
6368 static unsigned
6369 process_bb (rpo_elim &avail, basic_block bb,
6370 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6371 bool do_region, bitmap exit_bbs, bool skip_phis)
6372 {
6373 unsigned todo = 0;
6374 edge_iterator ei;
6375 edge e;
6376
6377 vn_context_bb = bb;
6378
6379 /* If we are in loop-closed SSA preserve this state. This is
6380 relevant when called on regions from outside of FRE/PRE. */
6381 bool lc_phi_nodes = false;
6382 if (!skip_phis
6383 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6384 FOR_EACH_EDGE (e, ei, bb->preds)
6385 if (e->src->loop_father != e->dest->loop_father
6386 && flow_loop_nested_p (e->dest->loop_father,
6387 e->src->loop_father))
6388 {
6389 lc_phi_nodes = true;
6390 break;
6391 }
6392
6393 /* When we visit a loop header substitute into loop info. */
6394 if (!iterate && eliminate && bb->loop_father->header == bb)
6395 {
6396 /* Keep fields in sync with substitute_in_loop_info. */
6397 if (bb->loop_father->nb_iterations)
6398 bb->loop_father->nb_iterations
6399 = simplify_replace_tree (bb->loop_father->nb_iterations,
6400 NULL_TREE, NULL_TREE, vn_valueize);
6401 }
6402
6403 /* Value-number all defs in the basic-block. */
6404 if (!skip_phis)
6405 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6406 gsi_next (&gsi))
6407 {
6408 gphi *phi = gsi.phi ();
6409 tree res = PHI_RESULT (phi);
6410 vn_ssa_aux_t res_info = VN_INFO (res);
6411 if (!bb_visited)
6412 {
6413 gcc_assert (!res_info->visited);
6414 res_info->valnum = VN_TOP;
6415 res_info->visited = true;
6416 }
6417
6418 /* When not iterating force backedge values to varying. */
6419 visit_stmt (phi, !iterate_phis);
6420 if (virtual_operand_p (res))
6421 continue;
6422
6423 /* Eliminate */
6424          /* The interesting case for the correctness of how we handle
6425             backedges and availability is gcc.dg/tree-ssa/pr22230.c;
6426             gcc.dg/tree-ssa/ssa-sccvn-2.c covers the optimization.  */
6427 tree val = res_info->valnum;
6428 if (res != val && !iterate && eliminate)
6429 {
6430 if (tree leader = avail.eliminate_avail (bb, res))
6431 {
6432 if (leader != res
6433 /* Preserve loop-closed SSA form. */
6434 && (! lc_phi_nodes
6435 || is_gimple_min_invariant (leader)))
6436 {
6437 if (dump_file && (dump_flags & TDF_DETAILS))
6438 {
6439 fprintf (dump_file, "Replaced redundant PHI node "
6440 "defining ");
6441 print_generic_expr (dump_file, res);
6442 fprintf (dump_file, " with ");
6443 print_generic_expr (dump_file, leader);
6444 fprintf (dump_file, "\n");
6445 }
6446 avail.eliminations++;
6447
6448 if (may_propagate_copy (res, leader))
6449 {
6450 /* Schedule for removal. */
6451 avail.to_remove.safe_push (phi);
6452 continue;
6453 }
6454 /* ??? Else generate a copy stmt. */
6455 }
6456 }
6457 }
6458       /* Only make defs available that are not already.  But make
6459 sure loop-closed SSA PHI node defs are picked up for
6460 downstream uses. */
6461 if (lc_phi_nodes
6462 || res == val
6463 || ! avail.eliminate_avail (bb, res))
6464 avail.eliminate_push_avail (bb, res);
6465 }
6466
6467 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6468 we do this when processing the last stmt as we have to do this
6469 before elimination which otherwise forces GIMPLE_CONDs to
6470 if (1 != 0) style when seeing non-executable edges. */
6471 if (gsi_end_p (gsi_start_bb (bb)))
6472 {
6473 FOR_EACH_EDGE (e, ei, bb->succs)
6474 {
6475 if (!(e->flags & EDGE_EXECUTABLE))
6476 {
6477 if (dump_file && (dump_flags & TDF_DETAILS))
6478 fprintf (dump_file,
6479 "marking outgoing edge %d -> %d executable\n",
6480 e->src->index, e->dest->index);
6481 e->flags |= EDGE_EXECUTABLE;
6482 e->dest->flags |= BB_EXECUTABLE;
6483 }
6484 else if (!(e->dest->flags & BB_EXECUTABLE))
6485 {
6486 if (dump_file && (dump_flags & TDF_DETAILS))
6487 fprintf (dump_file,
6488 "marking destination block %d reachable\n",
6489 e->dest->index);
6490 e->dest->flags |= BB_EXECUTABLE;
6491 }
6492 }
6493 }
6494 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6495 !gsi_end_p (gsi); gsi_next (&gsi))
6496 {
6497 ssa_op_iter i;
6498 tree op;
6499 if (!bb_visited)
6500 {
6501 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6502 {
6503 vn_ssa_aux_t op_info = VN_INFO (op);
6504 gcc_assert (!op_info->visited);
6505 op_info->valnum = VN_TOP;
6506 op_info->visited = true;
6507 }
6508
6509 /* We somehow have to deal with uses that are not defined
6510 in the processed region. Forcing unvisited uses to
6511 varying here doesn't play well with def-use following during
6512 expression simplification, so we deal with this by checking
6513 the visited flag in SSA_VAL. */
6514 }
6515
6516 visit_stmt (gsi_stmt (gsi));
6517
6518 gimple *last = gsi_stmt (gsi);
6519 e = NULL;
6520 switch (gimple_code (last))
6521 {
6522 case GIMPLE_SWITCH:
6523 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6524 (as_a <gswitch *> (last))));
6525 break;
6526 case GIMPLE_COND:
6527 {
6528 tree lhs = vn_valueize (gimple_cond_lhs (last));
6529 tree rhs = vn_valueize (gimple_cond_rhs (last));
6530 tree val = gimple_simplify (gimple_cond_code (last),
6531 boolean_type_node, lhs, rhs,
6532 NULL, vn_valueize);
6533             /* If the condition didn't simplify, see if we have recorded
6534                an expression from the edges taken so far.  */
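	    /* E.g. (illustrative): if a dominating  if (a_1 > 10)  recorded
	       a_1 > 10 == true  on its then-edge and this block is only
	       reached through that edge, the predicated lookup below yields
	       the known result for a second  if (a_1 > 10).  */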
6535 if (! val || TREE_CODE (val) != INTEGER_CST)
6536 {
6537 vn_nary_op_t vnresult;
6538 tree ops[2];
6539 ops[0] = lhs;
6540 ops[1] = rhs;
6541 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6542 boolean_type_node, ops,
6543 &vnresult);
6544 /* Did we get a predicated value? */
6545 if (! val && vnresult && vnresult->predicated_values)
6546 {
6547 val = vn_nary_op_get_predicated_value (vnresult, bb);
6548 if (val && dump_file && (dump_flags & TDF_DETAILS))
6549 {
6550 fprintf (dump_file, "Got predicated value ");
6551 print_generic_expr (dump_file, val, TDF_NONE);
6552 fprintf (dump_file, " for ");
6553 print_gimple_stmt (dump_file, last, TDF_SLIM);
6554 }
6555 }
6556 }
6557 if (val)
6558 e = find_taken_edge (bb, val);
6559 if (! e)
6560 {
6561 /* If we didn't manage to compute the taken edge then
6562 push predicated expressions for the condition itself
6563 and related conditions to the hashtables. This allows
6564 simplification of redundant conditions which is
6565 important as early cleanup. */
6566 edge true_e, false_e;
6567 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6568 enum tree_code code = gimple_cond_code (last);
6569 enum tree_code icode
6570 = invert_tree_comparison (code, HONOR_NANS (lhs));
6571 tree ops[2];
6572 ops[0] = lhs;
6573 ops[1] = rhs;
6574 if (do_region
6575 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6576 true_e = NULL;
6577 if (do_region
6578 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6579 false_e = NULL;
6580 if (true_e)
6581 vn_nary_op_insert_pieces_predicated
6582 (2, code, boolean_type_node, ops,
6583 boolean_true_node, 0, true_e);
6584 if (false_e)
6585 vn_nary_op_insert_pieces_predicated
6586 (2, code, boolean_type_node, ops,
6587 boolean_false_node, 0, false_e);
6588 if (icode != ERROR_MARK)
6589 {
6590 if (true_e)
6591 vn_nary_op_insert_pieces_predicated
6592 (2, icode, boolean_type_node, ops,
6593 boolean_false_node, 0, true_e);
6594 if (false_e)
6595 vn_nary_op_insert_pieces_predicated
6596 (2, icode, boolean_type_node, ops,
6597 boolean_true_node, 0, false_e);
6598 }
6599 /* Relax for non-integers, inverted condition handled
6600 above. */
6601 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6602 {
6603 if (true_e)
6604 insert_related_predicates_on_edge (code, ops, true_e);
6605 if (false_e)
6606 insert_related_predicates_on_edge (icode, ops, false_e);
6607 }
6608 }
6609 break;
6610 }
6611 case GIMPLE_GOTO:
6612 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6613 break;
6614 default:
6615 e = NULL;
6616 }
6617 if (e)
6618 {
6619 todo = TODO_cleanup_cfg;
6620 if (!(e->flags & EDGE_EXECUTABLE))
6621 {
6622 if (dump_file && (dump_flags & TDF_DETAILS))
6623 fprintf (dump_file,
6624 "marking known outgoing %sedge %d -> %d executable\n",
6625 e->flags & EDGE_DFS_BACK ? "back-" : "",
6626 e->src->index, e->dest->index);
6627 e->flags |= EDGE_EXECUTABLE;
6628 e->dest->flags |= BB_EXECUTABLE;
6629 }
6630 else if (!(e->dest->flags & BB_EXECUTABLE))
6631 {
6632 if (dump_file && (dump_flags & TDF_DETAILS))
6633 fprintf (dump_file,
6634 "marking destination block %d reachable\n",
6635 e->dest->index);
6636 e->dest->flags |= BB_EXECUTABLE;
6637 }
6638 }
6639 else if (gsi_one_before_end_p (gsi))
6640 {
6641 FOR_EACH_EDGE (e, ei, bb->succs)
6642 {
6643 if (!(e->flags & EDGE_EXECUTABLE))
6644 {
6645 if (dump_file && (dump_flags & TDF_DETAILS))
6646 fprintf (dump_file,
6647 "marking outgoing edge %d -> %d executable\n",
6648 e->src->index, e->dest->index);
6649 e->flags |= EDGE_EXECUTABLE;
6650 e->dest->flags |= BB_EXECUTABLE;
6651 }
6652 else if (!(e->dest->flags & BB_EXECUTABLE))
6653 {
6654 if (dump_file && (dump_flags & TDF_DETAILS))
6655 fprintf (dump_file,
6656 "marking destination block %d reachable\n",
6657 e->dest->index);
6658 e->dest->flags |= BB_EXECUTABLE;
6659 }
6660 }
6661 }
6662
6663 /* Eliminate. That also pushes to avail. */
6664 if (eliminate && ! iterate)
6665 avail.eliminate_stmt (bb, &gsi);
6666 else
6667 /* If not eliminating, make all not already available defs
6668 available. */
6669 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6670 if (! avail.eliminate_avail (bb, op))
6671 avail.eliminate_push_avail (bb, op);
6672 }
6673
6674 /* Eliminate in destination PHI arguments. Always substitute in dest
6675 PHIs, even for non-executable edges. This handles region
6676        exit PHIs.  */
6677 if (!iterate && eliminate)
6678 FOR_EACH_EDGE (e, ei, bb->succs)
6679 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6680 !gsi_end_p (gsi); gsi_next (&gsi))
6681 {
6682 gphi *phi = gsi.phi ();
6683 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6684 tree arg = USE_FROM_PTR (use_p);
6685 if (TREE_CODE (arg) != SSA_NAME
6686 || virtual_operand_p (arg))
6687 continue;
6688 tree sprime;
6689 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6690 {
6691 sprime = SSA_VAL (arg);
6692 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6693 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6694 }
6695 else
6696 /* Look for sth available at the definition block of the argument.
6697 This avoids inconsistencies between availability there which
6698 decides if the stmt can be removed and availability at the
6699 use site. The SSA property ensures that things available
6700 at the definition are also available at uses. */
6701 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6702 arg);
6703 if (sprime
6704 && sprime != arg
6705 && may_propagate_copy (arg, sprime))
6706 propagate_value (use_p, sprime);
6707 }
6708
6709 vn_context_bb = NULL;
6710 return todo;
6711 }
6712
6713 /* Unwind state per basic-block. */
6714
6715 struct unwind_state
6716 {
6717 /* Times this block has been visited. */
6718 unsigned visited;
6719   /* Whether to handle this as an iteration point or whether to treat
6720 incoming backedge PHI values as varying. */
6721 bool iterate;
6722 /* Maximum RPO index this block is reachable from. */
6723 int max_rpo;
6724 /* Unwind state. */
6725 void *ob_top;
6726 vn_reference_t ref_top;
6727 vn_phi_t phi_top;
6728 vn_nary_op_t nary_top;
6729 };
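/* Note: in the non-iterating mode MAX_RPO is what tells us whether an
   incoming edge's executable state can be trusted yet; an unvisited
   predecessor whose max_rpo reaches at or beyond the current block's RPO
   index may still become reachable later, so such edges are conservatively
   treated as executable (see the worklist processing in do_rpo_vn).  */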
6730
6731 /* Unwind the RPO VN state for iteration. */
6732
6733 static void
6734 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6735 {
6736 gcc_assert (to->iterate);
6737 for (; last_inserted_nary != to->nary_top;
6738 last_inserted_nary = last_inserted_nary->next)
6739 {
6740 vn_nary_op_t *slot;
6741 slot = valid_info->nary->find_slot_with_hash
6742 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6743 /* Predication causes the need to restore previous state. */
6744 if ((*slot)->unwind_to)
6745 *slot = (*slot)->unwind_to;
6746 else
6747 valid_info->nary->clear_slot (slot);
6748 }
6749 for (; last_inserted_phi != to->phi_top;
6750 last_inserted_phi = last_inserted_phi->next)
6751 {
6752 vn_phi_t *slot;
6753 slot = valid_info->phis->find_slot_with_hash
6754 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6755 valid_info->phis->clear_slot (slot);
6756 }
6757 for (; last_inserted_ref != to->ref_top;
6758 last_inserted_ref = last_inserted_ref->next)
6759 {
6760 vn_reference_t *slot;
6761 slot = valid_info->references->find_slot_with_hash
6762 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6763 (*slot)->operands.release ();
6764 valid_info->references->clear_slot (slot);
6765 }
6766 obstack_free (&vn_tables_obstack, to->ob_top);
6767
6768 /* Prune [rpo_idx, ] from avail. */
6769 /* ??? This is O(number-of-values-in-region) which is
6770 O(region-size) rather than O(iteration-piece). */
6771 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
6772 i != vn_ssa_aux_hash->end (); ++i)
6773 {
6774 while ((*i)->avail)
6775 {
6776 if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
6777 break;
6778 vn_avail *av = (*i)->avail;
6779 (*i)->avail = (*i)->avail->next;
6780 av->next = avail.m_avail_freelist;
6781 avail.m_avail_freelist = av;
6782 }
6783 }
6784 }
6785
6786 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6787 If ITERATE is true then treat backedges optimistically as not
6788 executed and iterate. If ELIMINATE is true then perform
6789 elimination, otherwise leave that to the caller. */
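   /* The whole-function mode passes a NULL ENTRY and NULL EXIT_BBS (see
      run_rpo_vn above); in that case we start from the single successor of
      the entry block and treat only the exit block as a region exit.  */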
6790
6791 static unsigned
6792 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6793 bool iterate, bool eliminate)
6794 {
6795 unsigned todo = 0;
6796
6797 /* We currently do not support region-based iteration when
6798 elimination is requested. */
6799 gcc_assert (!entry || !iterate || !eliminate);
6800 /* When iterating we need loop info up-to-date. */
6801 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6802
6803 bool do_region = entry != NULL;
6804 if (!do_region)
6805 {
6806 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6807 exit_bbs = BITMAP_ALLOC (NULL);
6808 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6809 }
6810
6811 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
6812 re-mark those that are contained in the region. */
6813 edge_iterator ei;
6814 edge e;
6815 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6816 e->flags &= ~EDGE_DFS_BACK;
6817
6818 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6819 int n = rev_post_order_and_mark_dfs_back_seme
6820 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
6821 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6822 for (int i = 0; i < n / 2; ++i)
6823 std::swap (rpo[i], rpo[n-i-1]);
6824
6825 if (!do_region)
6826 BITMAP_FREE (exit_bbs);
6827
6828 /* If there are any non-DFS_BACK edges into entry->dest skip
6829 processing PHI nodes for that block. This supports
6830 value-numbering loop bodies w/o the actual loop. */
6831 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6832 if (e != entry
6833 && !(e->flags & EDGE_DFS_BACK))
6834 break;
6835 bool skip_entry_phis = e != NULL;
6836 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
6837 fprintf (dump_file, "Region does not contain all edges into "
6838 "the entry block, skipping its PHIs.\n");
6839
6840 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6841 for (int i = 0; i < n; ++i)
6842 bb_to_rpo[rpo[i]] = i;
6843
6844 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6845
6846 rpo_elim avail (entry->dest);
6847 rpo_avail = &avail;
6848
6849 /* Verify we have no extra entries into the region. */
6850 if (flag_checking && do_region)
6851 {
6852 auto_bb_flag bb_in_region (fn);
6853 for (int i = 0; i < n; ++i)
6854 {
6855 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6856 bb->flags |= bb_in_region;
6857 }
6858 /* We can't merge the first two loops because we cannot rely
6859 on EDGE_DFS_BACK for edges not within the region. But if
6860 we decide to always have the bb_in_region flag we can
6861 do the checking during the RPO walk itself (but then it's
6862 also easy to handle MEME conservatively). */
6863 for (int i = 0; i < n; ++i)
6864 {
6865 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6866 edge e;
6867 edge_iterator ei;
6868 FOR_EACH_EDGE (e, ei, bb->preds)
6869 gcc_assert (e == entry
6870 || (skip_entry_phis && bb == entry->dest)
6871 || (e->src->flags & bb_in_region));
6872 }
6873 for (int i = 0; i < n; ++i)
6874 {
6875 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6876 bb->flags &= ~bb_in_region;
6877 }
6878 }
6879
6880 /* Create the VN state. For the initial size of the various hashtables
6881 use a heuristic based on region size and number of SSA names. */
6882 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6883 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6884 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6885 next_value_id = 1;
6886
6887 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6888 gcc_obstack_init (&vn_ssa_aux_obstack);
6889
6890 gcc_obstack_init (&vn_tables_obstack);
6891 gcc_obstack_init (&vn_tables_insert_obstack);
6892 valid_info = XCNEW (struct vn_tables_s);
6893 allocate_vn_table (valid_info, region_size);
6894 last_inserted_ref = NULL;
6895 last_inserted_phi = NULL;
6896 last_inserted_nary = NULL;
6897
6898 vn_valueize = rpo_vn_valueize;
6899
6900 /* Initialize the unwind state and edge/BB executable state. */
6901 bool need_max_rpo_iterate = false;
6902 for (int i = 0; i < n; ++i)
6903 {
6904 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6905 rpo_state[i].visited = 0;
6906 rpo_state[i].max_rpo = i;
6907 bb->flags &= ~BB_EXECUTABLE;
6908 bool has_backedges = false;
6909 edge e;
6910 edge_iterator ei;
6911 FOR_EACH_EDGE (e, ei, bb->preds)
6912 {
6913 if (e->flags & EDGE_DFS_BACK)
6914 has_backedges = true;
6915 e->flags &= ~EDGE_EXECUTABLE;
6916 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
6917 continue;
6918 if (bb_to_rpo[e->src->index] > i)
6919 {
6920 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6921 bb_to_rpo[e->src->index]);
6922 need_max_rpo_iterate = true;
6923 }
6924 else
6925 rpo_state[i].max_rpo
6926 = MAX (rpo_state[i].max_rpo,
6927 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6928 }
6929 rpo_state[i].iterate = iterate && has_backedges;
6930 }
6931 entry->flags |= EDGE_EXECUTABLE;
6932 entry->dest->flags |= BB_EXECUTABLE;
6933
6934 /* When there are irreducible regions the simplistic max_rpo computation
6935 above for the case of backedges doesn't work and we need to iterate
6936 until there are no more changes. */
6937 unsigned nit = 0;
6938 while (need_max_rpo_iterate)
6939 {
6940 nit++;
6941 need_max_rpo_iterate = false;
6942 for (int i = 0; i < n; ++i)
6943 {
6944 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6945 edge e;
6946 edge_iterator ei;
6947 FOR_EACH_EDGE (e, ei, bb->preds)
6948 {
6949 if (e == entry || (skip_entry_phis && bb == entry->dest))
6950 continue;
6951 int max_rpo = MAX (rpo_state[i].max_rpo,
6952 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6953 if (rpo_state[i].max_rpo != max_rpo)
6954 {
6955 rpo_state[i].max_rpo = max_rpo;
6956 need_max_rpo_iterate = true;
6957 }
6958 }
6959 }
6960 }
6961 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
6962
6963   /* As a heuristic to improve compile time we handle only the N innermost
6964 loops and the outermost one optimistically. */
6965 if (iterate)
6966 {
6967 loop_p loop;
6968 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6969 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6970 if (loop_depth (loop) > max_depth)
6971 for (unsigned i = 2;
6972 i < loop_depth (loop) - max_depth; ++i)
6973 {
6974 basic_block header = superloop_at_depth (loop, i)->header;
6975 bool non_latch_backedge = false;
6976 edge e;
6977 edge_iterator ei;
6978 FOR_EACH_EDGE (e, ei, header->preds)
6979 if (e->flags & EDGE_DFS_BACK)
6980 {
6981 /* There can be a non-latch backedge into the header
6982 which is part of an outer irreducible region. We
6983 cannot avoid iterating this block then. */
6984 if (!dominated_by_p (CDI_DOMINATORS,
6985 e->src, e->dest))
6986 {
6987 if (dump_file && (dump_flags & TDF_DETAILS))
6988 fprintf (dump_file, "non-latch backedge %d -> %d "
6989 "forces iteration of loop %d\n",
6990 e->src->index, e->dest->index, loop->num);
6991 non_latch_backedge = true;
6992 }
6993 else
6994 e->flags |= EDGE_EXECUTABLE;
6995 }
6996 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
6997 }
6998 }
6999
7000 uint64_t nblk = 0;
7001 int idx = 0;
7002 if (iterate)
7003 /* Go and process all blocks, iterating as necessary. */
7004 do
7005 {
7006 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7007
7008 /* If the block has incoming backedges remember unwind state. This
7009 is required even for non-executable blocks since in irreducible
7010 regions we might reach them via the backedge and re-start iterating
7011 from there.
7012 Note we can individually mark blocks with incoming backedges to
7013 not iterate where we then handle PHIs conservatively. We do that
7014 heuristically to reduce compile-time for degenerate cases. */
7015 if (rpo_state[idx].iterate)
7016 {
7017 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7018 rpo_state[idx].ref_top = last_inserted_ref;
7019 rpo_state[idx].phi_top = last_inserted_phi;
7020 rpo_state[idx].nary_top = last_inserted_nary;
7021 }
7022
7023 if (!(bb->flags & BB_EXECUTABLE))
7024 {
7025 if (dump_file && (dump_flags & TDF_DETAILS))
7026 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7027 idx, bb->index);
7028 idx++;
7029 continue;
7030 }
7031
7032 if (dump_file && (dump_flags & TDF_DETAILS))
7033 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7034 nblk++;
7035 todo |= process_bb (avail, bb,
7036 rpo_state[idx].visited != 0,
7037 rpo_state[idx].iterate,
7038 iterate, eliminate, do_region, exit_bbs, false);
7039 rpo_state[idx].visited++;
7040
7041         /* Check whether changed values flow over executable outgoing backedges
7042            and change destination PHI values (that's the thing we
7043            can easily verify).  Reduce over all such edges to the farthest
7044            away PHI.  */
7045 int iterate_to = -1;
7046 edge_iterator ei;
7047 edge e;
7048 FOR_EACH_EDGE (e, ei, bb->succs)
7049 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7050 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7051 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7052 {
7053 int destidx = bb_to_rpo[e->dest->index];
7054 if (!rpo_state[destidx].visited)
7055 {
7056 if (dump_file && (dump_flags & TDF_DETAILS))
7057 fprintf (dump_file, "Unvisited destination %d\n",
7058 e->dest->index);
7059 if (iterate_to == -1 || destidx < iterate_to)
7060 iterate_to = destidx;
7061 continue;
7062 }
7063 if (dump_file && (dump_flags & TDF_DETAILS))
7064 fprintf (dump_file, "Looking for changed values of backedge"
7065 " %d->%d destination PHIs\n",
7066 e->src->index, e->dest->index);
7067 vn_context_bb = e->dest;
7068 gphi_iterator gsi;
7069 for (gsi = gsi_start_phis (e->dest);
7070 !gsi_end_p (gsi); gsi_next (&gsi))
7071 {
7072 bool inserted = false;
7073                 /* While we'd ideally just iterate on value changes,
7074                    we CSE PHIs and do that even across basic-block
7075                    boundaries.  So even hashtable state changes can
7076                    be important (which is roughly equivalent to
7077                    PHI argument value changes).  To not excessively
7078                    iterate because of that we track with GF_PLF_1
7079                    whether a PHI was CSEd to.  */
7080 bool phival_changed;
7081 if ((phival_changed = visit_phi (gsi.phi (),
7082 &inserted, false))
7083 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7084 {
7085 if (!phival_changed
7086 && dump_file && (dump_flags & TDF_DETAILS))
7087 fprintf (dump_file, "PHI was CSEd and hashtable "
7088 "state (changed)\n");
7089 if (iterate_to == -1 || destidx < iterate_to)
7090 iterate_to = destidx;
7091 break;
7092 }
7093 }
7094 vn_context_bb = NULL;
7095 }
7096 if (iterate_to != -1)
7097 {
7098 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7099 idx = iterate_to;
7100 if (dump_file && (dump_flags & TDF_DETAILS))
7101 fprintf (dump_file, "Iterating to %d BB%d\n",
7102 iterate_to, rpo[iterate_to]);
7103 continue;
7104 }
7105
7106 idx++;
7107 }
7108 while (idx < n);
7109
7110 else /* !iterate */
7111 {
7112 /* Process all blocks greedily with a worklist that enforces RPO
7113 processing of reachable blocks. */
7114 auto_bitmap worklist;
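/* RPO index 0 is the region entry block; seed the worklist with it. */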
7115 bitmap_set_bit (worklist, 0);
7116 while (!bitmap_empty_p (worklist))
7117 {
7118 int idx = bitmap_first_set_bit (worklist);
7119 bitmap_clear_bit (worklist, idx);
7120 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7121 gcc_assert ((bb->flags & BB_EXECUTABLE)
7122 && !rpo_state[idx].visited);
7123
7124 if (dump_file && (dump_flags & TDF_DETAILS))
7125 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7126
7127 /* When we run into predecessor edges whose executable state we
7128 cannot trust, mark them executable so PHI processing will
7129 be conservative.
7130 ??? Do we need to force arguments flowing over such an edge
7131 to be varying, or will they always be varying anyway? */
7132 edge_iterator ei;
7133 edge e;
7134 FOR_EACH_EDGE (e, ei, bb->preds)
7135 if (!(e->flags & EDGE_EXECUTABLE)
7136 && (bb == entry->dest
7137 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7138 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7139 >= (int)idx))))
7140 {
7141 if (dump_file && (dump_flags & TDF_DETAILS))
7142 fprintf (dump_file, "Cannot trust state of predecessor "
7143 "edge %d -> %d, marking executable\n",
7144 e->src->index, e->dest->index);
7145 e->flags |= EDGE_EXECUTABLE;
7146 }
7147
7148 nblk++;
7149 todo |= process_bb (avail, bb, false, false, false, eliminate,
7150 do_region, exit_bbs,
7151 skip_entry_phis && bb == entry->dest);
7152 rpo_state[idx].visited++;
7153
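/* Queue executable successors inside the region that have not been
visited yet; extracting the lowest set bit above keeps the
processing in RPO order. */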
7154 FOR_EACH_EDGE (e, ei, bb->succs)
7155 if ((e->flags & EDGE_EXECUTABLE)
7156 && e->dest->index != EXIT_BLOCK
7157 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7158 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7159 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7160 }
7161 }
7162
7163 /* Gather statistics and, if a dump file is active, print a summary. */
7164 int nex = 0;
7165 unsigned max_visited = 1;
7166 for (int i = 0; i < n; ++i)
7167 {
7168 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7169 if (bb->flags & BB_EXECUTABLE)
7170 nex++;
7171 statistics_histogram_event (cfun, "RPO block visited times",
7172 rpo_state[i].visited);
7173 if (rpo_state[i].visited > max_visited)
7174 max_visited = rpo_state[i].visited;
7175 }
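/* Count the recorded values and their availability-chain entries
for the statistics below. */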
7176 unsigned nvalues = 0, navail = 0;
7177 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7178 i != vn_ssa_aux_hash->end (); ++i)
7179 {
7180 nvalues++;
7181 vn_avail *av = (*i)->avail;
7182 while (av)
7183 {
7184 navail++;
7185 av = av->next;
7186 }
7187 }
7188 statistics_counter_event (cfun, "RPO blocks", n);
7189 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7190 statistics_counter_event (cfun, "RPO blocks executable", nex);
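/* Average number of visits per executable block, scaled by ten to
keep one fractional digit in the histogram. */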
7191 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7192 statistics_histogram_event (cfun, "RPO num values", nvalues);
7193 statistics_histogram_event (cfun, "RPO num avail", navail);
7194 statistics_histogram_event (cfun, "RPO num lattice",
7195 vn_ssa_aux_hash->elements ());
7196 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7197 {
7198 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7199 " blocks in total discovering %d executable blocks iterating "
7200 "%d.%d times, a block was visited max. %u times\n",
7201 n, nblk, nex,
7202 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7203 max_visited);
7204 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7205 "and %" PRIu64 " lattice elements\n",
7206 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7207 }
7208
7209 if (eliminate)
7210 {
7211 /* When !iterate we already performed elimination during the RPO
7212 walk. */
7213 if (iterate)
7214 {
7215 /* Elimination for region-based VN needs to be done within the
7216 RPO walk. */
7217 gcc_assert (! do_region);
7218 /* Note we can't use avail.walk here because that gets confused
7219 by the existing availability and it will be less efficient
7220 as well. */
7221 todo |= eliminate_with_rpo_vn (NULL);
7222 }
7223 else
7224 todo |= avail.eliminate_cleanup (do_region);
7225 }
7226
7227 vn_valueize = NULL;
7228 rpo_avail = NULL;
7229
7230 XDELETEVEC (bb_to_rpo);
7231 XDELETEVEC (rpo);
7232 XDELETEVEC (rpo_state);
7233
7234 return todo;
7235 }
7236
7237 /* Region-based entry for RPO VN. Performs value-numbering and elimination
7238 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
7239 the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
7240 are not considered. */
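/* A minimal usage sketch (illustrative only, not copied from an actual
caller): to value-number and clean up a single-entry single-exit
region around a loop LOOP one could do roughly
bitmap exit_bbs = BITMAP_ALLOC (NULL);
bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
BITMAP_FREE (exit_bbs);
assuming LOOP has a preheader and a single exit edge. */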
7241
7242 unsigned
7243 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7244 {
7245 default_vn_walk_kind = VN_WALKREWRITE;
7246 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7247 free_rpo_vn ();
7248 return todo;
7249 }
7250
7251
7252 namespace {
7253
7254 const pass_data pass_data_fre =
7255 {
7256 GIMPLE_PASS, /* type */
7257 "fre", /* name */
7258 OPTGROUP_NONE, /* optinfo_flags */
7259 TV_TREE_FRE, /* tv_id */
7260 ( PROP_cfg | PROP_ssa ), /* properties_required */
7261 0, /* properties_provided */
7262 0, /* properties_destroyed */
7263 0, /* todo_flags_start */
7264 0, /* todo_flags_finish */
7265 };
7266
7267 class pass_fre : public gimple_opt_pass
7268 {
7269 public:
7270 pass_fre (gcc::context *ctxt)
7271 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7272 {}
7273
7274 /* opt_pass methods: */
7275 opt_pass * clone () { return new pass_fre (m_ctxt); }
7276 void set_pass_param (unsigned int n, bool param)
7277 {
7278 gcc_assert (n == 0);
7279 may_iterate = param;
7280 }
7281 virtual bool gate (function *)
7282 {
7283 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7284 }
7285 virtual unsigned int execute (function *);
7286
7287 private:
7288 bool may_iterate;
7289 }; // class pass_fre
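/* The single pass parameter selects between the iterating (full FRE)
and the cheaper non-iterating variant; the instances and their
parameter come from passes.def, roughly as NEXT_PASS (pass_fre, true)
and NEXT_PASS (pass_fre, false). */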
7290
7291 unsigned int
7292 pass_fre::execute (function *fun)
7293 {
7294 unsigned todo = 0;
7295
7296 /* At -O[1g] use the cheap non-iterating mode. */
7297 bool iterate_p = may_iterate && (optimize > 1);
7298 calculate_dominance_info (CDI_DOMINATORS);
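/* Loop information is only needed for the iterating mode, for example
by the loop-depth based iteration heuristic in do_rpo_vn, so compute
it only then. */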
7299 if (iterate_p)
7300 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
7301
7302 default_vn_walk_kind = VN_WALKREWRITE;
7303 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
7304 free_rpo_vn ();
7305
7306 if (iterate_p)
7307 loop_optimizer_finalize ();
7308
7309 return todo;
7310 }
7311
7312 } // anon namespace
7313
7314 gimple_opt_pass *
7315 make_pass_fre (gcc::context *ctxt)
7316 {
7317 return new pass_fre (ctxt);
7318 }
7319
7320 #undef BB_EXECUTABLE