gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "insn-config.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "tree-ssa-sccvn.h"
73
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight-line code, it is equivalent to a regular hash-based value
78 numbering that is performed in reverse postorder.
79
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
83
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
88
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
93
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
97
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
102
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
106
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
109
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
115
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores of the same
118 value to the same address from the same starting memory state are
119 equivalent.
120 TODO:
121
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly whole
128 structure copies.
129 */
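
/* As a small, made-up C example of what the above buys us:

     int f (int x) { int a = x + 1; int b = x + 1; return a - b; }

   value numbering assigns a and b the same value number, so the
   subtraction can be simplified to 0; similar redundancies through
   memory are caught via the value numbers assigned to vuses.  */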
130
131 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
132 #define BB_EXECUTABLE BB_VISITED
133
134 static tree *last_vuse_ptr;
135 static vn_lookup_kind vn_walk_kind;
136 static vn_lookup_kind default_vn_walk_kind;
137
138 /* vn_nary_op hashtable helpers. */
139
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 {
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145 };
146
147 /* Return the computed hashcode for nary operation P1. */
148
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 {
152 return vno1->hashcode;
153 }
154
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
157
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 {
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162 }
163
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166
167
168 /* vn_phi hashtable helpers. */
169
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 {
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 };
178
179 /* Return the computed hashcode for phi operation P1. */
180
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 {
184 return vp1->hashcode;
185 }
186
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 {
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193 }
194
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197
198
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
201
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
204 {
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
217 }
218
219 /* Free a reference operation structure VR. */
220
221 static inline void
222 free_reference (vn_reference_s *vr)
223 {
224 vr->operands.release ();
225 }
226
227
228 /* vn_reference hashtable helpers. */
229
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 {
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 };
235
236 /* Return the hashcode for a given reference operation P1. */
237
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 {
241 return vr1->hashcode;
242 }
243
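/* Return true if the reference operations V and C compare equal. */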
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 {
247 return v == c || vn_reference_eq (v, c);
248 }
249
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252
253
254 /* The set of VN hashtables. */
255
256 typedef struct vn_tables_s
257 {
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
262
263
264 /* vn_constant hashtable helpers. */
265
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
267 {
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
270 };
271
272 /* Hash table hash function for vn_constant_t. */
273
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
276 {
277 return vc1->hashcode;
278 }
279
280 /* Hash table equality function for vn_constant_t. */
281
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
284 {
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
287
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289 }
290
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
293
294
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
299
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
303
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
307
308
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
312
313
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
316
317 tree VN_TOP;
318
319 /* Unique counter for our value ids. */
320
321 static unsigned int next_value_id;
322
323
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
327
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
329 {
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
338 };
339
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
342 {
343 return SSA_NAME_VERSION (entry->name);
344 }
345
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
348 {
349 return name == entry->name;
350 }
351
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
355
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
365
366 /* Return whether there is value numbering information for a given SSA name. */
367
368 bool
369 has_VN_INFO (tree name)
370 {
371 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
372 }
373
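/* Return the value numbering information for NAME, creating it if it
   does not exist yet. */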
374 vn_ssa_aux_t
375 VN_INFO (tree name)
376 {
377 vn_ssa_aux_t *res
378 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
379 INSERT);
380 if (*res != NULL)
381 return *res;
382
383 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
384 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
385 newinfo->name = name;
386 newinfo->valnum = VN_TOP;
387 /* We are using the visited flag to handle uses with defs not within the
388 region being value-numbered. */
389 newinfo->visited = false;
390
391 /* Given we create the VN_INFOs on-demand now, we have to do initialization
392 different from just VN_TOP here. */
393 if (SSA_NAME_IS_DEFAULT_DEF (name))
394 switch (TREE_CODE (SSA_NAME_VAR (name)))
395 {
396 case VAR_DECL:
397 /* All undefined vars are VARYING. */
398 newinfo->valnum = name;
399 newinfo->visited = true;
400 break;
401
402 case PARM_DECL:
403 /* Parameters are VARYING but we can record a condition
404 if we know it is a non-NULL pointer. */
405 newinfo->visited = true;
406 newinfo->valnum = name;
407 if (POINTER_TYPE_P (TREE_TYPE (name))
408 && nonnull_arg_p (SSA_NAME_VAR (name)))
409 {
410 tree ops[2];
411 ops[0] = name;
412 ops[1] = build_int_cst (TREE_TYPE (name), 0);
413 vn_nary_op_t nary;
414 /* Allocate from non-unwinding stack. */
415 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
416 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
417 boolean_type_node, ops);
418 nary->predicated_values = 0;
419 nary->u.result = boolean_true_node;
420 vn_nary_op_insert_into (nary, valid_info->nary, true);
421 gcc_assert (nary->unwind_to == NULL);
422 /* Also do not link it into the undo chain. */
423 last_inserted_nary = nary->next;
424 nary->next = (vn_nary_op_t)(void *)-1;
425 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
426 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
427 boolean_type_node, ops);
428 nary->predicated_values = 0;
429 nary->u.result = boolean_false_node;
430 vn_nary_op_insert_into (nary, valid_info->nary, true);
431 gcc_assert (nary->unwind_to == NULL);
432 last_inserted_nary = nary->next;
433 nary->next = (vn_nary_op_t)(void *)-1;
434 if (dump_file && (dump_flags & TDF_DETAILS))
435 {
436 fprintf (dump_file, "Recording ");
437 print_generic_expr (dump_file, name, TDF_SLIM);
438 fprintf (dump_file, " != 0\n");
439 }
440 }
441 break;
442
443 case RESULT_DECL:
444 /* If the result is passed by invisible reference the default
445 def is initialized, otherwise it's uninitialized. Still,
446 undefined is varying. */
447 newinfo->visited = true;
448 newinfo->valnum = name;
449 break;
450
451 default:
452 gcc_unreachable ();
453 }
454 return newinfo;
455 }
456
457 /* Return the SSA value of X. */
458
459 inline tree
460 SSA_VAL (tree x, bool *visited = NULL)
461 {
462 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
463 if (visited)
464 *visited = tem && tem->visited;
465 return tem && tem->visited ? tem->valnum : x;
466 }
467
468 /* Return the SSA value of the VUSE X, supporting released VDEFs
469 during elimination which will value-number the VDEF to the
470 associated VUSE (but not substitute in the whole lattice). */
471
472 static inline tree
473 vuse_ssa_val (tree x)
474 {
475 if (!x)
476 return NULL_TREE;
477
478 do
479 {
480 x = SSA_VAL (x);
481 gcc_assert (x != VN_TOP);
482 }
483 while (SSA_NAME_IN_FREE_LIST (x));
484
485 return x;
486 }
487
488 /* Similar to the above but used as callback for walk_non_aliased_vuses
489 and thus should stop at an unvisited VUSE to not walk across region
490 boundaries. */
491
492 static tree
493 vuse_valueize (tree vuse)
494 {
495 do
496 {
497 bool visited;
498 vuse = SSA_VAL (vuse, &visited);
499 if (!visited)
500 return NULL_TREE;
501 gcc_assert (vuse != VN_TOP);
502 }
503 while (SSA_NAME_IN_FREE_LIST (vuse));
504 return vuse;
505 }
506
507
508 /* Return the vn_kind the expression computed by the stmt should be
509 associated with. */
510
511 enum vn_kind
512 vn_get_stmt_kind (gimple *stmt)
513 {
514 switch (gimple_code (stmt))
515 {
516 case GIMPLE_CALL:
517 return VN_REFERENCE;
518 case GIMPLE_PHI:
519 return VN_PHI;
520 case GIMPLE_ASSIGN:
521 {
522 enum tree_code code = gimple_assign_rhs_code (stmt);
523 tree rhs1 = gimple_assign_rhs1 (stmt);
524 switch (get_gimple_rhs_class (code))
525 {
526 case GIMPLE_UNARY_RHS:
527 case GIMPLE_BINARY_RHS:
528 case GIMPLE_TERNARY_RHS:
529 return VN_NARY;
530 case GIMPLE_SINGLE_RHS:
531 switch (TREE_CODE_CLASS (code))
532 {
533 case tcc_reference:
534 /* VOP-less references can go through unary case. */
535 if ((code == REALPART_EXPR
536 || code == IMAGPART_EXPR
537 || code == VIEW_CONVERT_EXPR
538 || code == BIT_FIELD_REF)
539 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
540 return VN_NARY;
541
542 /* Fallthrough. */
543 case tcc_declaration:
544 return VN_REFERENCE;
545
546 case tcc_constant:
547 return VN_CONSTANT;
548
549 default:
550 if (code == ADDR_EXPR)
551 return (is_gimple_min_invariant (rhs1)
552 ? VN_CONSTANT : VN_REFERENCE);
553 else if (code == CONSTRUCTOR)
554 return VN_NARY;
555 return VN_NONE;
556 }
557 default:
558 return VN_NONE;
559 }
560 }
561 default:
562 return VN_NONE;
563 }
564 }
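
/* Illustration (SSA names made up): x_1 = a_2 + b_3 is VN_NARY,
   x_1 = *p_2 or a call is VN_REFERENCE, a PHI node is VN_PHI,
   x_1 = 42 is VN_CONSTANT, and everything else is VN_NONE.  */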
565
566 /* Lookup a value id for CONSTANT and return it. If it does not
567 exist, return 0. */
568
569 unsigned int
570 get_constant_value_id (tree constant)
571 {
572 vn_constant_s **slot;
573 struct vn_constant_s vc;
574
575 vc.hashcode = vn_hash_constant_with_type (constant);
576 vc.constant = constant;
577 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
578 if (slot)
579 return (*slot)->value_id;
580 return 0;
581 }
582
583 /* Lookup a value id for CONSTANT, and if it does not exist, create a
584 new one and return it. If it does exist, return it. */
585
586 unsigned int
587 get_or_alloc_constant_value_id (tree constant)
588 {
589 vn_constant_s **slot;
590 struct vn_constant_s vc;
591 vn_constant_t vcp;
592
593 /* If the hashtable isn't initialized we're not running from PRE and thus
594 do not need value-ids. */
595 if (!constant_to_value_id)
596 return 0;
597
598 vc.hashcode = vn_hash_constant_with_type (constant);
599 vc.constant = constant;
600 slot = constant_to_value_id->find_slot (&vc, INSERT);
601 if (*slot)
602 return (*slot)->value_id;
603
604 vcp = XNEW (struct vn_constant_s);
605 vcp->hashcode = vc.hashcode;
606 vcp->constant = constant;
607 vcp->value_id = get_next_value_id ();
608 *slot = vcp;
609 bitmap_set_bit (constant_value_ids, vcp->value_id);
610 return vcp->value_id;
611 }
612
613 /* Return true if V is a value id for a constant. */
614
615 bool
616 value_id_constant_p (unsigned int v)
617 {
618 return bitmap_bit_p (constant_value_ids, v);
619 }
620
621 /* Compute the hash for a reference operand VRO1. */
622
623 static void
624 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
625 {
626 hstate.add_int (vro1->opcode);
627 if (vro1->op0)
628 inchash::add_expr (vro1->op0, hstate);
629 if (vro1->op1)
630 inchash::add_expr (vro1->op1, hstate);
631 if (vro1->op2)
632 inchash::add_expr (vro1->op2, hstate);
633 }
634
635 /* Compute a hash for the reference operation VR1 and return it. */
636
637 static hashval_t
638 vn_reference_compute_hash (const vn_reference_t vr1)
639 {
640 inchash::hash hstate;
641 hashval_t result;
642 int i;
643 vn_reference_op_t vro;
644 poly_int64 off = -1;
645 bool deref = false;
646
647 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
648 {
649 if (vro->opcode == MEM_REF)
650 deref = true;
651 else if (vro->opcode != ADDR_EXPR)
652 deref = false;
653 if (maybe_ne (vro->off, -1))
654 {
655 if (known_eq (off, -1))
656 off = 0;
657 off += vro->off;
658 }
659 else
660 {
661 if (maybe_ne (off, -1)
662 && maybe_ne (off, 0))
663 hstate.add_poly_int (off);
664 off = -1;
665 if (deref
666 && vro->opcode == ADDR_EXPR)
667 {
668 if (vro->op0)
669 {
670 tree op = TREE_OPERAND (vro->op0, 0);
671 hstate.add_int (TREE_CODE (op));
672 inchash::add_expr (op, hstate);
673 }
674 }
675 else
676 vn_reference_op_compute_hash (vro, hstate);
677 }
678 }
679 result = hstate.end ();
680 /* ??? We would ICE later if we hash instead of adding that in. */
681 if (vr1->vuse)
682 result += SSA_NAME_VERSION (vr1->vuse);
683
684 return result;
685 }
686
687 /* Return true if reference operations VR1 and VR2 are equivalent. This
688 means they have the same set of operands and vuses. */
689
690 bool
691 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
692 {
693 unsigned i, j;
694
695 /* Early out if this is not a hash collision. */
696 if (vr1->hashcode != vr2->hashcode)
697 return false;
698
699 /* The VOP needs to be the same. */
700 if (vr1->vuse != vr2->vuse)
701 return false;
702
703 /* If the operands are the same we are done. */
704 if (vr1->operands == vr2->operands)
705 return true;
706
707 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
708 return false;
709
710 if (INTEGRAL_TYPE_P (vr1->type)
711 && INTEGRAL_TYPE_P (vr2->type))
712 {
713 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
714 return false;
715 }
716 else if (INTEGRAL_TYPE_P (vr1->type)
717 && (TYPE_PRECISION (vr1->type)
718 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
719 return false;
720 else if (INTEGRAL_TYPE_P (vr2->type)
721 && (TYPE_PRECISION (vr2->type)
722 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
723 return false;
724
725 i = 0;
726 j = 0;
727 do
728 {
729 poly_int64 off1 = 0, off2 = 0;
730 vn_reference_op_t vro1, vro2;
731 vn_reference_op_s tem1, tem2;
732 bool deref1 = false, deref2 = false;
733 for (; vr1->operands.iterate (i, &vro1); i++)
734 {
735 if (vro1->opcode == MEM_REF)
736 deref1 = true;
737 /* Do not look through a storage order barrier. */
738 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
739 return false;
740 if (known_eq (vro1->off, -1))
741 break;
742 off1 += vro1->off;
743 }
744 for (; vr2->operands.iterate (j, &vro2); j++)
745 {
746 if (vro2->opcode == MEM_REF)
747 deref2 = true;
748 /* Do not look through a storage order barrier. */
749 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
750 return false;
751 if (known_eq (vro2->off, -1))
752 break;
753 off2 += vro2->off;
754 }
755 if (maybe_ne (off1, off2))
756 return false;
757 if (deref1 && vro1->opcode == ADDR_EXPR)
758 {
759 memset (&tem1, 0, sizeof (tem1));
760 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
761 tem1.type = TREE_TYPE (tem1.op0);
762 tem1.opcode = TREE_CODE (tem1.op0);
763 vro1 = &tem1;
764 deref1 = false;
765 }
766 if (deref2 && vro2->opcode == ADDR_EXPR)
767 {
768 memset (&tem2, 0, sizeof (tem2));
769 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
770 tem2.type = TREE_TYPE (tem2.op0);
771 tem2.opcode = TREE_CODE (tem2.op0);
772 vro2 = &tem2;
773 deref2 = false;
774 }
775 if (deref1 != deref2)
776 return false;
777 if (!vn_reference_op_eq (vro1, vro2))
778 return false;
779 ++j;
780 ++i;
781 }
782 while (vr1->operands.length () != i
783 || vr2->operands.length () != j);
784
785 return true;
786 }
787
788 /* Copy the operations present in load/store REF into RESULT, a vector of
789 vn_reference_op_s's. */
790
791 static void
792 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
793 {
794 if (TREE_CODE (ref) == TARGET_MEM_REF)
795 {
796 vn_reference_op_s temp;
797
798 result->reserve (3);
799
800 memset (&temp, 0, sizeof (temp));
801 temp.type = TREE_TYPE (ref);
802 temp.opcode = TREE_CODE (ref);
803 temp.op0 = TMR_INDEX (ref);
804 temp.op1 = TMR_STEP (ref);
805 temp.op2 = TMR_OFFSET (ref);
806 temp.off = -1;
807 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
808 temp.base = MR_DEPENDENCE_BASE (ref);
809 result->quick_push (temp);
810
811 memset (&temp, 0, sizeof (temp));
812 temp.type = NULL_TREE;
813 temp.opcode = ERROR_MARK;
814 temp.op0 = TMR_INDEX2 (ref);
815 temp.off = -1;
816 result->quick_push (temp);
817
818 memset (&temp, 0, sizeof (temp));
819 temp.type = NULL_TREE;
820 temp.opcode = TREE_CODE (TMR_BASE (ref));
821 temp.op0 = TMR_BASE (ref);
822 temp.off = -1;
823 result->quick_push (temp);
824 return;
825 }
826
827 /* For non-calls, store the information that makes up the address. */
828 tree orig = ref;
829 while (ref)
830 {
831 vn_reference_op_s temp;
832
833 memset (&temp, 0, sizeof (temp));
834 temp.type = TREE_TYPE (ref);
835 temp.opcode = TREE_CODE (ref);
836 temp.off = -1;
837
838 switch (temp.opcode)
839 {
840 case MODIFY_EXPR:
841 temp.op0 = TREE_OPERAND (ref, 1);
842 break;
843 case WITH_SIZE_EXPR:
844 temp.op0 = TREE_OPERAND (ref, 1);
845 temp.off = 0;
846 break;
847 case MEM_REF:
848 /* The base address gets its own vn_reference_op_s structure. */
849 temp.op0 = TREE_OPERAND (ref, 1);
850 if (!mem_ref_offset (ref).to_shwi (&temp.off))
851 temp.off = -1;
852 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
853 temp.base = MR_DEPENDENCE_BASE (ref);
854 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
855 break;
856 case BIT_FIELD_REF:
857 /* Record bits, position and storage order. */
858 temp.op0 = TREE_OPERAND (ref, 1);
859 temp.op1 = TREE_OPERAND (ref, 2);
860 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
861 temp.off = -1;
862 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
863 break;
864 case COMPONENT_REF:
865 /* The field decl is enough to unambiguously specify the field,
866 a matching type is not necessary and a mismatching type
867 is always a spurious difference. */
868 temp.type = NULL_TREE;
869 temp.op0 = TREE_OPERAND (ref, 1);
870 temp.op1 = TREE_OPERAND (ref, 2);
871 {
872 tree this_offset = component_ref_field_offset (ref);
873 if (this_offset
874 && poly_int_tree_p (this_offset))
875 {
876 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
877 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
878 {
879 poly_offset_int off
880 = (wi::to_poly_offset (this_offset)
881 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
882 /* Prohibit value-numbering zero offset components
883 of addresses the same before the pass folding
884 __builtin_object_size has had a chance to run
885 (checking cfun->after_inlining does the
886 trick here). */
887 if (TREE_CODE (orig) != ADDR_EXPR
888 || maybe_ne (off, 0)
889 || cfun->after_inlining)
890 off.to_shwi (&temp.off);
891 }
892 }
893 }
894 break;
895 case ARRAY_RANGE_REF:
896 case ARRAY_REF:
897 {
898 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
899 /* Record index as operand. */
900 temp.op0 = TREE_OPERAND (ref, 1);
901 /* Always record lower bounds and element size. */
902 temp.op1 = array_ref_low_bound (ref);
903 /* But record element size in units of the type alignment. */
904 temp.op2 = TREE_OPERAND (ref, 3);
905 temp.align = eltype->type_common.align;
906 if (! temp.op2)
907 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
908 size_int (TYPE_ALIGN_UNIT (eltype)));
909 if (poly_int_tree_p (temp.op0)
910 && poly_int_tree_p (temp.op1)
911 && TREE_CODE (temp.op2) == INTEGER_CST)
912 {
913 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
914 - wi::to_poly_offset (temp.op1))
915 * wi::to_offset (temp.op2)
916 * vn_ref_op_align_unit (&temp));
917 off.to_shwi (&temp.off);
918 }
919 }
920 break;
921 case VAR_DECL:
922 if (DECL_HARD_REGISTER (ref))
923 {
924 temp.op0 = ref;
925 break;
926 }
927 /* Fallthru. */
928 case PARM_DECL:
929 case CONST_DECL:
930 case RESULT_DECL:
931 /* Canonicalize decls to MEM[&decl] which is what we end up with
932 when valueizing MEM[ptr] with ptr = &decl. */
933 temp.opcode = MEM_REF;
934 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
935 temp.off = 0;
936 result->safe_push (temp);
937 temp.opcode = ADDR_EXPR;
938 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
939 temp.type = TREE_TYPE (temp.op0);
940 temp.off = -1;
941 break;
942 case STRING_CST:
943 case INTEGER_CST:
944 case COMPLEX_CST:
945 case VECTOR_CST:
946 case REAL_CST:
947 case FIXED_CST:
948 case CONSTRUCTOR:
949 case SSA_NAME:
950 temp.op0 = ref;
951 break;
952 case ADDR_EXPR:
953 if (is_gimple_min_invariant (ref))
954 {
955 temp.op0 = ref;
956 break;
957 }
958 break;
959 /* These are only interesting for their operands, their
960 existence, and their type. They will never be the last
961 ref in the chain of references (i.e. they require an
962 operand), so we don't have to put anything
963 for op* as it will be handled by the iteration. */
964 case REALPART_EXPR:
965 temp.off = 0;
966 break;
967 case VIEW_CONVERT_EXPR:
968 temp.off = 0;
969 temp.reverse = storage_order_barrier_p (ref);
970 break;
971 case IMAGPART_EXPR:
972 /* This is only interesting for its constant offset. */
973 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
974 break;
975 default:
976 gcc_unreachable ();
977 }
978 result->safe_push (temp);
979
980 if (REFERENCE_CLASS_P (ref)
981 || TREE_CODE (ref) == MODIFY_EXPR
982 || TREE_CODE (ref) == WITH_SIZE_EXPR
983 || (TREE_CODE (ref) == ADDR_EXPR
984 && !is_gimple_min_invariant (ref)))
985 ref = TREE_OPERAND (ref, 0);
986 else
987 ref = NULL_TREE;
988 }
989 }
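
/* As a rough illustration of the canonicalization above, a reference
   like s.x[i_1] (with s a plain VAR_DECL; names made up) is recorded
   outermost first roughly as

     ARRAY_REF     { op0 = i_1, op1 = low bound, op2 = element size }
     COMPONENT_REF { op0 = FIELD_DECL x }
     MEM_REF       { op0 = 0 }
     ADDR_EXPR     { op0 = &s }

   where the trailing MEM_REF/ADDR_EXPR pair results from canonicalizing
   the decl to MEM[&s].  */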
990
991 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
992 operands in *OPS, the reference alias set SET and the reference type TYPE.
993 Return true if something useful was produced. */
994
995 bool
996 ao_ref_init_from_vn_reference (ao_ref *ref,
997 alias_set_type set, tree type,
998 vec<vn_reference_op_s> ops)
999 {
1000 vn_reference_op_t op;
1001 unsigned i;
1002 tree base = NULL_TREE;
1003 tree *op0_p = &base;
1004 poly_offset_int offset = 0;
1005 poly_offset_int max_size;
1006 poly_offset_int size = -1;
1007 tree size_tree = NULL_TREE;
1008 alias_set_type base_alias_set = -1;
1009
1010 /* First get the final access size from just the outermost expression. */
1011 op = &ops[0];
1012 if (op->opcode == COMPONENT_REF)
1013 size_tree = DECL_SIZE (op->op0);
1014 else if (op->opcode == BIT_FIELD_REF)
1015 size_tree = op->op0;
1016 else
1017 {
1018 machine_mode mode = TYPE_MODE (type);
1019 if (mode == BLKmode)
1020 size_tree = TYPE_SIZE (type);
1021 else
1022 size = GET_MODE_BITSIZE (mode);
1023 }
1024 if (size_tree != NULL_TREE
1025 && poly_int_tree_p (size_tree))
1026 size = wi::to_poly_offset (size_tree);
1027
1028 /* Initially, maxsize is the same as the accessed element size.
1029 In the following it will only grow (or become -1). */
1030 max_size = size;
1031
1032 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1033 and find the ultimate containing object. */
1034 FOR_EACH_VEC_ELT (ops, i, op)
1035 {
1036 switch (op->opcode)
1037 {
1038 /* These may be in the reference ops, but we cannot do anything
1039 sensible with them here. */
1040 case ADDR_EXPR:
1041 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1042 if (base != NULL_TREE
1043 && TREE_CODE (base) == MEM_REF
1044 && op->op0
1045 && DECL_P (TREE_OPERAND (op->op0, 0)))
1046 {
1047 vn_reference_op_t pop = &ops[i-1];
1048 base = TREE_OPERAND (op->op0, 0);
1049 if (known_eq (pop->off, -1))
1050 {
1051 max_size = -1;
1052 offset = 0;
1053 }
1054 else
1055 offset += pop->off * BITS_PER_UNIT;
1056 op0_p = NULL;
1057 break;
1058 }
1059 /* Fallthru. */
1060 case CALL_EXPR:
1061 return false;
1062
1063 /* Record the base objects. */
1064 case MEM_REF:
1065 base_alias_set = get_deref_alias_set (op->op0);
1066 *op0_p = build2 (MEM_REF, op->type,
1067 NULL_TREE, op->op0);
1068 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1069 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1070 op0_p = &TREE_OPERAND (*op0_p, 0);
1071 break;
1072
1073 case VAR_DECL:
1074 case PARM_DECL:
1075 case RESULT_DECL:
1076 case SSA_NAME:
1077 *op0_p = op->op0;
1078 op0_p = NULL;
1079 break;
1080
1081 /* And now the usual component-reference style ops. */
1082 case BIT_FIELD_REF:
1083 offset += wi::to_poly_offset (op->op1);
1084 break;
1085
1086 case COMPONENT_REF:
1087 {
1088 tree field = op->op0;
1089 /* We do not have a complete COMPONENT_REF tree here so we
1090 cannot use component_ref_field_offset. Do the interesting
1091 parts manually. */
1092 tree this_offset = DECL_FIELD_OFFSET (field);
1093
1094 if (op->op1 || !poly_int_tree_p (this_offset))
1095 max_size = -1;
1096 else
1097 {
1098 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1099 << LOG2_BITS_PER_UNIT);
1100 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1101 offset += woffset;
1102 }
1103 break;
1104 }
1105
1106 case ARRAY_RANGE_REF:
1107 case ARRAY_REF:
1108 /* We recorded the lower bound and the element size. */
1109 if (!poly_int_tree_p (op->op0)
1110 || !poly_int_tree_p (op->op1)
1111 || TREE_CODE (op->op2) != INTEGER_CST)
1112 max_size = -1;
1113 else
1114 {
1115 poly_offset_int woffset
1116 = wi::sext (wi::to_poly_offset (op->op0)
1117 - wi::to_poly_offset (op->op1),
1118 TYPE_PRECISION (TREE_TYPE (op->op0)));
1119 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1120 woffset <<= LOG2_BITS_PER_UNIT;
1121 offset += woffset;
1122 }
1123 break;
1124
1125 case REALPART_EXPR:
1126 break;
1127
1128 case IMAGPART_EXPR:
1129 offset += size;
1130 break;
1131
1132 case VIEW_CONVERT_EXPR:
1133 break;
1134
1135 case STRING_CST:
1136 case INTEGER_CST:
1137 case COMPLEX_CST:
1138 case VECTOR_CST:
1139 case REAL_CST:
1140 case CONSTRUCTOR:
1141 case CONST_DECL:
1142 return false;
1143
1144 default:
1145 return false;
1146 }
1147 }
1148
1149 if (base == NULL_TREE)
1150 return false;
1151
1152 ref->ref = NULL_TREE;
1153 ref->base = base;
1154 ref->ref_alias_set = set;
1155 if (base_alias_set != -1)
1156 ref->base_alias_set = base_alias_set;
1157 else
1158 ref->base_alias_set = get_alias_set (base);
1159 /* We discount volatiles from value-numbering elsewhere. */
1160 ref->volatile_p = false;
1161
1162 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1163 {
1164 ref->offset = 0;
1165 ref->size = -1;
1166 ref->max_size = -1;
1167 return true;
1168 }
1169
1170 if (!offset.to_shwi (&ref->offset))
1171 {
1172 ref->offset = 0;
1173 ref->max_size = -1;
1174 return true;
1175 }
1176
1177 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1178 ref->max_size = -1;
1179
1180 return true;
1181 }
1182
1183 /* Copy the operations present in the call statement CALL into RESULT, a vector of
1184 vn_reference_op_s's. */
1185
1186 static void
1187 copy_reference_ops_from_call (gcall *call,
1188 vec<vn_reference_op_s> *result)
1189 {
1190 vn_reference_op_s temp;
1191 unsigned i;
1192 tree lhs = gimple_call_lhs (call);
1193 int lr;
1194
1195 /* If two calls have a different non-SSA LHS, their VDEF value numbers should be
1196 different. By adding the LHS to the vector here, we ensure that the
1197 hashcode is different, guaranteeing a different value number. */
1198 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1199 {
1200 memset (&temp, 0, sizeof (temp));
1201 temp.opcode = MODIFY_EXPR;
1202 temp.type = TREE_TYPE (lhs);
1203 temp.op0 = lhs;
1204 temp.off = -1;
1205 result->safe_push (temp);
1206 }
1207
1208 /* Copy the type, opcode, function, static chain and EH region, if any. */
1209 memset (&temp, 0, sizeof (temp));
1210 temp.type = gimple_call_fntype (call);
1211 temp.opcode = CALL_EXPR;
1212 temp.op0 = gimple_call_fn (call);
1213 temp.op1 = gimple_call_chain (call);
1214 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1215 temp.op2 = size_int (lr);
1216 temp.off = -1;
1217 result->safe_push (temp);
1218
1219 /* Copy the call arguments. As they can be references as well,
1220 just chain them together. */
1221 for (i = 0; i < gimple_call_num_args (call); ++i)
1222 {
1223 tree callarg = gimple_call_arg (call, i);
1224 copy_reference_ops_from_ref (callarg, result);
1225 }
1226 }
1227
1228 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1229 *I_P to point to the last element of the replacement. */
1230 static bool
1231 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1232 unsigned int *i_p)
1233 {
1234 unsigned int i = *i_p;
1235 vn_reference_op_t op = &(*ops)[i];
1236 vn_reference_op_t mem_op = &(*ops)[i - 1];
1237 tree addr_base;
1238 poly_int64 addr_offset = 0;
1239
1240 /* The only thing we have to do is add the offset of .foo.bar within
1241 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1242 address with &OBJ. */
1243 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1244 &addr_offset);
1245 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1246 if (addr_base != TREE_OPERAND (op->op0, 0))
1247 {
1248 poly_offset_int off
1249 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1250 SIGNED)
1251 + addr_offset);
1252 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1253 op->op0 = build_fold_addr_expr (addr_base);
1254 if (tree_fits_shwi_p (mem_op->op0))
1255 mem_op->off = tree_to_shwi (mem_op->op0);
1256 else
1257 mem_op->off = -1;
1258 return true;
1259 }
1260 return false;
1261 }
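
/* For example (offsets made up): given ops ... MEM_REF { op0 = 4 },
   ADDR_EXPR { &s.a.b } ... where s.a.b lives at byte offset 8 within s,
   the address is replaced by &s and the MEM_REF offset becomes 12.  */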
1262
1263 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1264 *I_P to point to the last element of the replacement. */
1265 static bool
1266 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1267 unsigned int *i_p)
1268 {
1269 unsigned int i = *i_p;
1270 vn_reference_op_t op = &(*ops)[i];
1271 vn_reference_op_t mem_op = &(*ops)[i - 1];
1272 gimple *def_stmt;
1273 enum tree_code code;
1274 poly_offset_int off;
1275
1276 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1277 if (!is_gimple_assign (def_stmt))
1278 return false;
1279
1280 code = gimple_assign_rhs_code (def_stmt);
1281 if (code != ADDR_EXPR
1282 && code != POINTER_PLUS_EXPR)
1283 return false;
1284
1285 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1286
1287 /* The only thing we have to do is add the offset of .foo.bar within
1288 &OBJ.foo.bar to the preceding MEM_REF offset and replace the
1289 address with &OBJ. */
1290 if (code == ADDR_EXPR)
1291 {
1292 tree addr, addr_base;
1293 poly_int64 addr_offset;
1294
1295 addr = gimple_assign_rhs1 (def_stmt);
1296 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1297 &addr_offset);
1298 /* If that didn't work because the address isn't invariant, propagate
1299 the reference tree from the address operation in case the current
1300 dereference isn't offset. */
1301 if (!addr_base
1302 && *i_p == ops->length () - 1
1303 && known_eq (off, 0)
1304 /* This makes us disable this transform for PRE where the
1305 reference ops might be also used for code insertion which
1306 is invalid. */
1307 && default_vn_walk_kind == VN_WALKREWRITE)
1308 {
1309 auto_vec<vn_reference_op_s, 32> tem;
1310 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1311 /* Make sure to preserve TBAA info. The only objects not
1312 wrapped in MEM_REFs that can have their address taken are
1313 STRING_CSTs. */
1314 if (tem.length () >= 2
1315 && tem[tem.length () - 2].opcode == MEM_REF)
1316 {
1317 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1318 new_mem_op->op0
1319 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1320 wi::to_poly_wide (new_mem_op->op0));
1321 }
1322 else
1323 gcc_assert (tem.last ().opcode == STRING_CST);
1324 ops->pop ();
1325 ops->pop ();
1326 ops->safe_splice (tem);
1327 --*i_p;
1328 return true;
1329 }
1330 if (!addr_base
1331 || TREE_CODE (addr_base) != MEM_REF
1332 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1334 return false;
1335
1336 off += addr_offset;
1337 off += mem_ref_offset (addr_base);
1338 op->op0 = TREE_OPERAND (addr_base, 0);
1339 }
1340 else
1341 {
1342 tree ptr, ptroff;
1343 ptr = gimple_assign_rhs1 (def_stmt);
1344 ptroff = gimple_assign_rhs2 (def_stmt);
1345 if (TREE_CODE (ptr) != SSA_NAME
1346 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1347 /* Make sure to not endlessly recurse.
1348 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1349 happen when we value-number a PHI to its backedge value. */
1350 || SSA_VAL (ptr) == op->op0
1351 || !poly_int_tree_p (ptroff))
1352 return false;
1353
1354 off += wi::to_poly_offset (ptroff);
1355 op->op0 = ptr;
1356 }
1357
1358 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1359 if (tree_fits_shwi_p (mem_op->op0))
1360 mem_op->off = tree_to_shwi (mem_op->op0);
1361 else
1362 mem_op->off = -1;
1363 /* ??? Can end up with endless recursion here!?
1364 gcc.c-torture/execute/strcmp-1.c */
1365 if (TREE_CODE (op->op0) == SSA_NAME)
1366 op->op0 = SSA_VAL (op->op0);
1367 if (TREE_CODE (op->op0) != SSA_NAME)
1368 op->opcode = TREE_CODE (op->op0);
1369
1370 /* And recurse. */
1371 if (TREE_CODE (op->op0) == SSA_NAME)
1372 vn_reference_maybe_forwprop_address (ops, i_p);
1373 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1374 vn_reference_fold_indirect (ops, i_p);
1375 return true;
1376 }
1377
1378 /* Optimize the reference REF to a constant if possible or return
1379 NULL_TREE if not. */
1380
1381 tree
1382 fully_constant_vn_reference_p (vn_reference_t ref)
1383 {
1384 vec<vn_reference_op_s> operands = ref->operands;
1385 vn_reference_op_t op;
1386
1387 /* Try to simplify the translated expression if it is
1388 a call to a builtin function with at most two arguments. */
1389 op = &operands[0];
1390 if (op->opcode == CALL_EXPR
1391 && TREE_CODE (op->op0) == ADDR_EXPR
1392 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1393 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1394 && operands.length () >= 2
1395 && operands.length () <= 3)
1396 {
1397 vn_reference_op_t arg0, arg1 = NULL;
1398 bool anyconst = false;
1399 arg0 = &operands[1];
1400 if (operands.length () > 2)
1401 arg1 = &operands[2];
1402 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1403 || (arg0->opcode == ADDR_EXPR
1404 && is_gimple_min_invariant (arg0->op0)))
1405 anyconst = true;
1406 if (arg1
1407 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1408 || (arg1->opcode == ADDR_EXPR
1409 && is_gimple_min_invariant (arg1->op0))))
1410 anyconst = true;
1411 if (anyconst)
1412 {
1413 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1414 arg1 ? 2 : 1,
1415 arg0->op0,
1416 arg1 ? arg1->op0 : NULL);
1417 if (folded
1418 && TREE_CODE (folded) == NOP_EXPR)
1419 folded = TREE_OPERAND (folded, 0);
1420 if (folded
1421 && is_gimple_min_invariant (folded))
1422 return folded;
1423 }
1424 }
1425
1426 /* Simplify reads from constants or constant initializers. */
1427 else if (BITS_PER_UNIT == 8
1428 && COMPLETE_TYPE_P (ref->type)
1429 && is_gimple_reg_type (ref->type))
1430 {
1431 poly_int64 off = 0;
1432 HOST_WIDE_INT size;
1433 if (INTEGRAL_TYPE_P (ref->type))
1434 size = TYPE_PRECISION (ref->type);
1435 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1436 size = tree_to_shwi (TYPE_SIZE (ref->type));
1437 else
1438 return NULL_TREE;
1439 if (size % BITS_PER_UNIT != 0
1440 || size > MAX_BITSIZE_MODE_ANY_MODE)
1441 return NULL_TREE;
1442 size /= BITS_PER_UNIT;
1443 unsigned i;
1444 for (i = 0; i < operands.length (); ++i)
1445 {
1446 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1447 {
1448 ++i;
1449 break;
1450 }
1451 if (known_eq (operands[i].off, -1))
1452 return NULL_TREE;
1453 off += operands[i].off;
1454 if (operands[i].opcode == MEM_REF)
1455 {
1456 ++i;
1457 break;
1458 }
1459 }
1460 vn_reference_op_t base = &operands[--i];
1461 tree ctor = error_mark_node;
1462 tree decl = NULL_TREE;
1463 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1464 ctor = base->op0;
1465 else if (base->opcode == MEM_REF
1466 && base[1].opcode == ADDR_EXPR
1467 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1468 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1469 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1470 {
1471 decl = TREE_OPERAND (base[1].op0, 0);
1472 if (TREE_CODE (decl) == STRING_CST)
1473 ctor = decl;
1474 else
1475 ctor = ctor_for_folding (decl);
1476 }
1477 if (ctor == NULL_TREE)
1478 return build_zero_cst (ref->type);
1479 else if (ctor != error_mark_node)
1480 {
1481 HOST_WIDE_INT const_off;
1482 if (decl)
1483 {
1484 tree res = fold_ctor_reference (ref->type, ctor,
1485 off * BITS_PER_UNIT,
1486 size * BITS_PER_UNIT, decl);
1487 if (res)
1488 {
1489 STRIP_USELESS_TYPE_CONVERSION (res);
1490 if (is_gimple_min_invariant (res))
1491 return res;
1492 }
1493 }
1494 else if (off.is_constant (&const_off))
1495 {
1496 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1497 int len = native_encode_expr (ctor, buf, size, const_off);
1498 if (len > 0)
1499 return native_interpret_expr (ref->type, buf, len);
1500 }
1501 }
1502 }
1503
1504 return NULL_TREE;
1505 }
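
/* For instance, assuming 4-byte int, a read of int size from
   MEM[&tab + 8] with

     static const int tab[4] = { 1, 2, 3, 5 };

   is folded to the constant 3 via ctor_for_folding and
   fold_ctor_reference above (declaration and values made up).  */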
1506
1507 /* Return true if OPS contain a storage order barrier. */
1508
1509 static bool
1510 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1511 {
1512 vn_reference_op_t op;
1513 unsigned i;
1514
1515 FOR_EACH_VEC_ELT (ops, i, op)
1516 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1517 return true;
1518
1519 return false;
1520 }
1521
1522 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1523 structures into their value numbers. This is done in-place, and
1524 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1525 whether any operands were valueized. */
1526
1527 static vec<vn_reference_op_s>
1528 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1529 bool with_avail = false)
1530 {
1531 vn_reference_op_t vro;
1532 unsigned int i;
1533
1534 *valueized_anything = false;
1535
1536 FOR_EACH_VEC_ELT (orig, i, vro)
1537 {
1538 if (vro->opcode == SSA_NAME
1539 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1540 {
1541 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1542 if (tem != vro->op0)
1543 {
1544 *valueized_anything = true;
1545 vro->op0 = tem;
1546 }
1547 /* If it transforms from an SSA_NAME to a constant, update
1548 the opcode. */
1549 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1550 vro->opcode = TREE_CODE (vro->op0);
1551 }
1552 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1553 {
1554 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1555 if (tem != vro->op1)
1556 {
1557 *valueized_anything = true;
1558 vro->op1 = tem;
1559 }
1560 }
1561 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1562 {
1563 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1564 if (tem != vro->op2)
1565 {
1566 *valueized_anything = true;
1567 vro->op2 = tem;
1568 }
1569 }
1570 /* If it transforms from an SSA_NAME to an address, fold with
1571 a preceding indirect reference. */
1572 if (i > 0
1573 && vro->op0
1574 && TREE_CODE (vro->op0) == ADDR_EXPR
1575 && orig[i - 1].opcode == MEM_REF)
1576 {
1577 if (vn_reference_fold_indirect (&orig, &i))
1578 *valueized_anything = true;
1579 }
1580 else if (i > 0
1581 && vro->opcode == SSA_NAME
1582 && orig[i - 1].opcode == MEM_REF)
1583 {
1584 if (vn_reference_maybe_forwprop_address (&orig, &i))
1585 *valueized_anything = true;
1586 }
1587 /* If it transforms a non-constant ARRAY_REF into a constant
1588 one, adjust the constant offset. */
1589 else if (vro->opcode == ARRAY_REF
1590 && known_eq (vro->off, -1)
1591 && poly_int_tree_p (vro->op0)
1592 && poly_int_tree_p (vro->op1)
1593 && TREE_CODE (vro->op2) == INTEGER_CST)
1594 {
1595 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1596 - wi::to_poly_offset (vro->op1))
1597 * wi::to_offset (vro->op2)
1598 * vn_ref_op_align_unit (vro));
1599 off.to_shwi (&vro->off);
1600 }
1601 }
1602
1603 return orig;
1604 }
1605
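/* Valueize the reference operands in ORIG, discarding whether anything
   was changed. */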
1606 static vec<vn_reference_op_s>
1607 valueize_refs (vec<vn_reference_op_s> orig)
1608 {
1609 bool tem;
1610 return valueize_refs_1 (orig, &tem);
1611 }
1612
1613 static vec<vn_reference_op_s> shared_lookup_references;
1614
1615 /* Create a vector of vn_reference_op_s structures from REF, a
1616 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1617 this function. *VALUEIZED_ANYTHING will specify whether any
1618 operands were valueized. */
1619
1620 static vec<vn_reference_op_s>
1621 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1622 {
1623 if (!ref)
1624 return vNULL;
1625 shared_lookup_references.truncate (0);
1626 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1627 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1628 valueized_anything);
1629 return shared_lookup_references;
1630 }
1631
1632 /* Create a vector of vn_reference_op_s structures from CALL, a
1633 call statement. The vector is shared among all callers of
1634 this function. */
1635
1636 static vec<vn_reference_op_s>
1637 valueize_shared_reference_ops_from_call (gcall *call)
1638 {
1639 if (!call)
1640 return vNULL;
1641 shared_lookup_references.truncate (0);
1642 copy_reference_ops_from_call (call, &shared_lookup_references);
1643 shared_lookup_references = valueize_refs (shared_lookup_references);
1644 return shared_lookup_references;
1645 }
1646
1647 /* Lookup a SCCVN reference operation VR in the current hash table.
1648 Returns the resulting value number if it exists in the hash table,
1649 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1650 vn_reference_t stored in the hashtable if something is found. */
1651
1652 static tree
1653 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1654 {
1655 vn_reference_s **slot;
1656 hashval_t hash;
1657
1658 hash = vr->hashcode;
1659 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1660 if (slot)
1661 {
1662 if (vnresult)
1663 *vnresult = (vn_reference_t)*slot;
1664 return ((vn_reference_t)*slot)->result;
1665 }
1666
1667 return NULL_TREE;
1668 }
1669
1670 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1671 with the current VUSE and performs the expression lookup. */
1672
1673 static void *
1674 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1675 unsigned int cnt, void *vr_)
1676 {
1677 vn_reference_t vr = (vn_reference_t)vr_;
1678 vn_reference_s **slot;
1679 hashval_t hash;
1680
1681 /* This bounds the stmt walks we perform on reference lookups
1682 to O(1) instead of O(N) where N is the number of dominating
1683 stores. */
1684 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1685 return (void *)-1;
1686
1687 if (last_vuse_ptr)
1688 *last_vuse_ptr = vuse;
1689
1690 /* Fixup vuse and hash. */
1691 if (vr->vuse)
1692 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1693 vr->vuse = vuse_ssa_val (vuse);
1694 if (vr->vuse)
1695 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1696
1697 hash = vr->hashcode;
1698 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1699 if (slot)
1700 return *slot;
1701
1702 return NULL;
1703 }
1704
1705 /* Lookup an existing or insert a new vn_reference entry into the
1706 value table for the VUSE, SET, TYPE, OPERANDS reference with
1707 value VALUE, which is either a constant or an SSA name. */
1708
1709 static vn_reference_t
1710 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1711 alias_set_type set,
1712 tree type,
1713 vec<vn_reference_op_s,
1714 va_heap> operands,
1715 tree value)
1716 {
1717 vn_reference_s vr1;
1718 vn_reference_t result;
1719 unsigned value_id;
1720 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1721 vr1.operands = operands;
1722 vr1.type = type;
1723 vr1.set = set;
1724 vr1.hashcode = vn_reference_compute_hash (&vr1);
1725 if (vn_reference_lookup_1 (&vr1, &result))
1726 return result;
1727 if (TREE_CODE (value) == SSA_NAME)
1728 value_id = VN_INFO (value)->value_id;
1729 else
1730 value_id = get_or_alloc_constant_value_id (value);
1731 return vn_reference_insert_pieces (vuse, set, type,
1732 operands.copy (), value, value_id);
1733 }
1734
1735 /* Return a value-number for RCODE OPS... either by looking up an existing
1736 value-number for the simplified result or by inserting the operation if
1737 INSERT is true. */
1738
1739 static tree
1740 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1741 {
1742 tree result = NULL_TREE;
1743 /* We will be creating a value number for
1744 RCODE (OPS...).
1745 So first simplify and lookup this expression to see if it
1746 is already available. */
1747 mprts_hook = vn_lookup_simplify_result;
1748 bool res = false;
1749 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1750 {
1751 case 1:
1752 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1753 break;
1754 case 2:
1755 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1756 break;
1757 case 3:
1758 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1759 break;
1760 }
1761 mprts_hook = NULL;
1762 gimple *new_stmt = NULL;
1763 if (res
1764 && gimple_simplified_result_is_gimple_val (res_op))
1765 {
1766 /* The expression is already available. */
1767 result = res_op->ops[0];
1768 /* Valueize it, simplification returns something in AVAIL only. */
1769 if (TREE_CODE (result) == SSA_NAME)
1770 result = SSA_VAL (result);
1771 }
1772 else
1773 {
1774 tree val = vn_lookup_simplify_result (res_op);
1775 if (!val && insert)
1776 {
1777 gimple_seq stmts = NULL;
1778 result = maybe_push_res_to_seq (res_op, &stmts);
1779 if (result)
1780 {
1781 gcc_assert (gimple_seq_singleton_p (stmts));
1782 new_stmt = gimple_seq_first_stmt (stmts);
1783 }
1784 }
1785 else
1786 /* The expression is already available. */
1787 result = val;
1788 }
1789 if (new_stmt)
1790 {
1791 /* The expression is not yet available, value-number lhs to
1792 the new SSA_NAME we created. */
1793 /* Initialize value-number information properly. */
1794 vn_ssa_aux_t result_info = VN_INFO (result);
1795 result_info->valnum = result;
1796 result_info->value_id = get_next_value_id ();
1797 result_info->visited = 1;
1798 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1799 new_stmt);
1800 result_info->needs_insertion = true;
1801 /* ??? PRE phi-translation inserts NARYs without corresponding
1802 SSA name result. Re-use those but set their result according
1803 to the stmt we just built. */
1804 vn_nary_op_t nary = NULL;
1805 vn_nary_op_lookup_stmt (new_stmt, &nary);
1806 if (nary)
1807 {
1808 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
1809 nary->u.result = gimple_assign_lhs (new_stmt);
1810 }
1811 /* As all "inserted" statements are singleton SCCs, insert
1812 to the valid table. This is strictly needed to
1813 avoid re-generating new value SSA_NAMEs for the same
1814 expression during SCC iteration over and over (the
1815 optimistic table gets cleared after each iteration).
1816 We do not need to insert into the optimistic table, as
1817 lookups there will fall back to the valid table. */
1818 else
1819 {
1820 unsigned int length = vn_nary_length_from_stmt (new_stmt);
1821 vn_nary_op_t vno1
1822 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
1823 vno1->value_id = result_info->value_id;
1824 vno1->length = length;
1825 vno1->predicated_values = 0;
1826 vno1->u.result = result;
1827 init_vn_nary_op_from_stmt (vno1, new_stmt);
1828 vn_nary_op_insert_into (vno1, valid_info->nary, true);
1829 /* Also do not link it into the undo chain. */
1830 last_inserted_nary = vno1->next;
1831 vno1->next = (vn_nary_op_t)(void *)-1;
1832 }
1833 if (dump_file && (dump_flags & TDF_DETAILS))
1834 {
1835 fprintf (dump_file, "Inserting name ");
1836 print_generic_expr (dump_file, result);
1837 fprintf (dump_file, " for expression ");
1838 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1839 fprintf (dump_file, "\n");
1840 }
1841 }
1842 return result;
1843 }
1844
1845 /* Return a value-number for RCODE OPS... either by looking up an existing
1846 value-number for the simplified result or by inserting the operation. */
1847
1848 static tree
1849 vn_nary_build_or_lookup (gimple_match_op *res_op)
1850 {
1851 return vn_nary_build_or_lookup_1 (res_op, true);
1852 }
1853
1854 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1855 its value if present. */
1856
1857 tree
1858 vn_nary_simplify (vn_nary_op_t nary)
1859 {
1860 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1861 return NULL_TREE;
1862 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1863 nary->type, nary->length);
1864 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1865 return vn_nary_build_or_lookup_1 (&op, false);
1866 }
1867
1868 /* Elimination engine. */
1869
1870 class eliminate_dom_walker : public dom_walker
1871 {
1872 public:
1873 eliminate_dom_walker (cdi_direction, bitmap);
1874 ~eliminate_dom_walker ();
1875
1876 virtual edge before_dom_children (basic_block);
1877 virtual void after_dom_children (basic_block);
1878
1879 virtual tree eliminate_avail (basic_block, tree op);
1880 virtual void eliminate_push_avail (basic_block, tree op);
1881 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
1882
1883 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
1884
1885 unsigned eliminate_cleanup (bool region_p = false);
1886
1887 bool do_pre;
1888 unsigned int el_todo;
1889 unsigned int eliminations;
1890 unsigned int insertions;
1891
1892 /* SSA names that had their defs inserted by PRE if do_pre. */
1893 bitmap inserted_exprs;
1894
1895 /* Blocks with statements that have had their EH properties changed. */
1896 bitmap need_eh_cleanup;
1897
1898 /* Blocks with statements that have had their AB properties changed. */
1899 bitmap need_ab_cleanup;
1900
1901 /* Local state for the eliminate domwalk. */
1902 auto_vec<gimple *> to_remove;
1903 auto_vec<gimple *> to_fixup;
1904 auto_vec<tree> avail;
1905 auto_vec<tree> avail_stack;
1906 };
1907
1908 /* Adaptor to the elimination engine using RPO availability. */
1909
1910 class rpo_elim : public eliminate_dom_walker
1911 {
1912 public:
1913 rpo_elim(basic_block entry_)
1914 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
1915 ~rpo_elim();
1916
1917 virtual tree eliminate_avail (basic_block, tree op);
1918
1919 virtual void eliminate_push_avail (basic_block, tree);
1920
1921 basic_block entry;
1922 /* Instead of having a local availability lattice for each
1923 basic-block, with availability at X defined as the union of
1924 the local availabilities at X and its dominators, we turn
1925 this upside down and track availability per value, since
1926 values are usually made available at very
1927 few points (at least one).
1928 So we have a value -> vec<location, leader> map where
1929 LOCATION specifies the basic-block in which LEADER is made
1930 available for VALUE. We push to this vector in RPO
1931 order, thus for iteration we can simply pop the last
1932 entries.
1933 LOCATION is the basic-block index and LEADER is its
1934 SSA name version. */
1935 /* ??? We'd like to use auto_vec here with embedded storage
1936 but that doesn't play well until we can provide move
1937 constructors and use std::move on hash-table expansion.
1938 So for now this is a bit more expensive than necessary.
1939 We eventually want to switch to a chaining scheme like
1940 the one used for hashtable entries for unwinding, which would
1941 make it possible to put the vector into the vn_ssa_aux structure. */
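/* Illustrative sketch (not from the original sources): if the value of
   _3 = a_1 + b_2 is first made available in basic-block 4 by SSA name
   version 3, m_rpo_avail maps that value to { (4, 3) }.  Entries are
   pushed in RPO order, so a later query scans from the back for the
   first entry whose block dominates the querying block.  */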
1942 typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
1943 rpo_avail_t m_rpo_avail;
1944 };
1945
1946 /* Global RPO state for access from hooks. */
1947 static rpo_elim *rpo_avail;
1948 basic_block vn_context_bb;
1949
1950 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1951 from the statement defining VUSE and if not successful tries to
1952 translate *REF and VR_ through an aggregate copy at the definition
1953 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1954 of *REF and *VR. If only disambiguation was performed then
1955 *DISAMBIGUATE_ONLY is set to true. */
1956
1957 static void *
1958 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1959 bool *disambiguate_only)
1960 {
1961 vn_reference_t vr = (vn_reference_t)vr_;
1962 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1963 tree base = ao_ref_base (ref);
1964 HOST_WIDE_INT offseti, maxsizei;
1965 static vec<vn_reference_op_s> lhs_ops;
1966 ao_ref lhs_ref;
1967 bool lhs_ref_ok = false;
1968 poly_int64 copy_size;
1969
1970 /* First try to disambiguate after value-replacing in the definitions LHS. */
1971 if (is_gimple_assign (def_stmt))
1972 {
1973 tree lhs = gimple_assign_lhs (def_stmt);
1974 bool valueized_anything = false;
1975 /* Avoid re-allocation overhead. */
1976 lhs_ops.truncate (0);
1977 basic_block saved_rpo_bb = vn_context_bb;
1978 vn_context_bb = gimple_bb (def_stmt);
1979 copy_reference_ops_from_ref (lhs, &lhs_ops);
1980 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
1981 vn_context_bb = saved_rpo_bb;
1982 if (valueized_anything)
1983 {
1984 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1985 get_alias_set (lhs),
1986 TREE_TYPE (lhs), lhs_ops);
1987 if (lhs_ref_ok
1988 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1989 {
1990 *disambiguate_only = true;
1991 return NULL;
1992 }
1993 }
1994 else
1995 {
1996 ao_ref_init (&lhs_ref, lhs);
1997 lhs_ref_ok = true;
1998 }
1999
2000 /* If we reach a clobbering statement try to skip it and see if
2001 we find a VN result with exactly the same value as the
2002 possible clobber. In this case we can ignore the clobber
2003 and return the found value.
2004 Note that we don't need to worry about partial overlapping
2005 accesses as we then can use TBAA to disambiguate against the
2006 clobbering statement when looking up a load (thus the
2007 VN_WALKREWRITE guard). */
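/* Illustrative example (not from the original sources): when looking up
   a load of *p_1 that walks over the store
     *p_1 = x_3;
   and the same lookup from before the store yields the value x_3, the
   store is not a real clobber for this lookup and the old VN result is
   returned.  */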
2008 if (vn_walk_kind == VN_WALKREWRITE
2009 && is_gimple_reg_type (TREE_TYPE (lhs))
2010 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2011 /* The overlap restriction breaks down when either access
2012 alias-set is zero. Still for accesses of the size of
2013 an addressable unit there can be no overlaps. Overlaps
2014 between different union members are not an issue since
2015 activation of a union member via a store makes the
2016 values of untouched bytes unspecified. */
2017 && (known_eq (ref->size, BITS_PER_UNIT)
2018 || (get_alias_set (lhs) != 0
2019 && ao_ref_alias_set (ref) != 0)))
2020 {
2021 tree *saved_last_vuse_ptr = last_vuse_ptr;
2022 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2023 last_vuse_ptr = NULL;
2024 tree saved_vuse = vr->vuse;
2025 hashval_t saved_hashcode = vr->hashcode;
2026 void *res = vn_reference_lookup_2 (ref,
2027 gimple_vuse (def_stmt), 0, vr);
2028 /* Need to restore vr->vuse and vr->hashcode. */
2029 vr->vuse = saved_vuse;
2030 vr->hashcode = saved_hashcode;
2031 last_vuse_ptr = saved_last_vuse_ptr;
2032 if (res && res != (void *)-1)
2033 {
2034 vn_reference_t vnresult = (vn_reference_t) res;
2035 if (vnresult->result
2036 && operand_equal_p (vnresult->result,
2037 gimple_assign_rhs1 (def_stmt), 0))
2038 return res;
2039 }
2040 }
2041 }
2042 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2043 && gimple_call_num_args (def_stmt) <= 4)
2044 {
2045 /* For builtin calls valueize its arguments and call the
2046 alias oracle again. Valueization may improve points-to
2047 info of pointers and constify size and position arguments.
2048 Originally this was motivated by PR61034 which has
2049 conditional calls to free falsely clobbering ref because
2050 of imprecise points-to info of the argument. */
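/* Illustrative example (assumed, in the spirit of PR61034):
     tmp_1 = *p_2;
     if (cond_3)
       free (q_4);
     tmp_5 = *p_2;
   valueizing q_4 may show it cannot point to *p_2, so the call does not
   clobber the load and both loads receive the same value number.  */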
2051 tree oldargs[4];
2052 bool valueized_anything = false;
2053 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2054 {
2055 oldargs[i] = gimple_call_arg (def_stmt, i);
2056 tree val = vn_valueize (oldargs[i]);
2057 if (val != oldargs[i])
2058 {
2059 gimple_call_set_arg (def_stmt, i, val);
2060 valueized_anything = true;
2061 }
2062 }
2063 if (valueized_anything)
2064 {
2065 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2066 ref);
2067 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2068 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2069 if (!res)
2070 {
2071 *disambiguate_only = true;
2072 return NULL;
2073 }
2074 }
2075 }
2076
2077 if (*disambiguate_only)
2078 return (void *)-1;
2079
2080 /* If we cannot constrain the size of the reference we cannot
2081 test if anything kills it. */
2082 if (!ref->max_size_known_p ())
2083 return (void *)-1;
2084
2085 poly_int64 offset = ref->offset;
2086 poly_int64 maxsize = ref->max_size;
2087
2088 /* We can't deduce anything useful from clobbers. */
2089 if (gimple_clobber_p (def_stmt))
2090 return (void *)-1;
2091
2092 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2093 from that definition.
2094 1) Memset. */
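/* Illustrative example (not from the original sources):
     memset (&s, 0, sizeof (s));
     ... = s.f;
   a load fully covered by the memset destination gets its value derived
   from the replicated byte value, here zero.  */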
2095 if (is_gimple_reg_type (vr->type)
2096 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2097 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2098 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2099 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2100 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2101 && offset.is_constant (&offseti)
2102 && offseti % BITS_PER_UNIT == 0))
2103 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2104 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2105 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2106 {
2107 tree base2;
2108 poly_int64 offset2, size2, maxsize2;
2109 bool reverse;
2110 tree ref2 = gimple_call_arg (def_stmt, 0);
2111 if (TREE_CODE (ref2) == SSA_NAME)
2112 {
2113 ref2 = SSA_VAL (ref2);
2114 if (TREE_CODE (ref2) == SSA_NAME
2115 && (TREE_CODE (base) != MEM_REF
2116 || TREE_OPERAND (base, 0) != ref2))
2117 {
2118 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2119 if (gimple_assign_single_p (def_stmt)
2120 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2121 ref2 = gimple_assign_rhs1 (def_stmt);
2122 }
2123 }
2124 if (TREE_CODE (ref2) == ADDR_EXPR)
2125 {
2126 ref2 = TREE_OPERAND (ref2, 0);
2127 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2128 &reverse);
2129 if (!known_size_p (maxsize2)
2130 || !known_eq (maxsize2, size2)
2131 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2132 return (void *)-1;
2133 }
2134 else if (TREE_CODE (ref2) == SSA_NAME)
2135 {
2136 poly_int64 soff;
2137 if (TREE_CODE (base) != MEM_REF
2138 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2139 return (void *)-1;
2140 offset += soff;
2141 offset2 = 0;
2142 if (TREE_OPERAND (base, 0) != ref2)
2143 {
2144 gimple *def = SSA_NAME_DEF_STMT (ref2);
2145 if (is_gimple_assign (def)
2146 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2147 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2148 && poly_int_tree_p (gimple_assign_rhs2 (def))
2149 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2150 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2151 {
2152 ref2 = gimple_assign_rhs1 (def);
2153 if (TREE_CODE (ref2) == SSA_NAME)
2154 ref2 = SSA_VAL (ref2);
2155 }
2156 else
2157 return (void *)-1;
2158 }
2159 }
2160 else
2161 return (void *)-1;
2162 tree len = gimple_call_arg (def_stmt, 2);
2163 if (known_subrange_p (offset, maxsize, offset2,
2164 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2165 {
2166 tree val;
2167 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2168 val = build_zero_cst (vr->type);
2169 else if (INTEGRAL_TYPE_P (vr->type)
2170 && known_eq (ref->size, 8))
2171 {
2172 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2173 vr->type, gimple_call_arg (def_stmt, 1));
2174 val = vn_nary_build_or_lookup (&res_op);
2175 if (!val
2176 || (TREE_CODE (val) == SSA_NAME
2177 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2178 return (void *)-1;
2179 }
2180 else
2181 {
2182 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2183 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2184 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2185 len);
2186 val = native_interpret_expr (vr->type, buf, len);
2187 if (!val)
2188 return (void *)-1;
2189 }
2190 return vn_reference_lookup_or_insert_for_pieces
2191 (vuse, vr->set, vr->type, vr->operands, val);
2192 }
2193 }
2194
2195 /* 2) Assignment from an empty CONSTRUCTOR. */
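/* Illustrative example (not from the original sources):
     s = {};
     ... = s.f;
   loads covered by the store of the empty CONSTRUCTOR are
   value-numbered to zero.  */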
2196 else if (is_gimple_reg_type (vr->type)
2197 && gimple_assign_single_p (def_stmt)
2198 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2199 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2200 {
2201 tree base2;
2202 poly_int64 offset2, size2, maxsize2;
2203 bool reverse;
2204 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2205 &offset2, &size2, &maxsize2, &reverse);
2206 if (known_size_p (maxsize2)
2207 && known_eq (maxsize2, size2)
2208 && operand_equal_p (base, base2, 0)
2209 && known_subrange_p (offset, maxsize, offset2, size2))
2210 {
2211 tree val = build_zero_cst (vr->type);
2212 return vn_reference_lookup_or_insert_for_pieces
2213 (vuse, vr->set, vr->type, vr->operands, val);
2214 }
2215 }
2216
2217 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2218 routines to extract the assigned bits. */
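/* Illustrative example (not from the original sources): for
     u.i = 0x01020304;
     ... = u.c[1];             where u is a union of int and char[4]
   the stored constant is native-encoded into a byte buffer and the
   bytes the load covers are re-interpreted in the load's type.  */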
2219 else if (known_eq (ref->size, maxsize)
2220 && is_gimple_reg_type (vr->type)
2221 && !contains_storage_order_barrier_p (vr->operands)
2222 && gimple_assign_single_p (def_stmt)
2223 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2224 /* native_encode_expr and native_interpret_expr operate on arrays of
2225 bytes and so fundamentally need a compile-time size and offset. */
2226 && maxsize.is_constant (&maxsizei)
2227 && maxsizei % BITS_PER_UNIT == 0
2228 && offset.is_constant (&offseti)
2229 && offseti % BITS_PER_UNIT == 0
2230 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2231 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2232 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2233 {
2234 tree base2;
2235 HOST_WIDE_INT offset2, size2;
2236 bool reverse;
2237 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2238 &offset2, &size2, &reverse);
2239 if (base2
2240 && !reverse
2241 && size2 % BITS_PER_UNIT == 0
2242 && offset2 % BITS_PER_UNIT == 0
2243 && operand_equal_p (base, base2, 0)
2244 && known_subrange_p (offseti, maxsizei, offset2, size2))
2245 {
2246 /* We support up to 512-bit values (for V8DFmode). */
2247 unsigned char buffer[64];
2248 int len;
2249
2250 tree rhs = gimple_assign_rhs1 (def_stmt);
2251 if (TREE_CODE (rhs) == SSA_NAME)
2252 rhs = SSA_VAL (rhs);
2253 len = native_encode_expr (rhs,
2254 buffer, sizeof (buffer),
2255 (offseti - offset2) / BITS_PER_UNIT);
2256 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2257 {
2258 tree type = vr->type;
2259 /* Make sure to interpret in a type that has a range
2260 covering the whole access size. */
2261 if (INTEGRAL_TYPE_P (vr->type)
2262 && maxsizei != TYPE_PRECISION (vr->type))
2263 type = build_nonstandard_integer_type (maxsizei,
2264 TYPE_UNSIGNED (type));
2265 tree val = native_interpret_expr (type, buffer,
2266 maxsizei / BITS_PER_UNIT);
2267 /* If we chop off bits because the type's precision doesn't
2268 match the memory access size this is ok when optimizing
2269 reads but not when called from the DSE code during
2270 elimination. */
2271 if (val
2272 && type != vr->type)
2273 {
2274 if (! int_fits_type_p (val, vr->type))
2275 val = NULL_TREE;
2276 else
2277 val = fold_convert (vr->type, val);
2278 }
2279
2280 if (val)
2281 return vn_reference_lookup_or_insert_for_pieces
2282 (vuse, vr->set, vr->type, vr->operands, val);
2283 }
2284 }
2285 }
2286
2287 /* 4) Assignment from an SSA name whose value we may be able to
2288 access pieces of. */
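/* Illustrative example (not from the original sources): for
     c = x_1;                  where x_1 is a register, e.g. of complex type
     ... = REALPART_EXPR <c>;
   the load is expressed as BIT_FIELD_REF <x_1, size, position> and
   value-numbered as such.  */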
2289 else if (known_eq (ref->size, maxsize)
2290 && is_gimple_reg_type (vr->type)
2291 && !contains_storage_order_barrier_p (vr->operands)
2292 && gimple_assign_single_p (def_stmt)
2293 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2294 {
2295 tree base2;
2296 poly_int64 offset2, size2, maxsize2;
2297 bool reverse;
2298 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2299 &offset2, &size2, &maxsize2,
2300 &reverse);
2301 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2302 if (!reverse
2303 && known_size_p (maxsize2)
2304 && known_eq (maxsize2, size2)
2305 && operand_equal_p (base, base2, 0)
2306 && known_subrange_p (offset, maxsize, offset2, size2)
2307 /* ??? We can't handle bitfield precision extracts without
2308 either using an alternate type for the BIT_FIELD_REF and
2309 then doing a conversion or possibly adjusting the offset
2310 according to endianness. */
2311 && (! INTEGRAL_TYPE_P (vr->type)
2312 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2313 && multiple_p (ref->size, BITS_PER_UNIT)
2314 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2315 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2316 {
2317 gimple_match_op op (gimple_match_cond::UNCOND,
2318 BIT_FIELD_REF, vr->type,
2319 vn_valueize (def_rhs),
2320 bitsize_int (ref->size),
2321 bitsize_int (offset - offset2));
2322 tree val = vn_nary_build_or_lookup (&op);
2323 if (val
2324 && (TREE_CODE (val) != SSA_NAME
2325 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2326 {
2327 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2328 (vuse, vr->set, vr->type, vr->operands, val);
2329 return res;
2330 }
2331 }
2332 }
2333
2334 /* 5) For aggregate copies translate the reference through them if
2335 the copy kills ref. */
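/* Illustrative example (not from the original sources): for
     *p_1 = *q_2;              an aggregate copy that kills the looked-up ref
     ... = (*p_1).f;
   the lookup is rewritten to continue searching for (*q_2).f.  */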
2336 else if (vn_walk_kind == VN_WALKREWRITE
2337 && gimple_assign_single_p (def_stmt)
2338 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2339 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2340 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2341 {
2342 tree base2;
2343 int i, j, k;
2344 auto_vec<vn_reference_op_s> rhs;
2345 vn_reference_op_t vro;
2346 ao_ref r;
2347
2348 if (!lhs_ref_ok)
2349 return (void *)-1;
2350
2351 /* See if the assignment kills REF. */
2352 base2 = ao_ref_base (&lhs_ref);
2353 if (!lhs_ref.max_size_known_p ()
2354 || (base != base2
2355 && (TREE_CODE (base) != MEM_REF
2356 || TREE_CODE (base2) != MEM_REF
2357 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2358 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2359 TREE_OPERAND (base2, 1))))
2360 || !stmt_kills_ref_p (def_stmt, ref))
2361 return (void *)-1;
2362
2363 /* Find the common base of ref and the lhs. lhs_ops already
2364 contains valueized operands for the lhs. */
2365 i = vr->operands.length () - 1;
2366 j = lhs_ops.length () - 1;
2367 while (j >= 0 && i >= 0
2368 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2369 {
2370 i--;
2371 j--;
2372 }
2373
2374 /* ??? The innermost op should always be a MEM_REF and we already
2375 checked that the assignment to the lhs kills vr. Thus for
2376 aggregate copies using char[] types the vn_reference_op_eq
2377 may fail when comparing types for compatibility. But we really
2378 don't care here - further lookups with the rewritten operands
2379 will simply fail if we messed up types too badly. */
2380 poly_int64 extra_off = 0;
2381 if (j == 0 && i >= 0
2382 && lhs_ops[0].opcode == MEM_REF
2383 && maybe_ne (lhs_ops[0].off, -1))
2384 {
2385 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2386 i--, j--;
2387 else if (vr->operands[i].opcode == MEM_REF
2388 && maybe_ne (vr->operands[i].off, -1))
2389 {
2390 extra_off = vr->operands[i].off - lhs_ops[0].off;
2391 i--, j--;
2392 }
2393 }
2394
2395 /* i now points to the first additional op.
2396 ??? LHS may not be completely contained in VR, one or more
2397 VIEW_CONVERT_EXPRs could be in its way. We could at least
2398 try handling outermost VIEW_CONVERT_EXPRs. */
2399 if (j != -1)
2400 return (void *)-1;
2401
2402 /* Punt if the additional ops contain a storage order barrier. */
2403 for (k = i; k >= 0; k--)
2404 {
2405 vro = &vr->operands[k];
2406 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2407 return (void *)-1;
2408 }
2409
2410 /* Now re-write REF to be based on the rhs of the assignment. */
2411 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2412
2413 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2414 if (maybe_ne (extra_off, 0))
2415 {
2416 if (rhs.length () < 2)
2417 return (void *)-1;
2418 int ix = rhs.length () - 2;
2419 if (rhs[ix].opcode != MEM_REF
2420 || known_eq (rhs[ix].off, -1))
2421 return (void *)-1;
2422 rhs[ix].off += extra_off;
2423 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2424 build_int_cst (TREE_TYPE (rhs[ix].op0),
2425 extra_off));
2426 }
2427
2428 /* We need to prepend vr->operands[0..i] to rhs. */
2429 vec<vn_reference_op_s> old = vr->operands;
2430 if (i + 1 + rhs.length () > vr->operands.length ())
2431 vr->operands.safe_grow (i + 1 + rhs.length ());
2432 else
2433 vr->operands.truncate (i + 1 + rhs.length ());
2434 FOR_EACH_VEC_ELT (rhs, j, vro)
2435 vr->operands[i + 1 + j] = *vro;
2436 vr->operands = valueize_refs (vr->operands);
2437 if (old == shared_lookup_references)
2438 shared_lookup_references = vr->operands;
2439 vr->hashcode = vn_reference_compute_hash (vr);
2440
2441 /* Try folding the new reference to a constant. */
2442 tree val = fully_constant_vn_reference_p (vr);
2443 if (val)
2444 return vn_reference_lookup_or_insert_for_pieces
2445 (vuse, vr->set, vr->type, vr->operands, val);
2446
2447 /* Adjust *ref from the new operands. */
2448 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2449 return (void *)-1;
2450 /* This can happen with bitfields. */
2451 if (maybe_ne (ref->size, r.size))
2452 return (void *)-1;
2453 *ref = r;
2454
2455 /* Do not update last seen VUSE after translating. */
2456 last_vuse_ptr = NULL;
2457
2458 /* Keep looking for the adjusted *REF / VR pair. */
2459 return NULL;
2460 }
2461
2462 /* 6) For memcpy copies translate the reference through them if
2463 the copy kills ref. */
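/* Illustrative example (not from the original sources): for
     memcpy (&d, &s, sizeof (d));
     ... = d.f;
   an access contained in the copied region is rewritten as a MEM_REF
   based on &s at the corresponding offset and the walk continues.  */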
2464 else if (vn_walk_kind == VN_WALKREWRITE
2465 && is_gimple_reg_type (vr->type)
2466 /* ??? Handle BCOPY as well. */
2467 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2468 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2469 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2470 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2471 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2472 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2473 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2474 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2475 {
2476 tree lhs, rhs;
2477 ao_ref r;
2478 poly_int64 rhs_offset, lhs_offset;
2479 vn_reference_op_s op;
2480 poly_uint64 mem_offset;
2481 poly_int64 at, byte_maxsize;
2482
2483 /* Only handle non-variable, addressable refs. */
2484 if (maybe_ne (ref->size, maxsize)
2485 || !multiple_p (offset, BITS_PER_UNIT, &at)
2486 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2487 return (void *)-1;
2488
2489 /* Extract a pointer base and an offset for the destination. */
2490 lhs = gimple_call_arg (def_stmt, 0);
2491 lhs_offset = 0;
2492 if (TREE_CODE (lhs) == SSA_NAME)
2493 {
2494 lhs = vn_valueize (lhs);
2495 if (TREE_CODE (lhs) == SSA_NAME)
2496 {
2497 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2498 if (gimple_assign_single_p (def_stmt)
2499 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2500 lhs = gimple_assign_rhs1 (def_stmt);
2501 }
2502 }
2503 if (TREE_CODE (lhs) == ADDR_EXPR)
2504 {
2505 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2506 &lhs_offset);
2507 if (!tem)
2508 return (void *)-1;
2509 if (TREE_CODE (tem) == MEM_REF
2510 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2511 {
2512 lhs = TREE_OPERAND (tem, 0);
2513 if (TREE_CODE (lhs) == SSA_NAME)
2514 lhs = vn_valueize (lhs);
2515 lhs_offset += mem_offset;
2516 }
2517 else if (DECL_P (tem))
2518 lhs = build_fold_addr_expr (tem);
2519 else
2520 return (void *)-1;
2521 }
2522 if (TREE_CODE (lhs) != SSA_NAME
2523 && TREE_CODE (lhs) != ADDR_EXPR)
2524 return (void *)-1;
2525
2526 /* Extract a pointer base and an offset for the source. */
2527 rhs = gimple_call_arg (def_stmt, 1);
2528 rhs_offset = 0;
2529 if (TREE_CODE (rhs) == SSA_NAME)
2530 rhs = vn_valueize (rhs);
2531 if (TREE_CODE (rhs) == ADDR_EXPR)
2532 {
2533 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2534 &rhs_offset);
2535 if (!tem)
2536 return (void *)-1;
2537 if (TREE_CODE (tem) == MEM_REF
2538 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2539 {
2540 rhs = TREE_OPERAND (tem, 0);
2541 rhs_offset += mem_offset;
2542 }
2543 else if (DECL_P (tem)
2544 || TREE_CODE (tem) == STRING_CST)
2545 rhs = build_fold_addr_expr (tem);
2546 else
2547 return (void *)-1;
2548 }
2549 if (TREE_CODE (rhs) != SSA_NAME
2550 && TREE_CODE (rhs) != ADDR_EXPR)
2551 return (void *)-1;
2552
2553 /* The bases of the destination and the reference have to agree. */
2554 if (TREE_CODE (base) == MEM_REF)
2555 {
2556 if (TREE_OPERAND (base, 0) != lhs
2557 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2558 return (void *) -1;
2559 at += mem_offset;
2560 }
2561 else if (!DECL_P (base)
2562 || TREE_CODE (lhs) != ADDR_EXPR
2563 || TREE_OPERAND (lhs, 0) != base)
2564 return (void *)-1;
2565
2566 /* If the access is completely outside of the memcpy destination
2567 area there is no aliasing. */
2568 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2569 return NULL;
2570 /* And the access has to be contained within the memcpy destination. */
2571 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2572 return (void *)-1;
2573
2574 /* Make room for 2 operands in the new reference. */
2575 if (vr->operands.length () < 2)
2576 {
2577 vec<vn_reference_op_s> old = vr->operands;
2578 vr->operands.safe_grow_cleared (2);
2579 if (old == shared_lookup_references)
2580 shared_lookup_references = vr->operands;
2581 }
2582 else
2583 vr->operands.truncate (2);
2584
2585 /* The looked-through reference is a simple MEM_REF. */
2586 memset (&op, 0, sizeof (op));
2587 op.type = vr->type;
2588 op.opcode = MEM_REF;
2589 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2590 op.off = at - lhs_offset + rhs_offset;
2591 vr->operands[0] = op;
2592 op.type = TREE_TYPE (rhs);
2593 op.opcode = TREE_CODE (rhs);
2594 op.op0 = rhs;
2595 op.off = -1;
2596 vr->operands[1] = op;
2597 vr->hashcode = vn_reference_compute_hash (vr);
2598
2599 /* Try folding the new reference to a constant. */
2600 tree val = fully_constant_vn_reference_p (vr);
2601 if (val)
2602 return vn_reference_lookup_or_insert_for_pieces
2603 (vuse, vr->set, vr->type, vr->operands, val);
2604
2605 /* Adjust *ref from the new operands. */
2606 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2607 return (void *)-1;
2608 /* This can happen with bitfields. */
2609 if (maybe_ne (ref->size, r.size))
2610 return (void *)-1;
2611 *ref = r;
2612
2613 /* Do not update last seen VUSE after translating. */
2614 last_vuse_ptr = NULL;
2615
2616 /* Keep looking for the adjusted *REF / VR pair. */
2617 return NULL;
2618 }
2619
2620 /* Bail out and stop walking. */
2621 return (void *)-1;
2622 }
2623
2624 /* Return a reference op vector from OP that can be used for
2625 vn_reference_lookup_pieces. The caller is responsible for releasing
2626 the vector. */
2627
2628 vec<vn_reference_op_s>
2629 vn_reference_operands_for_lookup (tree op)
2630 {
2631 bool valueized;
2632 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2633 }
2634
2635 /* Lookup a reference operation by its parts in the current hash table.
2636 Returns the resulting value number if it exists in the hash table,
2637 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2638 vn_reference_t stored in the hashtable if something is found. */
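/* A typical use is sketched below (illustrative only, not from the
   original sources):
     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (expr);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt),
                                            get_alias_set (expr),
                                            TREE_TYPE (expr), ops, &res,
                                            VN_WALK);
     ops.release ();  */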
2639
2640 tree
2641 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2642 vec<vn_reference_op_s> operands,
2643 vn_reference_t *vnresult, vn_lookup_kind kind)
2644 {
2645 struct vn_reference_s vr1;
2646 vn_reference_t tmp;
2647 tree cst;
2648
2649 if (!vnresult)
2650 vnresult = &tmp;
2651 *vnresult = NULL;
2652
2653 vr1.vuse = vuse_ssa_val (vuse);
2654 shared_lookup_references.truncate (0);
2655 shared_lookup_references.safe_grow (operands.length ());
2656 memcpy (shared_lookup_references.address (),
2657 operands.address (),
2658 sizeof (vn_reference_op_s)
2659 * operands.length ());
2660 vr1.operands = operands = shared_lookup_references
2661 = valueize_refs (shared_lookup_references);
2662 vr1.type = type;
2663 vr1.set = set;
2664 vr1.hashcode = vn_reference_compute_hash (&vr1);
2665 if ((cst = fully_constant_vn_reference_p (&vr1)))
2666 return cst;
2667
2668 vn_reference_lookup_1 (&vr1, vnresult);
2669 if (!*vnresult
2670 && kind != VN_NOWALK
2671 && vr1.vuse)
2672 {
2673 ao_ref r;
2674 vn_walk_kind = kind;
2675 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2676 *vnresult =
2677 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2678 vn_reference_lookup_2,
2679 vn_reference_lookup_3,
2680 vuse_valueize, &vr1);
2681 gcc_checking_assert (vr1.operands == shared_lookup_references);
2682 }
2683
2684 if (*vnresult)
2685 return (*vnresult)->result;
2686
2687 return NULL_TREE;
2688 }
2689
2690 /* Lookup OP in the current hash table, and return the resulting value
2691 number if it exists in the hash table. Return NULL_TREE if it does
2692 not exist in the hash table or if the result field of the structure
2693 was NULL. VNRESULT will be filled in with the vn_reference_t
2694 stored in the hashtable if one exists. When TBAA_P is false assume
2695 we are looking up a store and treat it as having alias-set zero. */
2696
2697 tree
2698 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2699 vn_reference_t *vnresult, bool tbaa_p)
2700 {
2701 vec<vn_reference_op_s> operands;
2702 struct vn_reference_s vr1;
2703 tree cst;
2704 bool valueized_anything;
2705
2706 if (vnresult)
2707 *vnresult = NULL;
2708
2709 vr1.vuse = vuse_ssa_val (vuse);
2710 vr1.operands = operands
2711 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2712 vr1.type = TREE_TYPE (op);
2713 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2714 vr1.hashcode = vn_reference_compute_hash (&vr1);
2715 if ((cst = fully_constant_vn_reference_p (&vr1)))
2716 return cst;
2717
2718 if (kind != VN_NOWALK
2719 && vr1.vuse)
2720 {
2721 vn_reference_t wvnresult;
2722 ao_ref r;
2723 /* Make sure to use a valueized reference if we valueized anything.
2724 Otherwise preserve the full reference for advanced TBAA. */
2725 if (!valueized_anything
2726 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2727 vr1.operands))
2728 ao_ref_init (&r, op);
2729 if (! tbaa_p)
2730 r.ref_alias_set = r.base_alias_set = 0;
2731 vn_walk_kind = kind;
2732 wvnresult =
2733 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2734 vn_reference_lookup_2,
2735 vn_reference_lookup_3,
2736 vuse_valueize, &vr1);
2737 gcc_checking_assert (vr1.operands == shared_lookup_references);
2738 if (wvnresult)
2739 {
2740 if (vnresult)
2741 *vnresult = wvnresult;
2742 return wvnresult->result;
2743 }
2744
2745 return NULL_TREE;
2746 }
2747
2748 return vn_reference_lookup_1 (&vr1, vnresult);
2749 }
2750
2751 /* Lookup CALL in the current hash table and return the entry in
2752 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2753
2754 void
2755 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2756 vn_reference_t vr)
2757 {
2758 if (vnresult)
2759 *vnresult = NULL;
2760
2761 tree vuse = gimple_vuse (call);
2762
2763 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2764 vr->operands = valueize_shared_reference_ops_from_call (call);
2765 vr->type = gimple_expr_type (call);
2766 vr->set = 0;
2767 vr->hashcode = vn_reference_compute_hash (vr);
2768 vn_reference_lookup_1 (vr, vnresult);
2769 }
2770
2771 /* Insert OP into the current hash table with a value number of RESULT. */
2772
2773 static void
2774 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2775 {
2776 vn_reference_s **slot;
2777 vn_reference_t vr1;
2778 bool tem;
2779
2780 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2781 if (TREE_CODE (result) == SSA_NAME)
2782 vr1->value_id = VN_INFO (result)->value_id;
2783 else
2784 vr1->value_id = get_or_alloc_constant_value_id (result);
2785 vr1->vuse = vuse_ssa_val (vuse);
2786 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2787 vr1->type = TREE_TYPE (op);
2788 vr1->set = get_alias_set (op);
2789 vr1->hashcode = vn_reference_compute_hash (vr1);
2790 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2791 vr1->result_vdef = vdef;
2792
2793 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2794 INSERT);
2795
2796 /* Because IL walking on reference lookup can end up visiting
2797 a def that is only to be visited later in iteration order
2798 when we are about to make an irreducible region reducible,
2799 the def can effectively be processed and its ref already
2800 inserted by vn_reference_lookup_3. So we cannot assert (!*slot)
2801 but we save a lookup if we deal with already inserted refs here. */
2802 if (*slot)
2803 {
2804 /* We cannot assert that we have the same value either because
2805 when disentangling an irreducible region we may end up visiting
2806 a use before the corresponding def. That's a missed optimization
2807 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
2808 if (dump_file && (dump_flags & TDF_DETAILS)
2809 && !operand_equal_p ((*slot)->result, vr1->result, 0))
2810 {
2811 fprintf (dump_file, "Keeping old value ");
2812 print_generic_expr (dump_file, (*slot)->result);
2813 fprintf (dump_file, " because of collision\n");
2814 }
2815 free_reference (vr1);
2816 obstack_free (&vn_tables_obstack, vr1);
2817 return;
2818 }
2819
2820 *slot = vr1;
2821 vr1->next = last_inserted_ref;
2822 last_inserted_ref = vr1;
2823 }
2824
2825 /* Insert a reference by its pieces into the current hash table with
2826 a value number of RESULT. Return the resulting reference
2827 structure we created. */
2828
2829 vn_reference_t
2830 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2831 vec<vn_reference_op_s> operands,
2832 tree result, unsigned int value_id)
2833
2834 {
2835 vn_reference_s **slot;
2836 vn_reference_t vr1;
2837
2838 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2839 vr1->value_id = value_id;
2840 vr1->vuse = vuse_ssa_val (vuse);
2841 vr1->operands = valueize_refs (operands);
2842 vr1->type = type;
2843 vr1->set = set;
2844 vr1->hashcode = vn_reference_compute_hash (vr1);
2845 if (result && TREE_CODE (result) == SSA_NAME)
2846 result = SSA_VAL (result);
2847 vr1->result = result;
2848
2849 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2850 INSERT);
2851
2852 /* At this point we should have all the things inserted that we have
2853 seen before, and we should never try inserting something that
2854 already exists. */
2855 gcc_assert (!*slot);
2856
2857 *slot = vr1;
2858 vr1->next = last_inserted_ref;
2859 last_inserted_ref = vr1;
2860 return vr1;
2861 }
2862
2863 /* Compute and return the hash value for nary operation VNO1. */
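/* Note (illustrative): operands are valueized and commutative operations
   canonicalized before hashing, so e.g. a_1 + b_2 and b_2 + a_1 receive
   the same hash; comparisons may likewise be canonicalized by swapping
   the operands and the comparison code.  */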
2864
2865 static hashval_t
2866 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2867 {
2868 inchash::hash hstate;
2869 unsigned i;
2870
2871 for (i = 0; i < vno1->length; ++i)
2872 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2873 vno1->op[i] = SSA_VAL (vno1->op[i]);
2874
2875 if (((vno1->length == 2
2876 && commutative_tree_code (vno1->opcode))
2877 || (vno1->length == 3
2878 && commutative_ternary_tree_code (vno1->opcode)))
2879 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2880 std::swap (vno1->op[0], vno1->op[1]);
2881 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2882 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2883 {
2884 std::swap (vno1->op[0], vno1->op[1]);
2885 vno1->opcode = swap_tree_comparison (vno1->opcode);
2886 }
2887
2888 hstate.add_int (vno1->opcode);
2889 for (i = 0; i < vno1->length; ++i)
2890 inchash::add_expr (vno1->op[i], hstate);
2891
2892 return hstate.end ();
2893 }
2894
2895 /* Compare nary operations VNO1 and VNO2 and return true if they are
2896 equivalent. */
2897
2898 bool
2899 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2900 {
2901 unsigned i;
2902
2903 if (vno1->hashcode != vno2->hashcode)
2904 return false;
2905
2906 if (vno1->length != vno2->length)
2907 return false;
2908
2909 if (vno1->opcode != vno2->opcode
2910 || !types_compatible_p (vno1->type, vno2->type))
2911 return false;
2912
2913 for (i = 0; i < vno1->length; ++i)
2914 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2915 return false;
2916
2917 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2918 of op1. Need to check to make sure they are the same. */
2919 if (vno1->opcode == BIT_INSERT_EXPR
2920 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2921 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2922 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2923 return false;
2924
2925 return true;
2926 }
2927
2928 /* Initialize VNO from the pieces provided. */
2929
2930 static void
2931 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2932 enum tree_code code, tree type, tree *ops)
2933 {
2934 vno->opcode = code;
2935 vno->length = length;
2936 vno->type = type;
2937 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2938 }
2939
2940 /* Initialize VNO from OP. */
2941
2942 static void
2943 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2944 {
2945 unsigned i;
2946
2947 vno->opcode = TREE_CODE (op);
2948 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2949 vno->type = TREE_TYPE (op);
2950 for (i = 0; i < vno->length; ++i)
2951 vno->op[i] = TREE_OPERAND (op, i);
2952 }
2953
2954 /* Return the number of operands for a vn_nary ops structure from STMT. */
2955
2956 static unsigned int
2957 vn_nary_length_from_stmt (gimple *stmt)
2958 {
2959 switch (gimple_assign_rhs_code (stmt))
2960 {
2961 case REALPART_EXPR:
2962 case IMAGPART_EXPR:
2963 case VIEW_CONVERT_EXPR:
2964 return 1;
2965
2966 case BIT_FIELD_REF:
2967 return 3;
2968
2969 case CONSTRUCTOR:
2970 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2971
2972 default:
2973 return gimple_num_ops (stmt) - 1;
2974 }
2975 }
2976
2977 /* Initialize VNO from STMT. */
2978
2979 static void
2980 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2981 {
2982 unsigned i;
2983
2984 vno->opcode = gimple_assign_rhs_code (stmt);
2985 vno->type = gimple_expr_type (stmt);
2986 switch (vno->opcode)
2987 {
2988 case REALPART_EXPR:
2989 case IMAGPART_EXPR:
2990 case VIEW_CONVERT_EXPR:
2991 vno->length = 1;
2992 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2993 break;
2994
2995 case BIT_FIELD_REF:
2996 vno->length = 3;
2997 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2998 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2999 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3000 break;
3001
3002 case CONSTRUCTOR:
3003 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3004 for (i = 0; i < vno->length; ++i)
3005 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3006 break;
3007
3008 default:
3009 gcc_checking_assert (!gimple_assign_single_p (stmt));
3010 vno->length = gimple_num_ops (stmt) - 1;
3011 for (i = 0; i < vno->length; ++i)
3012 vno->op[i] = gimple_op (stmt, i + 1);
3013 }
3014 }
3015
3016 /* Compute the hashcode for VNO and look for it in the hash table;
3017 return the resulting value number if it exists in the hash table.
3018 Return NULL_TREE if it does not exist in the hash table or if the
3019 result field of the operation is NULL. VNRESULT will contain the
3020 vn_nary_op_t from the hashtable if it exists. */
3021
3022 static tree
3023 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3024 {
3025 vn_nary_op_s **slot;
3026
3027 if (vnresult)
3028 *vnresult = NULL;
3029
3030 vno->hashcode = vn_nary_op_compute_hash (vno);
3031 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3032 if (!slot)
3033 return NULL_TREE;
3034 if (vnresult)
3035 *vnresult = *slot;
3036 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3037 }
3038
3039 /* Lookup a n-ary operation by its pieces and return the resulting value
3040 number if it exists in the hash table. Return NULL_TREE if it does
3041 not exist in the hash table or if the result field of the operation
3042 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3043 if it exists. */
3044
3045 tree
3046 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3047 tree type, tree *ops, vn_nary_op_t *vnresult)
3048 {
3049 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3050 sizeof_vn_nary_op (length));
3051 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3052 return vn_nary_op_lookup_1 (vno1, vnresult);
3053 }
3054
3055 /* Lookup OP in the current hash table, and return the resulting value
3056 number if it exists in the hash table. Return NULL_TREE if it does
3057 not exist in the hash table or if the result field of the operation
3058 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3059 if it exists. */
3060
3061 tree
3062 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
3063 {
3064 vn_nary_op_t vno1
3065 = XALLOCAVAR (struct vn_nary_op_s,
3066 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3067 init_vn_nary_op_from_op (vno1, op);
3068 return vn_nary_op_lookup_1 (vno1, vnresult);
3069 }
3070
3071 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3072 value number if it exists in the hash table. Return NULL_TREE if
3073 it does not exist in the hash table. VNRESULT will contain the
3074 vn_nary_op_t from the hashtable if it exists. */
3075
3076 tree
3077 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3078 {
3079 vn_nary_op_t vno1
3080 = XALLOCAVAR (struct vn_nary_op_s,
3081 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3082 init_vn_nary_op_from_stmt (vno1, stmt);
3083 return vn_nary_op_lookup_1 (vno1, vnresult);
3084 }
3085
3086 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3087
3088 static vn_nary_op_t
3089 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3090 {
3091 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3092 }
3093
3094 /* Allocate and initialize a vn_nary_op_t on the
3095 vn_tables_obstack. */
3096
3097 static vn_nary_op_t
3098 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3099 {
3100 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3101
3102 vno1->value_id = value_id;
3103 vno1->length = length;
3104 vno1->predicated_values = 0;
3105 vno1->u.result = result;
3106
3107 return vno1;
3108 }
3109
3110 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3111 VNO->HASHCODE first. */
3112
3113 static vn_nary_op_t
3114 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3115 bool compute_hash)
3116 {
3117 vn_nary_op_s **slot;
3118
3119 if (compute_hash)
3120 {
3121 vno->hashcode = vn_nary_op_compute_hash (vno);
3122 gcc_assert (! vno->predicated_values
3123 || (! vno->u.values->next
3124 && vno->u.values->n == 1));
3125 }
3126
3127 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3128 vno->unwind_to = *slot;
3129 if (*slot)
3130 {
3131 /* Prefer non-predicated values.
3132 ??? Only if those are constant, otherwise, with constant predicated
3133 value, turn them into predicated values with entry-block validity
3134 (??? but we always find the first valid result currently). */
3135 if ((*slot)->predicated_values
3136 && ! vno->predicated_values)
3137 {
3138 /* ??? We cannot remove *slot from the unwind stack list.
3139 For the moment we deal with this by skipping not found
3140 entries but this isn't ideal ... */
3141 *slot = vno;
3142 /* ??? Maintain a stack of states we can unwind in
3143 vn_nary_op_s? But how far do we unwind? In reality
3144 we need to push change records somewhere... Or not
3145 unwind vn_nary_op_s and linking them but instead
3146 unwind the results "list", linking that, which also
3147 doesn't move on hashtable resize. */
3148 /* We can also have a ->unwind_to recording *slot there.
3149 That way we can make u.values a fixed size array with
3150 recording the number of entries but of course we then
3151 have always N copies for each unwind_to-state. Or we
3152 make sure to only ever append and each unwinding will
3153 pop off one entry (but how to deal with predicated
3154 replaced with non-predicated here?) */
3155 vno->next = last_inserted_nary;
3156 last_inserted_nary = vno;
3157 return vno;
3158 }
3159 else if (vno->predicated_values
3160 && ! (*slot)->predicated_values)
3161 return *slot;
3162 else if (vno->predicated_values
3163 && (*slot)->predicated_values)
3164 {
3165 /* ??? Factor this all into a insert_single_predicated_value
3166 routine. */
3167 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3168 basic_block vno_bb
3169 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3170 vn_pval *nval = vno->u.values;
3171 vn_pval **next = &vno->u.values;
3172 bool found = false;
3173 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3174 {
3175 if (expressions_equal_p (val->result, vno->u.values->result))
3176 {
3177 found = true;
3178 for (unsigned i = 0; i < val->n; ++i)
3179 {
3180 basic_block val_bb
3181 = BASIC_BLOCK_FOR_FN (cfun,
3182 val->valid_dominated_by_p[i]);
3183 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3184 /* Value registered with more generic predicate. */
3185 return *slot;
3186 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3187 /* Shouldn't happen, we insert in RPO order. */
3188 gcc_unreachable ();
3189 }
3190 /* Append value. */
3191 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3192 sizeof (vn_pval)
3193 + val->n * sizeof (int));
3194 (*next)->next = NULL;
3195 (*next)->result = val->result;
3196 (*next)->n = val->n + 1;
3197 memcpy ((*next)->valid_dominated_by_p,
3198 val->valid_dominated_by_p,
3199 val->n * sizeof (int));
3200 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3201 next = &(*next)->next;
3202 if (dump_file && (dump_flags & TDF_DETAILS))
3203 fprintf (dump_file, "Appending predicate to value.\n");
3204 continue;
3205 }
3206 /* Copy other predicated values. */
3207 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3208 sizeof (vn_pval)
3209 + (val->n-1) * sizeof (int));
3210 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3211 (*next)->next = NULL;
3212 next = &(*next)->next;
3213 }
3214 if (!found)
3215 *next = nval;
3216
3217 *slot = vno;
3218 vno->next = last_inserted_nary;
3219 last_inserted_nary = vno;
3220 return vno;
3221 }
3222
3223 /* While we do not want to insert things twice it's awkward to
3224 avoid it in the case where visit_nary_op pattern-matches stuff
3225 and ends up simplifying the replacement to itself. We then
3226 get two inserts, one from visit_nary_op and one from
3227 vn_nary_build_or_lookup.
3228 So allow inserts with the same value number. */
3229 if ((*slot)->u.result == vno->u.result)
3230 return *slot;
3231 }
3232
3233 /* ??? There's also optimistic vs. previously committed state merging
3234 that is problematic for the case of unwinding. */
3235
3236 /* ??? We should return NULL if we do not use 'vno' and have the
3237 caller release it. */
3238 gcc_assert (!*slot);
3239
3240 *slot = vno;
3241 vno->next = last_inserted_nary;
3242 last_inserted_nary = vno;
3243 return vno;
3244 }
3245
3246 /* Insert an n-ary operation into the current hash table using its
3247 pieces. Return the vn_nary_op_t structure we created and put in
3248 the hashtable. */
3249
3250 vn_nary_op_t
3251 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3252 tree type, tree *ops,
3253 tree result, unsigned int value_id)
3254 {
3255 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3256 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3257 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3258 }
3259
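/* Insert the n-ary operation CODE with operands OPS of TYPE as a value
   predicated by edge PRED_E, i.e. RESULT is only valid in blocks
   dominated by the destination of PRED_E.  Returns NULL without
   recording anything when the destination of PRED_E is not suitable
   for predication (see the checks below).  */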
3260 static vn_nary_op_t
3261 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3262 tree type, tree *ops,
3263 tree result, unsigned int value_id,
3264 edge pred_e)
3265 {
3266 /* ??? Currently tracking BBs. */
3267 if (! single_pred_p (pred_e->dest))
3268 {
3269 /* Never record for backedges. */
3270 if (pred_e->flags & EDGE_DFS_BACK)
3271 return NULL;
3272 edge_iterator ei;
3273 edge e;
3274 int cnt = 0;
3275 /* Ignore backedges. */
3276 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3277 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3278 cnt++;
3279 if (cnt != 1)
3280 return NULL;
3281 }
3282 if (dump_file && (dump_flags & TDF_DETAILS)
3283 /* ??? Fix dumping, but currently we only get comparisons. */
3284 && TREE_CODE_CLASS (code) == tcc_comparison)
3285 {
3286 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3287 pred_e->dest->index);
3288 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3289 fprintf (dump_file, " %s ", get_tree_code_name (code));
3290 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3291 fprintf (dump_file, " == %s\n",
3292 integer_zerop (result) ? "false" : "true");
3293 }
3294 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3295 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3296 vno1->predicated_values = 1;
3297 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3298 sizeof (vn_pval));
3299 vno1->u.values->next = NULL;
3300 vno1->u.values->result = result;
3301 vno1->u.values->n = 1;
3302 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3303 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3304 }
3305
3306 static bool
3307 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3308
3309 static tree
3310 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3311 {
3312 if (! vno->predicated_values)
3313 return vno->u.result;
3314 for (vn_pval *val = vno->u.values; val; val = val->next)
3315 for (unsigned i = 0; i < val->n; ++i)
3316 if (dominated_by_p_w_unex (bb,
3317 BASIC_BLOCK_FOR_FN
3318 (cfun, val->valid_dominated_by_p[i])))
3319 return val->result;
3320 return NULL_TREE;
3321 }
3322
3323 /* Insert OP into the current hash table with a value number of
3324 RESULT. Return the vn_nary_op_t structure we created and put in
3325 the hashtable. */
3326
3327 vn_nary_op_t
3328 vn_nary_op_insert (tree op, tree result)
3329 {
3330 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3331 vn_nary_op_t vno1;
3332
3333 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3334 init_vn_nary_op_from_op (vno1, op);
3335 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3336 }
3337
3338 /* Insert the rhs of STMT into the current hash table with a value number of
3339 RESULT. */
3340
3341 static vn_nary_op_t
3342 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3343 {
3344 vn_nary_op_t vno1
3345 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3346 result, VN_INFO (result)->value_id);
3347 init_vn_nary_op_from_stmt (vno1, stmt);
3348 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3349 }
3350
3351 /* Compute a hashcode for PHI operation VP1 and return it. */
3352
3353 static inline hashval_t
3354 vn_phi_compute_hash (vn_phi_t vp1)
3355 {
3356 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3357 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3358 tree phi1op;
3359 tree type;
3360 edge e;
3361 edge_iterator ei;
3362
3363 /* If all PHI arguments are constants we need to distinguish
3364 the PHI node via its type. */
3365 type = vp1->type;
3366 hstate.merge_hash (vn_hash_type (type));
3367
3368 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3369 {
3370 /* Don't hash backedge values; they need to be handled as VN_TOP
3371 for optimistic value-numbering. */
3372 if (e->flags & EDGE_DFS_BACK)
3373 continue;
3374
3375 phi1op = vp1->phiargs[e->dest_idx];
3376 if (phi1op == VN_TOP)
3377 continue;
3378 inchash::add_expr (phi1op, hstate);
3379 }
3380
3381 return hstate.end ();
3382 }
3383
3384
3385 /* Return true if COND1 and COND2 represent the same condition, set
3386 *INVERTED_P if one needs to be inverted to make it the same as
3387 the other. */
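/* Illustrative example (not from the original sources): a_1 < b_2 and
   b_2 > a_1 represent the same condition with *INVERTED_P false, while
   a_1 < b_2 and a_1 >= b_2 match with *INVERTED_P set (assuming NaNs
   need not be honored).  */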
3388
3389 static bool
3390 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3391 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3392 {
3393 enum tree_code code1 = gimple_cond_code (cond1);
3394 enum tree_code code2 = gimple_cond_code (cond2);
3395
3396 *inverted_p = false;
3397 if (code1 == code2)
3398 ;
3399 else if (code1 == swap_tree_comparison (code2))
3400 std::swap (lhs2, rhs2);
3401 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3402 *inverted_p = true;
3403 else if (code1 == invert_tree_comparison
3404 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3405 {
3406 std::swap (lhs2, rhs2);
3407 *inverted_p = true;
3408 }
3409 else
3410 return false;
3411
3412 return ((expressions_equal_p (lhs1, lhs2)
3413 && expressions_equal_p (rhs1, rhs2))
3414 || (commutative_tree_code (code1)
3415 && expressions_equal_p (lhs1, rhs2)
3416 && expressions_equal_p (rhs1, lhs2)));
3417 }
3418
3419 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3420
3421 static int
3422 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3423 {
3424 if (vp1->hashcode != vp2->hashcode)
3425 return false;
3426
3427 if (vp1->block != vp2->block)
3428 {
3429 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3430 return false;
3431
3432 switch (EDGE_COUNT (vp1->block->preds))
3433 {
3434 case 1:
3435 /* Single-arg PHIs are just copies. */
3436 break;
3437
3438 case 2:
3439 {
3440 /* Rule out backedges into the PHI. */
3441 if (vp1->block->loop_father->header == vp1->block
3442 || vp2->block->loop_father->header == vp2->block)
3443 return false;
3444
3445 /* If the PHI nodes do not have compatible types
3446 they are not the same. */
3447 if (!types_compatible_p (vp1->type, vp2->type))
3448 return false;
3449
3450 basic_block idom1
3451 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3452 basic_block idom2
3453 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3454 /* If the immediate dominators end in switch stmts, multiple
3455 values may end up in the same PHI arg via intermediate
3456 CFG merges. */
3457 if (EDGE_COUNT (idom1->succs) != 2
3458 || EDGE_COUNT (idom2->succs) != 2)
3459 return false;
3460
3461 /* Verify the controlling stmt is the same. */
3462 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3463 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3464 if (! last1 || ! last2)
3465 return false;
3466 bool inverted_p;
3467 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3468 last2, vp2->cclhs, vp2->ccrhs,
3469 &inverted_p))
3470 return false;
3471
3472 /* Get at true/false controlled edges into the PHI. */
3473 edge te1, te2, fe1, fe2;
3474 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3475 &te1, &fe1)
3476 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3477 &te2, &fe2))
3478 return false;
3479
3480 /* Swap edges if the second condition is the inverted of the
3481 first. */
3482 if (inverted_p)
3483 std::swap (te2, fe2);
3484
3485 /* ??? Handle VN_TOP specially. */
3486 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3487 vp2->phiargs[te2->dest_idx])
3488 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3489 vp2->phiargs[fe2->dest_idx]))
3490 return false;
3491
3492 return true;
3493 }
3494
3495 default:
3496 return false;
3497 }
3498 }
3499
3500 /* If the PHI nodes do not have compatible types
3501 they are not the same. */
3502 if (!types_compatible_p (vp1->type, vp2->type))
3503 return false;
3504
3505 /* Any phi in the same block will have its arguments in the
3506 same edge order, because of how we store phi nodes. */
3507 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3508 {
3509 tree phi1op = vp1->phiargs[i];
3510 tree phi2op = vp2->phiargs[i];
3511 if (phi1op == VN_TOP || phi2op == VN_TOP)
3512 continue;
3513 if (!expressions_equal_p (phi1op, phi2op))
3514 return false;
3515 }
3516
3517 return true;
3518 }
3519
3520 /* Lookup PHI in the current hash table, and return the resulting
3521 value number if it exists in the hash table. Return NULL_TREE if
3522 it does not exist in the hash table. */
3523
3524 static tree
3525 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3526 {
3527 vn_phi_s **slot;
3528 struct vn_phi_s *vp1;
3529 edge e;
3530 edge_iterator ei;
3531
3532 vp1 = XALLOCAVAR (struct vn_phi_s,
3533 sizeof (struct vn_phi_s)
3534 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3535
3536 /* Canonicalize the SSA_NAME's to their value number. */
3537 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3538 {
3539 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3540 if (TREE_CODE (def) == SSA_NAME
3541 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3542 def = SSA_VAL (def);
3543 vp1->phiargs[e->dest_idx] = def;
3544 }
3545 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3546 vp1->block = gimple_bb (phi);
3547 /* Extract values of the controlling condition. */
3548 vp1->cclhs = NULL_TREE;
3549 vp1->ccrhs = NULL_TREE;
3550 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3551 if (EDGE_COUNT (idom1->succs) == 2)
3552 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3553 {
3554 /* ??? We want to use SSA_VAL here. But possibly not
3555 allow VN_TOP. */
3556 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3557 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3558 }
3559 vp1->hashcode = vn_phi_compute_hash (vp1);
3560 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3561 if (!slot)
3562 return NULL_TREE;
3563 return (*slot)->result;
3564 }
3565
3566 /* Insert PHI into the current hash table with a value number of
3567 RESULT. */
3568
3569 static vn_phi_t
3570 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3571 {
3572 vn_phi_s **slot;
3573 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3574 sizeof (vn_phi_s)
3575 + ((gimple_phi_num_args (phi) - 1)
3576 * sizeof (tree)));
3577 edge e;
3578 edge_iterator ei;
3579
3580 /* Canonicalize the SSA_NAMEs to their value numbers. */
3581 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3582 {
3583 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3584 if (TREE_CODE (def) == SSA_NAME
3585 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3586 def = SSA_VAL (def);
3587 vp1->phiargs[e->dest_idx] = def;
3588 }
3589 vp1->value_id = VN_INFO (result)->value_id;
3590 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3591 vp1->block = gimple_bb (phi);
3592 /* Extract values of the controlling condition. */
3593 vp1->cclhs = NULL_TREE;
3594 vp1->ccrhs = NULL_TREE;
3595 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3596 if (EDGE_COUNT (idom1->succs) == 2)
3597 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3598 {
3599 /* ??? We want to use SSA_VAL here. But possibly not
3600 allow VN_TOP. */
3601 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3602 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3603 }
3604 vp1->result = result;
3605 vp1->hashcode = vn_phi_compute_hash (vp1);
3606
3607 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3608 gcc_assert (!*slot);
3609
3610 *slot = vp1;
3611 vp1->next = last_inserted_phi;
3612 last_inserted_phi = vp1;
3613 return vp1;
3614 }
3615
3616
3617 /* Return true if BB1 is dominated by BB2 taking into account edges
3618 that are not executable. */
3619
3620 static bool
3621 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3622 {
3623 edge_iterator ei;
3624 edge e;
3625
3626 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3627 return true;
3628
3629 /* Before iterating we'd like to know if there exists an
3630 (executable) path from bb2 to bb1 at all; if not we can
3631 directly return false. For now simply iterate once. */
3632
3633 /* Iterate to the single executable bb1 predecessor. */
3634 if (EDGE_COUNT (bb1->preds) > 1)
3635 {
3636 edge prede = NULL;
3637 FOR_EACH_EDGE (e, ei, bb1->preds)
3638 if (e->flags & EDGE_EXECUTABLE)
3639 {
3640 if (prede)
3641 {
3642 prede = NULL;
3643 break;
3644 }
3645 prede = e;
3646 }
3647 if (prede)
3648 {
3649 bb1 = prede->src;
3650
3651 /* Re-do the dominance check with changed bb1. */
3652 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3653 return true;
3654 }
3655 }
3656
3657 /* Iterate to the single executable bb2 successor. */
3658 edge succe = NULL;
3659 FOR_EACH_EDGE (e, ei, bb2->succs)
3660 if (e->flags & EDGE_EXECUTABLE)
3661 {
3662 if (succe)
3663 {
3664 succe = NULL;
3665 break;
3666 }
3667 succe = e;
3668 }
3669 if (succe)
3670 {
3671 /* Verify the reached block is only reached through succe.
3672 If there is only one edge we can spare us the dominator
3673 check and iterate directly. */
3674 if (EDGE_COUNT (succe->dest->preds) > 1)
3675 {
3676 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3677 if (e != succe
3678 && (e->flags & EDGE_EXECUTABLE))
3679 {
3680 succe = NULL;
3681 break;
3682 }
3683 }
3684 if (succe)
3685 {
3686 bb2 = succe->dest;
3687
3688 /* Re-do the dominance check with changed bb2. */
3689 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3690 return true;
3691 }
3692 }
3693
3694 /* We could now iterate updating bb1 / bb2. */
3695 return false;
3696 }
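
/* A hedged illustration of the helper above (made-up CFG): suppose BB2 ends
   in `if (flag_1)' and value numbering marked only its true edge to BB3
   executable.  Even if the plain dominator tree says BB3's immediate
   dominator lies elsewhere (because of the now dead false path), every
   executable path to BB3 still runs through BB2.  The function catches such
   cases by stepping BB1 up to its single executable predecessor and BB2 down
   to its single executable successor once, re-asking dominated_by_p after
   each step.  */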
3697
3698 /* Set the value number of FROM to TO, return true if it has changed
3699 as a result. */
3700
3701 static inline bool
3702 set_ssa_val_to (tree from, tree to)
3703 {
3704 vn_ssa_aux_t from_info = VN_INFO (from);
3705 tree currval = from_info->valnum; // SSA_VAL (from)
3706 poly_int64 toff, coff;
3707
3708 /* The only things we allow as value numbers are SSA names
3709 and invariants. So assert that here. We don't allow VN_TOP
3710 as visiting a stmt should produce a value-number other than
3711 that.
3712 ??? Still VN_TOP can happen for unreachable code, so force
3713 it to varying in that case. Not all code is prepared to
3714 get VN_TOP on valueization. */
3715 if (to == VN_TOP)
3716 {
3717 /* ??? When iterating and visiting PHI <undef, backedge-value>
3718 for the first time we rightfully get VN_TOP and we need to
3719 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
3720 With SCCVN we were simply lucky we iterated the other PHI
3721 cycles first and thus visited the backedge-value DEF. */
3722 if (currval == VN_TOP)
3723 goto set_and_exit;
3724 if (dump_file && (dump_flags & TDF_DETAILS))
3725 fprintf (dump_file, "Forcing value number to varying on "
3726 "receiving VN_TOP\n");
3727 to = from;
3728 }
3729
3730 gcc_checking_assert (to != NULL_TREE
3731 && ((TREE_CODE (to) == SSA_NAME
3732 && (to == from || SSA_VAL (to) == to))
3733 || is_gimple_min_invariant (to)));
3734
3735 if (from != to)
3736 {
3737 if (currval == from)
3738 {
3739 if (dump_file && (dump_flags & TDF_DETAILS))
3740 {
3741 fprintf (dump_file, "Not changing value number of ");
3742 print_generic_expr (dump_file, from);
3743 fprintf (dump_file, " from VARYING to ");
3744 print_generic_expr (dump_file, to);
3745 fprintf (dump_file, "\n");
3746 }
3747 return false;
3748 }
3749 bool curr_invariant = is_gimple_min_invariant (currval);
3750 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
3751 && ssa_undefined_value_p (currval, false));
3752 if (currval != VN_TOP
3753 && !curr_invariant
3754 && !curr_undefined
3755 && is_gimple_min_invariant (to))
3756 {
3757 if (dump_file && (dump_flags & TDF_DETAILS))
3758 {
3759 fprintf (dump_file, "Forcing VARYING instead of changing "
3760 "value number of ");
3761 print_generic_expr (dump_file, from);
3762 fprintf (dump_file, " from ");
3763 print_generic_expr (dump_file, currval);
3764 fprintf (dump_file, " (non-constant) to ");
3765 print_generic_expr (dump_file, to);
3766 fprintf (dump_file, " (constant)\n");
3767 }
3768 to = from;
3769 }
3770 else if (currval != VN_TOP
3771 && !curr_undefined
3772 && TREE_CODE (to) == SSA_NAME
3773 && ssa_undefined_value_p (to, false))
3774 {
3775 if (dump_file && (dump_flags & TDF_DETAILS))
3776 {
3777 fprintf (dump_file, "Forcing VARYING instead of changing "
3778 "value number of ");
3779 print_generic_expr (dump_file, from);
3780 fprintf (dump_file, " from ");
3781 print_generic_expr (dump_file, currval);
3782 fprintf (dump_file, " (non-undefined) to ");
3783 print_generic_expr (dump_file, to);
3784 fprintf (dump_file, " (undefined)\n");
3785 }
3786 to = from;
3787 }
3788 else if (TREE_CODE (to) == SSA_NAME
3789 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3790 to = from;
3791 }
3792
3793 set_and_exit:
3794 if (dump_file && (dump_flags & TDF_DETAILS))
3795 {
3796 fprintf (dump_file, "Setting value number of ");
3797 print_generic_expr (dump_file, from);
3798 fprintf (dump_file, " to ");
3799 print_generic_expr (dump_file, to);
3800 }
3801
3802 if (currval != to
3803 && !operand_equal_p (currval, to, 0)
3804 /* Different undefined SSA names are not actually different. See
3805 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3806 && !(TREE_CODE (currval) == SSA_NAME
3807 && TREE_CODE (to) == SSA_NAME
3808 && ssa_undefined_value_p (currval, false)
3809 && ssa_undefined_value_p (to, false))
3810 /* ??? For addresses involving volatile objects or types operand_equal_p
3811 does not reliably detect ADDR_EXPRs as equal. We know we are only
3812 getting invariant gimple addresses here, so we can use
3813 get_addr_base_and_unit_offset to do this comparison. */
3814 && !(TREE_CODE (currval) == ADDR_EXPR
3815 && TREE_CODE (to) == ADDR_EXPR
3816 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3817 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3818 && known_eq (coff, toff)))
3819 {
3820 if (dump_file && (dump_flags & TDF_DETAILS))
3821 fprintf (dump_file, " (changed)\n");
3822 from_info->valnum = to;
3823 return true;
3824 }
3825 if (dump_file && (dump_flags & TDF_DETAILS))
3826 fprintf (dump_file, "\n");
3827 return false;
3828 }
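
/* Rough sketch of the lattice discipline enforced above (illustration
   only): a value number may only move "downwards",

     VN_TOP  ->  constant / leader SSA name  ->  VARYING (valnum == name)

   and transitions that would go back "up" -- e.g. replacing an already
   known non-constant, non-undefined value by a constant, or a defined
   value by an undefined one -- are forced to VARYING instead, which is
   what guarantees termination of the iteration.  */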
3829
3830 /* Set all definitions in STMT to value number themselves.
3831 Return true if a value number changed. */
3832
3833 static bool
3834 defs_to_varying (gimple *stmt)
3835 {
3836 bool changed = false;
3837 ssa_op_iter iter;
3838 def_operand_p defp;
3839
3840 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3841 {
3842 tree def = DEF_FROM_PTR (defp);
3843 changed |= set_ssa_val_to (def, def);
3844 }
3845 return changed;
3846 }
3847
3848 /* Visit a copy between LHS and RHS, return true if the value number
3849 changed. */
3850
3851 static bool
3852 visit_copy (tree lhs, tree rhs)
3853 {
3854 /* Valueize. */
3855 rhs = SSA_VAL (rhs);
3856
3857 return set_ssa_val_to (lhs, rhs);
3858 }
3859
3860 /* Lookup a value of type WIDE_TYPE whose value in the type of OP is
3861 the same as OP. */
3862
3863 static tree
3864 valueized_wider_op (tree wide_type, tree op)
3865 {
3866 if (TREE_CODE (op) == SSA_NAME)
3867 op = vn_valueize (op);
3868
3869 /* Either we have the op widened available. */
3870 tree ops[3] = {};
3871 ops[0] = op;
3872 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3873 wide_type, ops, NULL);
3874 if (tem)
3875 return tem;
3876
3877 /* Or the op is truncated from some existing value. */
3878 if (TREE_CODE (op) == SSA_NAME)
3879 {
3880 gimple *def = SSA_NAME_DEF_STMT (op);
3881 if (is_gimple_assign (def)
3882 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3883 {
3884 tem = gimple_assign_rhs1 (def);
3885 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3886 {
3887 if (TREE_CODE (tem) == SSA_NAME)
3888 tem = vn_valueize (tem);
3889 return tem;
3890 }
3891 }
3892 }
3893
3894 /* For constants simply extend it. */
3895 if (TREE_CODE (op) == INTEGER_CST)
3896 return wide_int_to_tree (wide_type, wi::to_wide (op));
3897
3898 return NULL_TREE;
3899 }
3900
3901 /* Visit a nary operator RHS, value number it, and return true if the
3902 value number of LHS has changed as a result. */
3903
3904 static bool
3905 visit_nary_op (tree lhs, gassign *stmt)
3906 {
3907 vn_nary_op_t vnresult;
3908 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
3909 if (! result && vnresult)
3910 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
3911 if (result)
3912 return set_ssa_val_to (lhs, result);
3913
3914 /* Do some special pattern matching for redundancies of operations
3915 in different types. */
3916 enum tree_code code = gimple_assign_rhs_code (stmt);
3917 tree type = TREE_TYPE (lhs);
3918 tree rhs1 = gimple_assign_rhs1 (stmt);
3919 switch (code)
3920 {
3921 CASE_CONVERT:
3922 /* Match arithmetic done in a different type where we can easily
3923 substitute the result from some earlier sign-changed or widened
3924 operation. */
3925 if (INTEGRAL_TYPE_P (type)
3926 && TREE_CODE (rhs1) == SSA_NAME
3927 /* We only handle sign-changes or zero-extension -> & mask. */
3928 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3929 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3930 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3931 {
3932 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3933 if (def
3934 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3935 || gimple_assign_rhs_code (def) == MINUS_EXPR
3936 || gimple_assign_rhs_code (def) == MULT_EXPR))
3937 {
3938 tree ops[3] = {};
3939 /* Either we have the op widened available. */
3940 ops[0] = valueized_wider_op (type,
3941 gimple_assign_rhs1 (def));
3942 if (ops[0])
3943 ops[1] = valueized_wider_op (type,
3944 gimple_assign_rhs2 (def));
3945 if (ops[0] && ops[1])
3946 {
3947 ops[0] = vn_nary_op_lookup_pieces
3948 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3949 /* We have wider operation available. */
3950 if (ops[0]
3951 /* If the leader is a wrapping operation we can
3952 insert it for code hoisting w/o introducing
3953 undefined overflow. If it is not it has to
3954 be available. See PR86554. */
3955 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3956 || (rpo_avail && vn_context_bb
3957 && rpo_avail->eliminate_avail (vn_context_bb,
3958 ops[0]))))
3959 {
3960 unsigned lhs_prec = TYPE_PRECISION (type);
3961 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3962 if (lhs_prec == rhs_prec)
3963 {
3964 gimple_match_op match_op (gimple_match_cond::UNCOND,
3965 NOP_EXPR, type, ops[0]);
3966 result = vn_nary_build_or_lookup (&match_op);
3967 if (result)
3968 {
3969 bool changed = set_ssa_val_to (lhs, result);
3970 vn_nary_op_insert_stmt (stmt, result);
3971 return changed;
3972 }
3973 }
3974 else
3975 {
3976 tree mask = wide_int_to_tree
3977 (type, wi::mask (rhs_prec, false, lhs_prec));
3978 gimple_match_op match_op (gimple_match_cond::UNCOND,
3979 BIT_AND_EXPR,
3980 TREE_TYPE (lhs),
3981 ops[0], mask);
3982 result = vn_nary_build_or_lookup (&match_op);
3983 if (result)
3984 {
3985 bool changed = set_ssa_val_to (lhs, result);
3986 vn_nary_op_insert_stmt (stmt, result);
3987 return changed;
3988 }
3989 }
3990 }
3991 }
3992 }
3993 }
3994 default:;
3995 }
3996
3997 bool changed = set_ssa_val_to (lhs, lhs);
3998 vn_nary_op_insert_stmt (stmt, lhs);
3999 return changed;
4000 }
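
/* A hedged source-level example of the CASE_CONVERT matching above
   (illustration only, variable names made up): for

     unsigned int  t_1 = x_2 + y_3;
     unsigned long l_4 = (unsigned long) t_1;

   when a widened addition `(unsigned long) x_2 + (unsigned long) y_3' is
   already available, l_4 can be value-numbered to that wider result masked
   with a mask of unsigned int's precision (BIT_AND_EXPR), and for a pure
   sign-change of equal precision to the wider result converted back
   directly, instead of treating the conversion as a new, unrelated
   value.  */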
4001
4002 /* Visit a call STMT storing into LHS. Return true if the value number
4003 of the LHS has changed as a result. */
4004
4005 static bool
4006 visit_reference_op_call (tree lhs, gcall *stmt)
4007 {
4008 bool changed = false;
4009 struct vn_reference_s vr1;
4010 vn_reference_t vnresult = NULL;
4011 tree vdef = gimple_vdef (stmt);
4012
4013 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4014 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4015 lhs = NULL_TREE;
4016
4017 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4018 if (vnresult)
4019 {
4020 if (vnresult->result_vdef && vdef)
4021 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4022 else if (vdef)
4023 /* If the call was discovered to be pure or const reflect
4024 that as far as possible. */
4025 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4026
4027 if (!vnresult->result && lhs)
4028 vnresult->result = lhs;
4029
4030 if (vnresult->result && lhs)
4031 changed |= set_ssa_val_to (lhs, vnresult->result);
4032 }
4033 else
4034 {
4035 vn_reference_t vr2;
4036 vn_reference_s **slot;
4037 tree vdef_val = vdef;
4038 if (vdef)
4039 {
4040 /* If we value numbered the function of an indirect call to
4041 one not clobbering memory, value number its VDEF to its
4042 VUSE. */
4043 tree fn = gimple_call_fn (stmt);
4044 if (fn && TREE_CODE (fn) == SSA_NAME)
4045 {
4046 fn = SSA_VAL (fn);
4047 if (TREE_CODE (fn) == ADDR_EXPR
4048 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4049 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4050 & (ECF_CONST | ECF_PURE)))
4051 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4052 }
4053 changed |= set_ssa_val_to (vdef, vdef_val);
4054 }
4055 if (lhs)
4056 changed |= set_ssa_val_to (lhs, lhs);
4057 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4058 vr2->vuse = vr1.vuse;
4059 /* As we are not walking the virtual operand chain we know the
4060 shared_lookup_references are still original so we can re-use
4061 them here. */
4062 vr2->operands = vr1.operands.copy ();
4063 vr2->type = vr1.type;
4064 vr2->set = vr1.set;
4065 vr2->hashcode = vr1.hashcode;
4066 vr2->result = lhs;
4067 vr2->result_vdef = vdef_val;
4068 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4069 INSERT);
4070 gcc_assert (!*slot);
4071 *slot = vr2;
4072 vr2->next = last_inserted_ref;
4073 last_inserted_ref = vr2;
4074 }
4075
4076 return changed;
4077 }
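
/* Illustrative sketch (made-up source): two calls with the same valueized
   arguments and the same VUSE can share a value number when the callee is
   const or pure, e.g.

     i_1 = f (a_2);       (f is pure)
     j_3 = f (a_2);       (no intervening store changed memory)

   lets j_3 be value-numbered to i_1.  If the (possibly devirtualized)
   callee turns out not to clobber memory, the call's VDEF is additionally
   value-numbered to its VUSE, i.e. the call is treated as leaving the
   memory state unchanged.  */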
4078
4079 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4080 and return true if the value number of the LHS has changed as a result. */
4081
4082 static bool
4083 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4084 {
4085 bool changed = false;
4086 tree last_vuse;
4087 tree result;
4088
4089 last_vuse = gimple_vuse (stmt);
4090 last_vuse_ptr = &last_vuse;
4091 result = vn_reference_lookup (op, gimple_vuse (stmt),
4092 default_vn_walk_kind, NULL, true);
4093 last_vuse_ptr = NULL;
4094
4095 /* We handle type-punning through unions by value-numbering based
4096 on offset and size of the access. Be prepared to handle a
4097 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
4098 if (result
4099 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4100 {
4101 /* We will be setting the value number of lhs to the value number
4102 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4103 So first simplify and lookup this expression to see if it
4104 is already available. */
4105 gimple_match_op res_op (gimple_match_cond::UNCOND,
4106 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4107 result = vn_nary_build_or_lookup (&res_op);
4108 /* If building the conversion fails, avoid inserting the reference
4109 again. */
4110 if (!result)
4111 return set_ssa_val_to (lhs, lhs);
4112 }
4113
4114 if (result)
4115 changed = set_ssa_val_to (lhs, result);
4116 else
4117 {
4118 changed = set_ssa_val_to (lhs, lhs);
4119 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4120 }
4121
4122 return changed;
4123 }
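
/* A hedged example of the type-punning handling above (illustration only):
   for a union access like

     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;

   the load of u.f finds the earlier store by offset and size although the
   types differ; instead of giving up, f_2 is value-numbered to
   VIEW_CONVERT_EXPR <float> (i_1), built or looked up via
   vn_nary_build_or_lookup above.  */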
4124
4125
4126 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4127 and return true if the value number of the LHS has changed as a result. */
4128
4129 static bool
4130 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4131 {
4132 bool changed = false;
4133 vn_reference_t vnresult = NULL;
4134 tree assign;
4135 bool resultsame = false;
4136 tree vuse = gimple_vuse (stmt);
4137 tree vdef = gimple_vdef (stmt);
4138
4139 if (TREE_CODE (op) == SSA_NAME)
4140 op = SSA_VAL (op);
4141
4142 /* First we want to lookup using the *vuses* from the store and see
4143 if the last store to this location with the same address
4144 had the same value.
4145
4146 The vuses represent the memory state before the store. If the
4147 memory state, address, and value of the store are the same as those
4148 of the last store to this location, then this store will produce the
4149 same memory state as that store.
4150
4151 In this case the vdef versions for this store are value numbered to those
4152 vuse versions, since they represent the same memory state after
4153 this store.
4154
4155 Otherwise, the vdefs for the store are used when inserting into
4156 the table, since the store generates a new memory state. */
4157
4158 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4159 if (vnresult
4160 && vnresult->result)
4161 {
4162 tree result = vnresult->result;
4163 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4164 || result == SSA_VAL (result));
4165 resultsame = expressions_equal_p (result, op);
4166 if (resultsame)
4167 {
4168 /* If the TBAA state isn't compatible for downstream reads
4169 we cannot value-number the VDEFs the same. */
4170 alias_set_type set = get_alias_set (lhs);
4171 if (vnresult->set != set
4172 && ! alias_set_subset_of (set, vnresult->set))
4173 resultsame = false;
4174 }
4175 }
4176
4177 if (!resultsame)
4178 {
4179 /* Only perform the following when being called from PRE
4180 which embeds tail merging. */
4181 if (default_vn_walk_kind == VN_WALK)
4182 {
4183 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4184 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4185 if (vnresult)
4186 {
4187 VN_INFO (vdef)->visited = true;
4188 return set_ssa_val_to (vdef, vnresult->result_vdef);
4189 }
4190 }
4191
4192 if (dump_file && (dump_flags & TDF_DETAILS))
4193 {
4194 fprintf (dump_file, "No store match\n");
4195 fprintf (dump_file, "Value numbering store ");
4196 print_generic_expr (dump_file, lhs);
4197 fprintf (dump_file, " to ");
4198 print_generic_expr (dump_file, op);
4199 fprintf (dump_file, "\n");
4200 }
4201 /* Have to set value numbers before insert, since insert is
4202 going to valueize the references in-place. */
4203 if (vdef)
4204 changed |= set_ssa_val_to (vdef, vdef);
4205
4206 /* Do not insert structure copies into the tables. */
4207 if (is_gimple_min_invariant (op)
4208 || is_gimple_reg (op))
4209 vn_reference_insert (lhs, op, vdef, NULL);
4210
4211 /* Only perform the following when being called from PRE
4212 which embeds tail merging. */
4213 if (default_vn_walk_kind == VN_WALK)
4214 {
4215 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4216 vn_reference_insert (assign, lhs, vuse, vdef);
4217 }
4218 }
4219 else
4220 {
4221 /* We had a match, so value number the vdef to have the value
4222 number of the vuse it came from. */
4223
4224 if (dump_file && (dump_flags & TDF_DETAILS))
4225 fprintf (dump_file, "Store matched earlier value, "
4226 "value numbering store vdefs to matching vuses.\n");
4227
4228 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4229 }
4230
4231 return changed;
4232 }
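
/* Illustrative sketch (made-up virtual operands): for

     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = x_4;
     # .MEM_5 = VDEF <.MEM_2>
     *p_3 = x_4;

   the second store finds that the last store to the same location already
   stored the same value (with compatible TBAA), so .MEM_5 is value-numbered
   to .MEM_2 and the store can be recognized as redundant later.  If the
   value or location differs, the new VDEF keeps its own value number and
   the store is entered into the reference table instead.  */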
4233
4234 /* Visit and value number PHI, return true if the value number
4235 changed. When BACKEDGES_VARYING_P is true then assume all
4236 backedge values are varying. When INSERTED is not NULL then
4237 this is just an ahead query for a possible iteration; set INSERTED
4238 to true if we'd insert into the hashtable. */
4239
4240 static bool
4241 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4242 {
4243 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4244 tree backedge_val = NULL_TREE;
4245 bool seen_non_backedge = false;
4246 tree sameval_base = NULL_TREE;
4247 poly_int64 soff, doff;
4248 unsigned n_executable = 0;
4249 edge_iterator ei;
4250 edge e;
4251
4252 /* TODO: We could check for this in initialization, and replace this
4253 with a gcc_assert. */
4254 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4255 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4256
4257 /* We track whether a PHI was CSEd to, to avoid excessive iterations
4258 that would be necessary only because the PHI changed arguments
4259 but not value. */
4260 if (!inserted)
4261 gimple_set_plf (phi, GF_PLF_1, false);
4262
4263 /* See if all non-TOP arguments have the same value. TOP is
4264 equivalent to everything, so we can ignore it. */
4265 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4266 if (e->flags & EDGE_EXECUTABLE)
4267 {
4268 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4269
4270 ++n_executable;
4271 if (TREE_CODE (def) == SSA_NAME)
4272 {
4273 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4274 def = SSA_VAL (def);
4275 if (e->flags & EDGE_DFS_BACK)
4276 backedge_val = def;
4277 }
4278 if (!(e->flags & EDGE_DFS_BACK))
4279 seen_non_backedge = true;
4280 if (def == VN_TOP)
4281 ;
4282 /* Ignore undefined defs for sameval but record one. */
4283 else if (TREE_CODE (def) == SSA_NAME
4284 && ! virtual_operand_p (def)
4285 && ssa_undefined_value_p (def, false))
4286 seen_undef = def;
4287 else if (sameval == VN_TOP)
4288 sameval = def;
4289 else if (!expressions_equal_p (def, sameval))
4290 {
4291 /* We know we're arriving only with invariant addresses here,
4292 try harder comparing them. We can do some caching here
4293 which we cannot do in expressions_equal_p. */
4294 if (TREE_CODE (def) == ADDR_EXPR
4295 && TREE_CODE (sameval) == ADDR_EXPR
4296 && sameval_base != (void *)-1)
4297 {
4298 if (!sameval_base)
4299 sameval_base = get_addr_base_and_unit_offset
4300 (TREE_OPERAND (sameval, 0), &soff);
4301 if (!sameval_base)
4302 sameval_base = (tree)(void *)-1;
4303 else if ((get_addr_base_and_unit_offset
4304 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4305 && known_eq (soff, doff))
4306 continue;
4307 }
4308 sameval = NULL_TREE;
4309 break;
4310 }
4311 }
4312
4313 /* If the value we want to use is flowing over the backedge and we
4314 should take it as VARYING but it has a non-VARYING value drop to
4315 VARYING.
4316 If we value-number a virtual operand never value-number to the
4317 value from the backedge as that confuses the alias-walking code.
4318 See gcc.dg/torture/pr87176.c. If the value is the same on a
4319 non-backedge everything is OK though. */
4320 bool visited_p;
4321 if ((backedge_val
4322 && !seen_non_backedge
4323 && TREE_CODE (backedge_val) == SSA_NAME
4324 && sameval == backedge_val
4325 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4326 || SSA_VAL (backedge_val) != backedge_val))
4327 /* Do not value-number a virtual operand to something not visited,
4328 though, given that allows us to escape a region in alias walking. */
4329 || (sameval
4330 && TREE_CODE (sameval) == SSA_NAME
4331 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4332 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4333 && (SSA_VAL (sameval, &visited_p), !visited_p)))
4334 /* Note this just drops to VARYING without inserting the PHI into
4335 the hashes. */
4336 result = PHI_RESULT (phi);
4337 /* If none of the edges was executable keep the value-number at VN_TOP,
4338 if only a single edge is executable use its value. */
4339 else if (n_executable <= 1)
4340 result = seen_undef ? seen_undef : sameval;
4341 /* If we saw only undefined values and VN_TOP use one of the
4342 undefined values. */
4343 else if (sameval == VN_TOP)
4344 result = seen_undef ? seen_undef : sameval;
4345 /* First see if it is equivalent to a phi node in this block. We prefer
4346 this as it allows IV elimination - see PRs 66502 and 67167. */
4347 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4348 {
4349 if (!inserted
4350 && TREE_CODE (result) == SSA_NAME
4351 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4352 {
4353 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4354 if (dump_file && (dump_flags & TDF_DETAILS))
4355 {
4356 fprintf (dump_file, "Marking CSEd to PHI node ");
4357 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4358 0, TDF_SLIM);
4359 fprintf (dump_file, "\n");
4360 }
4361 }
4362 }
4363 /* If all values are the same use that, unless we've seen undefined
4364 values as well and the value isn't constant.
4365 CCP/copyprop have the same restriction to not remove uninit warnings. */
4366 else if (sameval
4367 && (! seen_undef || is_gimple_min_invariant (sameval)))
4368 result = sameval;
4369 else
4370 {
4371 result = PHI_RESULT (phi);
4372 /* Only insert PHIs that are varying, for constant value numbers
4373 we mess up equivalences otherwise as we are only comparing
4374 the immediate controlling predicates. */
4375 vn_phi_insert (phi, result, backedges_varying_p);
4376 if (inserted)
4377 *inserted = true;
4378 }
4379
4380 return set_ssa_val_to (PHI_RESULT (phi), result);
4381 }
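
/* A rough sketch of the common outcomes above (illustration only):

     x_3 = PHI <a_1(2), a_1(3)>      all executable args equal: value a_1
     y_4 = PHI <5(2), u_5(D)(3)>     undefined args are ignored when the
                                     remaining value is a constant: value 5
     z_6 = PHI <b_2(2), c_7(3)>      conflicting args: z_6 stays VARYING but
                                     the PHI is inserted into the PHI table
                                     so a congruent PHI elsewhere can be
                                     CSEd to it.  */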
4382
4383 /* Try to simplify RHS using equivalences and constant folding. */
4384
4385 static tree
4386 try_to_simplify (gassign *stmt)
4387 {
4388 enum tree_code code = gimple_assign_rhs_code (stmt);
4389 tree tem;
4390
4391 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4392 in this case; there is no point in doing extra work. */
4393 if (code == SSA_NAME)
4394 return NULL_TREE;
4395
4396 /* First try constant folding based on our current lattice. */
4397 mprts_hook = vn_lookup_simplify_result;
4398 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4399 mprts_hook = NULL;
4400 if (tem
4401 && (TREE_CODE (tem) == SSA_NAME
4402 || is_gimple_min_invariant (tem)))
4403 return tem;
4404
4405 return NULL_TREE;
4406 }
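
/* Hedged usage sketch: the folding above valueizes operands through the
   current lattice, so for a statement like

     b_3 = a_2 + 1;

   where the lattice already records constant 4 as the value of a_2, the
   fold is performed on `4 + 1' and yields the constant 5, which the caller
   (visit_stmt) may then record as the value of b_3.  */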
4407
4408 /* Visit and value number STMT, return true if the value number
4409 changed. */
4410
4411 static bool
4412 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4413 {
4414 bool changed = false;
4415
4416 if (dump_file && (dump_flags & TDF_DETAILS))
4417 {
4418 fprintf (dump_file, "Value numbering stmt = ");
4419 print_gimple_stmt (dump_file, stmt, 0);
4420 }
4421
4422 if (gimple_code (stmt) == GIMPLE_PHI)
4423 changed = visit_phi (stmt, NULL, backedges_varying_p);
4424 else if (gimple_has_volatile_ops (stmt))
4425 changed = defs_to_varying (stmt);
4426 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4427 {
4428 enum tree_code code = gimple_assign_rhs_code (ass);
4429 tree lhs = gimple_assign_lhs (ass);
4430 tree rhs1 = gimple_assign_rhs1 (ass);
4431 tree simplified;
4432
4433 /* Shortcut for copies. Simplifying copies is pointless,
4434 since we copy the expression and value they represent. */
4435 if (code == SSA_NAME
4436 && TREE_CODE (lhs) == SSA_NAME)
4437 {
4438 changed = visit_copy (lhs, rhs1);
4439 goto done;
4440 }
4441 simplified = try_to_simplify (ass);
4442 if (simplified)
4443 {
4444 if (dump_file && (dump_flags & TDF_DETAILS))
4445 {
4446 fprintf (dump_file, "RHS ");
4447 print_gimple_expr (dump_file, ass, 0);
4448 fprintf (dump_file, " simplified to ");
4449 print_generic_expr (dump_file, simplified);
4450 fprintf (dump_file, "\n");
4451 }
4452 }
4453 /* Setting value numbers to constants will occasionally
4454 screw up phi congruence because constants are not
4455 uniquely associated with a single ssa name that can be
4456 looked up. */
4457 if (simplified
4458 && is_gimple_min_invariant (simplified)
4459 && TREE_CODE (lhs) == SSA_NAME)
4460 {
4461 changed = set_ssa_val_to (lhs, simplified);
4462 goto done;
4463 }
4464 else if (simplified
4465 && TREE_CODE (simplified) == SSA_NAME
4466 && TREE_CODE (lhs) == SSA_NAME)
4467 {
4468 changed = visit_copy (lhs, simplified);
4469 goto done;
4470 }
4471
4472 if ((TREE_CODE (lhs) == SSA_NAME
4473 /* We can substitute SSA_NAMEs that are live over
4474 abnormal edges with their constant value. */
4475 && !(gimple_assign_copy_p (ass)
4476 && is_gimple_min_invariant (rhs1))
4477 && !(simplified
4478 && is_gimple_min_invariant (simplified))
4479 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4480 /* Stores or copies from SSA_NAMEs that are live over
4481 abnormal edges are a problem. */
4482 || (code == SSA_NAME
4483 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4484 changed = defs_to_varying (ass);
4485 else if (REFERENCE_CLASS_P (lhs)
4486 || DECL_P (lhs))
4487 changed = visit_reference_op_store (lhs, rhs1, ass);
4488 else if (TREE_CODE (lhs) == SSA_NAME)
4489 {
4490 if ((gimple_assign_copy_p (ass)
4491 && is_gimple_min_invariant (rhs1))
4492 || (simplified
4493 && is_gimple_min_invariant (simplified)))
4494 {
4495 if (simplified)
4496 changed = set_ssa_val_to (lhs, simplified);
4497 else
4498 changed = set_ssa_val_to (lhs, rhs1);
4499 }
4500 else
4501 {
4502 /* Visit the original statement. */
4503 switch (vn_get_stmt_kind (ass))
4504 {
4505 case VN_NARY:
4506 changed = visit_nary_op (lhs, ass);
4507 break;
4508 case VN_REFERENCE:
4509 changed = visit_reference_op_load (lhs, rhs1, ass);
4510 break;
4511 default:
4512 changed = defs_to_varying (ass);
4513 break;
4514 }
4515 }
4516 }
4517 else
4518 changed = defs_to_varying (ass);
4519 }
4520 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4521 {
4522 tree lhs = gimple_call_lhs (call_stmt);
4523 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4524 {
4525 /* Try constant folding based on our current lattice. */
4526 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4527 vn_valueize);
4528 if (simplified)
4529 {
4530 if (dump_file && (dump_flags & TDF_DETAILS))
4531 {
4532 fprintf (dump_file, "call ");
4533 print_gimple_expr (dump_file, call_stmt, 0);
4534 fprintf (dump_file, " simplified to ");
4535 print_generic_expr (dump_file, simplified);
4536 fprintf (dump_file, "\n");
4537 }
4538 }
4539 /* Setting value numbers to constants will occasionally
4540 screw up phi congruence because constants are not
4541 uniquely associated with a single ssa name that can be
4542 looked up. */
4543 if (simplified
4544 && is_gimple_min_invariant (simplified))
4545 {
4546 changed = set_ssa_val_to (lhs, simplified);
4547 if (gimple_vdef (call_stmt))
4548 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4549 SSA_VAL (gimple_vuse (call_stmt)));
4550 goto done;
4551 }
4552 else if (simplified
4553 && TREE_CODE (simplified) == SSA_NAME)
4554 {
4555 changed = visit_copy (lhs, simplified);
4556 if (gimple_vdef (call_stmt))
4557 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4558 SSA_VAL (gimple_vuse (call_stmt)));
4559 goto done;
4560 }
4561 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4562 {
4563 changed = defs_to_varying (call_stmt);
4564 goto done;
4565 }
4566 }
4567
4568 /* Pick up flags from a devirtualization target. */
4569 tree fn = gimple_call_fn (stmt);
4570 int extra_fnflags = 0;
4571 if (fn && TREE_CODE (fn) == SSA_NAME)
4572 {
4573 fn = SSA_VAL (fn);
4574 if (TREE_CODE (fn) == ADDR_EXPR
4575 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4576 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4577 }
4578 if (!gimple_call_internal_p (call_stmt)
4579 && (/* Calls to the same function with the same vuse
4580 and the same operands do not necessarily return the same
4581 value, unless they're pure or const. */
4582 ((gimple_call_flags (call_stmt) | extra_fnflags)
4583 & (ECF_PURE | ECF_CONST))
4584 /* If calls have a vdef, subsequent calls won't have
4585 the same incoming vuse. So, if 2 calls with vdef have the
4586 same vuse, we know they're not subsequent.
4587 We can value number 2 calls to the same function with the
4588 same vuse and the same operands which are not subsequent
4589 the same, because there is no code in the program that can
4590 compare the 2 values... */
4591 || (gimple_vdef (call_stmt)
4592 /* ... unless the call returns a pointer which does
4593 not alias with anything else, in which case the
4594 information that the values are distinct is encoded
4595 in the IL. */
4596 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4597 /* Only perform the following when being called from PRE
4598 which embeds tail merging. */
4599 && default_vn_walk_kind == VN_WALK)))
4600 changed = visit_reference_op_call (lhs, call_stmt);
4601 else
4602 changed = defs_to_varying (call_stmt);
4603 }
4604 else
4605 changed = defs_to_varying (stmt);
4606 done:
4607 return changed;
4608 }
4609
4610
4611 /* Allocate a value number table. */
4612
4613 static void
4614 allocate_vn_table (vn_tables_t table, unsigned size)
4615 {
4616 table->phis = new vn_phi_table_type (size);
4617 table->nary = new vn_nary_op_table_type (size);
4618 table->references = new vn_reference_table_type (size);
4619 }
4620
4621 /* Free a value number table. */
4622
4623 static void
4624 free_vn_table (vn_tables_t table)
4625 {
4626 /* Walk over elements and release vectors. */
4627 vn_reference_iterator_type hir;
4628 vn_reference_t vr;
4629 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4630 vr->operands.release ();
4631 delete table->phis;
4632 table->phis = NULL;
4633 delete table->nary;
4634 table->nary = NULL;
4635 delete table->references;
4636 table->references = NULL;
4637 }
4638
4639 /* Set *ID according to RESULT. */
4640
4641 static void
4642 set_value_id_for_result (tree result, unsigned int *id)
4643 {
4644 if (result && TREE_CODE (result) == SSA_NAME)
4645 *id = VN_INFO (result)->value_id;
4646 else if (result && is_gimple_min_invariant (result))
4647 *id = get_or_alloc_constant_value_id (result);
4648 else
4649 *id = get_next_value_id ();
4650 }
4651
4652 /* Set the value ids in the valid hash tables. */
4653
4654 static void
4655 set_hashtable_value_ids (void)
4656 {
4657 vn_nary_op_iterator_type hin;
4658 vn_phi_iterator_type hip;
4659 vn_reference_iterator_type hir;
4660 vn_nary_op_t vno;
4661 vn_reference_t vr;
4662 vn_phi_t vp;
4663
4664 /* Now set the value ids of the things we had put in the hash
4665 table. */
4666
4667 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4668 if (! vno->predicated_values)
4669 set_value_id_for_result (vno->u.result, &vno->value_id);
4670
4671 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4672 set_value_id_for_result (vp->result, &vp->value_id);
4673
4674 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4675 hir)
4676 set_value_id_for_result (vr->result, &vr->value_id);
4677 }
4678
4679 /* Return the maximum value id we have ever seen. */
4680
4681 unsigned int
4682 get_max_value_id (void)
4683 {
4684 return next_value_id;
4685 }
4686
4687 /* Return the next unique value id. */
4688
4689 unsigned int
4690 get_next_value_id (void)
4691 {
4692 return next_value_id++;
4693 }
4694
4695
4696 /* Compare two expressions E1 and E2 and return true if they are equal. */
4697
4698 bool
4699 expressions_equal_p (tree e1, tree e2)
4700 {
4701 /* The obvious case. */
4702 if (e1 == e2)
4703 return true;
4704
4705 /* If either one is VN_TOP consider them equal. */
4706 if (e1 == VN_TOP || e2 == VN_TOP)
4707 return true;
4708
4709 /* If only one of them is null, they cannot be equal. */
4710 if (!e1 || !e2)
4711 return false;
4712
4713 /* Now perform the actual comparison. */
4714 if (TREE_CODE (e1) == TREE_CODE (e2)
4715 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4716 return true;
4717
4718 return false;
4719 }
4720
4721
4722 /* Return true if the nary operation NARY may trap. This is a copy
4723 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4724
4725 bool
4726 vn_nary_may_trap (vn_nary_op_t nary)
4727 {
4728 tree type;
4729 tree rhs2 = NULL_TREE;
4730 bool honor_nans = false;
4731 bool honor_snans = false;
4732 bool fp_operation = false;
4733 bool honor_trapv = false;
4734 bool handled, ret;
4735 unsigned i;
4736
4737 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4738 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4739 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4740 {
4741 type = nary->type;
4742 fp_operation = FLOAT_TYPE_P (type);
4743 if (fp_operation)
4744 {
4745 honor_nans = flag_trapping_math && !flag_finite_math_only;
4746 honor_snans = flag_signaling_nans != 0;
4747 }
4748 else if (INTEGRAL_TYPE_P (type)
4749 && TYPE_OVERFLOW_TRAPS (type))
4750 honor_trapv = true;
4751 }
4752 if (nary->length >= 2)
4753 rhs2 = nary->op[1];
4754 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4755 honor_trapv,
4756 honor_nans, honor_snans, rhs2,
4757 &handled);
4758 if (handled
4759 && ret)
4760 return true;
4761
4762 for (i = 0; i < nary->length; ++i)
4763 if (tree_could_trap_p (nary->op[i]))
4764 return true;
4765
4766 return false;
4767 }
4768
4769 /* Return true if the reference operation REF may trap. */
4770
4771 bool
4772 vn_reference_may_trap (vn_reference_t ref)
4773 {
4774 switch (ref->operands[0].opcode)
4775 {
4776 case MODIFY_EXPR:
4777 case CALL_EXPR:
4778 /* We do not handle calls. */
4779 case ADDR_EXPR:
4780 /* And toplevel address computations never trap. */
4781 return false;
4782 default:;
4783 }
4784
4785 vn_reference_op_t op;
4786 unsigned i;
4787 FOR_EACH_VEC_ELT (ref->operands, i, op)
4788 {
4789 switch (op->opcode)
4790 {
4791 case WITH_SIZE_EXPR:
4792 case TARGET_MEM_REF:
4793 /* Always variable. */
4794 return true;
4795 case COMPONENT_REF:
4796 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
4797 return true;
4798 break;
4799 case ARRAY_RANGE_REF:
4800 case ARRAY_REF:
4801 if (TREE_CODE (op->op0) == SSA_NAME)
4802 return true;
4803 break;
4804 case MEM_REF:
4805 /* Nothing interesting in itself, the base is separate. */
4806 break;
4807 /* The following are the address bases. */
4808 case SSA_NAME:
4809 return true;
4810 case ADDR_EXPR:
4811 if (op->op0)
4812 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
4813 return false;
4814 default:;
4815 }
4816 }
4817 return false;
4818 }
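
/* Illustrative examples of what the predicate above treats as possibly
   trapping (made up): an ARRAY_REF with a variable index `a[i_1]', an
   access whose address base is an SSA_NAME pointer, a TARGET_MEM_REF or a
   WITH_SIZE_EXPR.  A toplevel address computation or a constant-offset
   access to an object that tree_could_trap_p considers safe is not.  */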
4819
4820 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
4821 bitmap inserted_exprs_)
4822 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
4823 el_todo (0), eliminations (0), insertions (0),
4824 inserted_exprs (inserted_exprs_)
4825 {
4826 need_eh_cleanup = BITMAP_ALLOC (NULL);
4827 need_ab_cleanup = BITMAP_ALLOC (NULL);
4828 }
4829
4830 eliminate_dom_walker::~eliminate_dom_walker ()
4831 {
4832 BITMAP_FREE (need_eh_cleanup);
4833 BITMAP_FREE (need_ab_cleanup);
4834 }
4835
4836 /* Return a leader for OP that is available at the current point of the
4837 eliminate domwalk. */
4838
4839 tree
4840 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
4841 {
4842 tree valnum = VN_INFO (op)->valnum;
4843 if (TREE_CODE (valnum) == SSA_NAME)
4844 {
4845 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4846 return valnum;
4847 if (avail.length () > SSA_NAME_VERSION (valnum))
4848 return avail[SSA_NAME_VERSION (valnum)];
4849 }
4850 else if (is_gimple_min_invariant (valnum))
4851 return valnum;
4852 return NULL_TREE;
4853 }
4854
4855 /* At the current point of the eliminate domwalk make OP available. */
4856
4857 void
4858 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
4859 {
4860 tree valnum = VN_INFO (op)->valnum;
4861 if (TREE_CODE (valnum) == SSA_NAME)
4862 {
4863 if (avail.length () <= SSA_NAME_VERSION (valnum))
4864 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4865 tree pushop = op;
4866 if (avail[SSA_NAME_VERSION (valnum)])
4867 pushop = avail[SSA_NAME_VERSION (valnum)];
4868 avail_stack.safe_push (pushop);
4869 avail[SSA_NAME_VERSION (valnum)] = op;
4870 }
4871 }
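
/* A rough sketch of how the two helpers above cooperate during the
   dominator walk (illustration only): when a statement defining x_1 with
   value number V is processed, eliminate_push_avail stores x_1 in avail[V]
   and remembers the previous entry on avail_stack; a later use in a
   dominated block asking eliminate_avail for some y_2 with the same value V
   then gets x_1 back as the leader.  When the walk leaves the subtree the
   stack is unwound again (in the domwalk callbacks, not shown here), so
   leaders never leak into blocks they do not dominate.  */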
4872
4873 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4874 the leader for the expression if insertion was successful. */
4875
4876 tree
4877 eliminate_dom_walker::eliminate_insert (basic_block bb,
4878 gimple_stmt_iterator *gsi, tree val)
4879 {
4880 /* We can insert a sequence with a single assignment only. */
4881 gimple_seq stmts = VN_INFO (val)->expr;
4882 if (!gimple_seq_singleton_p (stmts))
4883 return NULL_TREE;
4884 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4885 if (!stmt
4886 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4887 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4888 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4889 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4890 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4891 return NULL_TREE;
4892
4893 tree op = gimple_assign_rhs1 (stmt);
4894 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4895 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4896 op = TREE_OPERAND (op, 0);
4897 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
4898 if (!leader)
4899 return NULL_TREE;
4900
4901 tree res;
4902 stmts = NULL;
4903 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4904 res = gimple_build (&stmts, BIT_FIELD_REF,
4905 TREE_TYPE (val), leader,
4906 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4907 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4908 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4909 res = gimple_build (&stmts, BIT_AND_EXPR,
4910 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4911 else
4912 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4913 TREE_TYPE (val), leader);
4914 if (TREE_CODE (res) != SSA_NAME
4915 || SSA_NAME_IS_DEFAULT_DEF (res)
4916 || gimple_bb (SSA_NAME_DEF_STMT (res)))
4917 {
4918 gimple_seq_discard (stmts);
4919
4920 /* During propagation we have to treat SSA info conservatively
4921 and thus we can end up simplifying the inserted expression
4922 at elimination time to something not defined in stmts. */
4923 /* But then this is a redundancy we failed to detect. Which means
4924 res now has two values. That doesn't play well with how
4925 we track availability here, so give up. */
4926 if (dump_file && (dump_flags & TDF_DETAILS))
4927 {
4928 if (TREE_CODE (res) == SSA_NAME)
4929 res = eliminate_avail (bb, res);
4930 if (res)
4931 {
4932 fprintf (dump_file, "Failed to insert expression for value ");
4933 print_generic_expr (dump_file, val);
4934 fprintf (dump_file, " which is really fully redundant to ");
4935 print_generic_expr (dump_file, res);
4936 fprintf (dump_file, "\n");
4937 }
4938 }
4939
4940 return NULL_TREE;
4941 }
4942 else
4943 {
4944 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4945 VN_INFO (res)->valnum = val;
4946 VN_INFO (res)->visited = true;
4947 }
4948
4949 insertions++;
4950 if (dump_file && (dump_flags & TDF_DETAILS))
4951 {
4952 fprintf (dump_file, "Inserted ");
4953 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
4954 }
4955
4956 return res;
4957 }
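
/* Hedged example of the insertion above (names made up): if SCCVN recorded
   that the value of some x_5 is really `(short) y_7' but no statement
   computing that conversion is available on the current path,
   eliminate_insert materializes roughly

     t_9 = (short) <leader of y_7>;

   before the use, value-numbers t_9 to the wanted value and pushes it as
   available, so the caller can use it as the leader for x_5.  Only single
   conversions, VIEW_CONVERT_EXPRs, BIT_FIELD_REFs and BIT_AND_EXPRs with a
   constant mask are handled.  */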
4958
4959 void
4960 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
4961 {
4962 tree sprime = NULL_TREE;
4963 gimple *stmt = gsi_stmt (*gsi);
4964 tree lhs = gimple_get_lhs (stmt);
4965 if (lhs && TREE_CODE (lhs) == SSA_NAME
4966 && !gimple_has_volatile_ops (stmt)
4967 /* See PR43491. Do not replace a global register variable when
4968 it is the RHS of an assignment. Do replace local register
4969 variables since gcc does not guarantee a local variable will
4970 be allocated in a register.
4971 ??? The fix isn't effective here. This should instead
4972 be ensured by not value-numbering them the same but treating
4973 them like volatiles? */
4974 && !(gimple_assign_single_p (stmt)
4975 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4976 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4977 && is_global_var (gimple_assign_rhs1 (stmt)))))
4978 {
4979 sprime = eliminate_avail (b, lhs);
4980 if (!sprime)
4981 {
4982 /* If there is no existing usable leader but SCCVN thinks
4983 it has an expression it wants to use as replacement,
4984 insert that. */
4985 tree val = VN_INFO (lhs)->valnum;
4986 if (val != VN_TOP
4987 && TREE_CODE (val) == SSA_NAME
4988 && VN_INFO (val)->needs_insertion
4989 && VN_INFO (val)->expr != NULL
4990 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
4991 eliminate_push_avail (b, sprime);
4992 }
4993
4994 /* If this now constitutes a copy, duplicate points-to
4995 and range info appropriately. This is especially
4996 important for inserted code. See tree-ssa-copy.c
4997 for similar code. */
4998 if (sprime
4999 && TREE_CODE (sprime) == SSA_NAME)
5000 {
5001 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5002 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5003 && SSA_NAME_PTR_INFO (lhs)
5004 && ! SSA_NAME_PTR_INFO (sprime))
5005 {
5006 duplicate_ssa_name_ptr_info (sprime,
5007 SSA_NAME_PTR_INFO (lhs));
5008 if (b != sprime_b)
5009 mark_ptr_info_alignment_unknown
5010 (SSA_NAME_PTR_INFO (sprime));
5011 }
5012 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5013 && SSA_NAME_RANGE_INFO (lhs)
5014 && ! SSA_NAME_RANGE_INFO (sprime)
5015 && b == sprime_b)
5016 duplicate_ssa_name_range_info (sprime,
5017 SSA_NAME_RANGE_TYPE (lhs),
5018 SSA_NAME_RANGE_INFO (lhs));
5019 }
5020
5021 /* Inhibit the use of an inserted PHI on a loop header when
5022 the address of the memory reference is a simple induction
5023 variable. In other cases the vectorizer won't do anything
5024 anyway (either it's loop invariant or a complicated
5025 expression). */
5026 if (sprime
5027 && TREE_CODE (sprime) == SSA_NAME
5028 && do_pre
5029 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5030 && loop_outer (b->loop_father)
5031 && has_zero_uses (sprime)
5032 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5033 && gimple_assign_load_p (stmt))
5034 {
5035 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5036 basic_block def_bb = gimple_bb (def_stmt);
5037 if (gimple_code (def_stmt) == GIMPLE_PHI
5038 && def_bb->loop_father->header == def_bb)
5039 {
5040 loop_p loop = def_bb->loop_father;
5041 ssa_op_iter iter;
5042 tree op;
5043 bool found = false;
5044 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5045 {
5046 affine_iv iv;
5047 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5048 if (def_bb
5049 && flow_bb_inside_loop_p (loop, def_bb)
5050 && simple_iv (loop, loop, op, &iv, true))
5051 {
5052 found = true;
5053 break;
5054 }
5055 }
5056 if (found)
5057 {
5058 if (dump_file && (dump_flags & TDF_DETAILS))
5059 {
5060 fprintf (dump_file, "Not replacing ");
5061 print_gimple_expr (dump_file, stmt, 0);
5062 fprintf (dump_file, " with ");
5063 print_generic_expr (dump_file, sprime);
5064 fprintf (dump_file, " which would add a loop"
5065 " carried dependence to loop %d\n",
5066 loop->num);
5067 }
5068 /* Don't keep sprime available. */
5069 sprime = NULL_TREE;
5070 }
5071 }
5072 }
5073
5074 if (sprime)
5075 {
5076 /* If we can propagate the value computed for LHS into
5077 all uses don't bother doing anything with this stmt. */
5078 if (may_propagate_copy (lhs, sprime))
5079 {
5080 /* Mark it for removal. */
5081 to_remove.safe_push (stmt);
5082
5083 /* ??? Don't count copy/constant propagations. */
5084 if (gimple_assign_single_p (stmt)
5085 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5086 || gimple_assign_rhs1 (stmt) == sprime))
5087 return;
5088
5089 if (dump_file && (dump_flags & TDF_DETAILS))
5090 {
5091 fprintf (dump_file, "Replaced ");
5092 print_gimple_expr (dump_file, stmt, 0);
5093 fprintf (dump_file, " with ");
5094 print_generic_expr (dump_file, sprime);
5095 fprintf (dump_file, " in all uses of ");
5096 print_gimple_stmt (dump_file, stmt, 0);
5097 }
5098
5099 eliminations++;
5100 return;
5101 }
5102
5103 /* If this is an assignment from our leader (which
5104 happens in the case the value-number is a constant)
5105 then there is nothing to do. */
5106 if (gimple_assign_single_p (stmt)
5107 && sprime == gimple_assign_rhs1 (stmt))
5108 return;
5109
5110 /* Else replace its RHS. */
5111 if (dump_file && (dump_flags & TDF_DETAILS))
5112 {
5113 fprintf (dump_file, "Replaced ");
5114 print_gimple_expr (dump_file, stmt, 0);
5115 fprintf (dump_file, " with ");
5116 print_generic_expr (dump_file, sprime);
5117 fprintf (dump_file, " in ");
5118 print_gimple_stmt (dump_file, stmt, 0);
5119 }
5120 eliminations++;
5121
5122 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5123 && stmt_can_make_abnormal_goto (stmt));
5124 gimple *orig_stmt = stmt;
5125 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5126 TREE_TYPE (sprime)))
5127 {
5128 /* We preserve conversions to but not from function or method
5129 types. This asymmetry makes it necessary to re-instantiate
5130 conversions here. */
5131 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5132 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5133 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5134 else
5135 gcc_unreachable ();
5136 }
5137 tree vdef = gimple_vdef (stmt);
5138 tree vuse = gimple_vuse (stmt);
5139 propagate_tree_value_into_stmt (gsi, sprime);
5140 stmt = gsi_stmt (*gsi);
5141 update_stmt (stmt);
5142 /* In case the VDEF on the original stmt was released, value-number
5143 it to the VUSE. This is to make vuse_ssa_val able to skip
5144 released virtual operands. */
5145 if (vdef != gimple_vdef (stmt))
5146 {
5147 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5148 VN_INFO (vdef)->valnum = vuse;
5149 }
5150
5151 /* If we removed EH side-effects from the statement, clean
5152 its EH information. */
5153 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5154 {
5155 bitmap_set_bit (need_eh_cleanup,
5156 gimple_bb (stmt)->index);
5157 if (dump_file && (dump_flags & TDF_DETAILS))
5158 fprintf (dump_file, " Removed EH side-effects.\n");
5159 }
5160
5161 /* Likewise for AB side-effects. */
5162 if (can_make_abnormal_goto
5163 && !stmt_can_make_abnormal_goto (stmt))
5164 {
5165 bitmap_set_bit (need_ab_cleanup,
5166 gimple_bb (stmt)->index);
5167 if (dump_file && (dump_flags & TDF_DETAILS))
5168 fprintf (dump_file, " Removed AB side-effects.\n");
5169 }
5170
5171 return;
5172 }
5173 }
5174
5175 /* If the statement is a scalar store, see if the expression
5176 has the same value number as its rhs. If so, the store is
5177 dead. */
5178 if (gimple_assign_single_p (stmt)
5179 && !gimple_has_volatile_ops (stmt)
5180 && !is_gimple_reg (gimple_assign_lhs (stmt))
5181 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5182 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5183 {
5184 tree val;
5185 tree rhs = gimple_assign_rhs1 (stmt);
5186 vn_reference_t vnresult;
5187 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5188 &vnresult, false);
5189 if (TREE_CODE (rhs) == SSA_NAME)
5190 rhs = VN_INFO (rhs)->valnum;
5191 if (val
5192 && operand_equal_p (val, rhs, 0))
5193 {
5194 /* We can only remove the later store if the former aliases
5195 at least all accesses the later one does or if the store
5196 was to readonly memory storing the same value. */
5197 alias_set_type set = get_alias_set (lhs);
5198 if (! vnresult
5199 || vnresult->set == set
5200 || alias_set_subset_of (set, vnresult->set))
5201 {
5202 if (dump_file && (dump_flags & TDF_DETAILS))
5203 {
5204 fprintf (dump_file, "Deleted redundant store ");
5205 print_gimple_stmt (dump_file, stmt, 0);
5206 }
5207
5208 /* Queue stmt for removal. */
5209 to_remove.safe_push (stmt);
5210 return;
5211 }
5212 }
5213 }
5214
5215 /* If this is a control statement for which value numbering left
5216 edges unexecuted, force the condition in a way consistent with
5217 that. */
5218 if (gcond *cond = dyn_cast <gcond *> (stmt))
5219 {
5220 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5221 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5222 {
5223 if (dump_file && (dump_flags & TDF_DETAILS))
5224 {
5225 fprintf (dump_file, "Removing unexecutable edge from ");
5226 print_gimple_stmt (dump_file, stmt, 0);
5227 }
5228 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5229 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5230 gimple_cond_make_true (cond);
5231 else
5232 gimple_cond_make_false (cond);
5233 update_stmt (cond);
5234 el_todo |= TODO_cleanup_cfg;
5235 return;
5236 }
5237 }
5238
5239 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5240 bool was_noreturn = (is_gimple_call (stmt)
5241 && gimple_call_noreturn_p (stmt));
5242 tree vdef = gimple_vdef (stmt);
5243 tree vuse = gimple_vuse (stmt);
5244
5245 /* If we didn't replace the whole stmt (or propagate the result
5246 into all uses), replace all uses on this stmt with their
5247 leaders. */
5248 bool modified = false;
5249 use_operand_p use_p;
5250 ssa_op_iter iter;
5251 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5252 {
5253 tree use = USE_FROM_PTR (use_p);
5254 /* ??? The call code above leaves stmt operands un-updated. */
5255 if (TREE_CODE (use) != SSA_NAME)
5256 continue;
5257 tree sprime;
5258 if (SSA_NAME_IS_DEFAULT_DEF (use))
5259 /* ??? For default defs BB shouldn't matter, but we have to
5260 solve the inconsistency between rpo eliminate and
5261 dom eliminate avail valueization first. */
5262 sprime = eliminate_avail (b, use);
5263 else
5264 /* Look for something available at the definition block of the argument.
5265 This avoids inconsistencies between availability there which
5266 decides if the stmt can be removed and availability at the
5267 use site. The SSA property ensures that things available
5268 at the definition are also available at uses. */
5269 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5270 if (sprime && sprime != use
5271 && may_propagate_copy (use, sprime)
5272 /* We substitute into debug stmts to avoid excessive
5273 debug temporaries created by removed stmts, but we need
5274 to avoid doing so for inserted sprimes as we never want
5275 to create debug temporaries for them. */
5276 && (!inserted_exprs
5277 || TREE_CODE (sprime) != SSA_NAME
5278 || !is_gimple_debug (stmt)
5279 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5280 {
5281 propagate_value (use_p, sprime);
5282 modified = true;
5283 }
5284 }
5285
5286 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5287 into, which is a requirement for the IPA devirt machinery. */
5288 gimple *old_stmt = stmt;
5289 if (modified)
5290 {
5291 /* If a formerly non-invariant ADDR_EXPR is turned into an
5292 invariant one it was on a separate stmt. */
5293 if (gimple_assign_single_p (stmt)
5294 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5295 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5296 gimple_stmt_iterator prev = *gsi;
5297 gsi_prev (&prev);
5298 if (fold_stmt (gsi))
5299 {
5300 /* fold_stmt may have created new stmts in between
5301 the previous stmt and the folded stmt. Mark
5302 all defs created there as varying to not confuse
5303 the SCCVN machinery as we're using that even during
5304 elimination. */
5305 if (gsi_end_p (prev))
5306 prev = gsi_start_bb (b);
5307 else
5308 gsi_next (&prev);
5309 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5310 do
5311 {
5312 tree def;
5313 ssa_op_iter dit;
5314 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5315 dit, SSA_OP_ALL_DEFS)
5316 /* As existing DEFs may move between stmts
5317 only process new ones. */
5318 if (! has_VN_INFO (def))
5319 {
5320 VN_INFO (def)->valnum = def;
5321 VN_INFO (def)->visited = true;
5322 }
5323 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5324 break;
5325 gsi_next (&prev);
5326 }
5327 while (1);
5328 }
5329 stmt = gsi_stmt (*gsi);
5330 /* In case we folded the stmt away schedule the NOP for removal. */
5331 if (gimple_nop_p (stmt))
5332 to_remove.safe_push (stmt);
5333 }
5334
5335 /* Visit indirect calls and turn them into direct calls if
5336 possible using the devirtualization machinery. Do this before
5337 checking for required EH/abnormal/noreturn cleanup as devirt
5338 may expose more of those. */
5339 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5340 {
5341 tree fn = gimple_call_fn (call_stmt);
5342 if (fn
5343 && flag_devirtualize
5344 && virtual_method_call_p (fn))
5345 {
5346 tree otr_type = obj_type_ref_class (fn);
5347 unsigned HOST_WIDE_INT otr_tok
5348 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5349 tree instance;
5350 ipa_polymorphic_call_context context (current_function_decl,
5351 fn, stmt, &instance);
5352 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5353 otr_type, stmt, NULL);
5354 bool final;
5355 vec <cgraph_node *> targets
5356 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5357 otr_tok, context, &final);
5358 if (dump_file)
5359 dump_possible_polymorphic_call_targets (dump_file,
5360 obj_type_ref_class (fn),
5361 otr_tok, context);
5362 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5363 {
5364 tree fn;
5365 if (targets.length () == 1)
5366 fn = targets[0]->decl;
5367 else
5368 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5369 if (dump_enabled_p ())
5370 {
5371 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5372 "converting indirect call to "
5373 "function %s\n",
5374 lang_hooks.decl_printable_name (fn, 2));
5375 }
5376 gimple_call_set_fndecl (call_stmt, fn);
5377 /* If changing the call to __builtin_unreachable
5378 or similar noreturn function, adjust gimple_call_fntype
5379 too. */
5380 if (gimple_call_noreturn_p (call_stmt)
5381 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5382 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5383 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5384 == void_type_node))
5385 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5386 maybe_remove_unused_call_args (cfun, call_stmt);
5387 modified = true;
5388 }
5389 }
5390 }
5391
5392 if (modified)
5393 {
5394 /* When changing a call into a noreturn call, cfg cleanup
5395 is needed to fix up the noreturn call. */
5396 if (!was_noreturn
5397 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5398 to_fixup.safe_push (stmt);
5399 /* When changing a condition or switch into one where we know which
5400 edge will be executed, schedule a cfg cleanup. */
5401 if ((gimple_code (stmt) == GIMPLE_COND
5402 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5403 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5404 || (gimple_code (stmt) == GIMPLE_SWITCH
5405 && TREE_CODE (gimple_switch_index
5406 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5407 el_todo |= TODO_cleanup_cfg;
5408 /* If we removed EH side-effects from the statement, clean
5409 its EH information. */
5410 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5411 {
5412 bitmap_set_bit (need_eh_cleanup,
5413 gimple_bb (stmt)->index);
5414 if (dump_file && (dump_flags & TDF_DETAILS))
5415 fprintf (dump_file, " Removed EH side-effects.\n");
5416 }
5417 /* Likewise for AB side-effects. */
5418 if (can_make_abnormal_goto
5419 && !stmt_can_make_abnormal_goto (stmt))
5420 {
5421 bitmap_set_bit (need_ab_cleanup,
5422 gimple_bb (stmt)->index);
5423 if (dump_file && (dump_flags & TDF_DETAILS))
5424 fprintf (dump_file, " Removed AB side-effects.\n");
5425 }
5426 update_stmt (stmt);
5427 /* In case the VDEF on the original stmt was released, value-number
5428 it to the VUSE. This is to make vuse_ssa_val able to skip
5429 released virtual operands. */
5430 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5431 VN_INFO (vdef)->valnum = vuse;
5432 }
5433
5434 /* Make new values available - for fully redundant LHS we
5435 continue with the next stmt above and skip this. */
5436 def_operand_p defp;
5437 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5438 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5439 }
5440
5441 /* Perform elimination for the basic-block B during the domwalk. */
5442
5443 edge
5444 eliminate_dom_walker::before_dom_children (basic_block b)
5445 {
5446 /* Mark new bb. */
5447 avail_stack.safe_push (NULL_TREE);
5448
5449 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5450 if (!(b->flags & BB_EXECUTABLE))
5451 return NULL;
5452
5453 vn_context_bb = b;
5454
5455 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5456 {
5457 gphi *phi = gsi.phi ();
5458 tree res = PHI_RESULT (phi);
5459
5460 if (virtual_operand_p (res))
5461 {
5462 gsi_next (&gsi);
5463 continue;
5464 }
5465
5466 tree sprime = eliminate_avail (b, res);
5467 if (sprime
5468 && sprime != res)
5469 {
5470 if (dump_file && (dump_flags & TDF_DETAILS))
5471 {
5472 fprintf (dump_file, "Replaced redundant PHI node defining ");
5473 print_generic_expr (dump_file, res);
5474 fprintf (dump_file, " with ");
5475 print_generic_expr (dump_file, sprime);
5476 fprintf (dump_file, "\n");
5477 }
5478
5479 /* If we inserted this PHI node ourselves, it's not an elimination. */
5480 if (! inserted_exprs
5481 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5482 eliminations++;
5483
5484 /* If we will propagate into all uses don't bother to do
5485 anything. */
5486 if (may_propagate_copy (res, sprime))
5487 {
5488 /* Mark the PHI for removal. */
5489 to_remove.safe_push (phi);
5490 gsi_next (&gsi);
5491 continue;
5492 }
5493
5494 remove_phi_node (&gsi, false);
5495
5496 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5497 sprime = fold_convert (TREE_TYPE (res), sprime);
5498 gimple *stmt = gimple_build_assign (res, sprime);
5499 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5500 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5501 continue;
5502 }
5503
5504 eliminate_push_avail (b, res);
5505 gsi_next (&gsi);
5506 }
5507
5508 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5509 !gsi_end_p (gsi);
5510 gsi_next (&gsi))
5511 eliminate_stmt (b, &gsi);
5512
5513 /* Replace destination PHI arguments. */
5514 edge_iterator ei;
5515 edge e;
5516 FOR_EACH_EDGE (e, ei, b->succs)
5517 if (e->flags & EDGE_EXECUTABLE)
5518 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5519 !gsi_end_p (gsi);
5520 gsi_next (&gsi))
5521 {
5522 gphi *phi = gsi.phi ();
5523 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5524 tree arg = USE_FROM_PTR (use_p);
5525 if (TREE_CODE (arg) != SSA_NAME
5526 || virtual_operand_p (arg))
5527 continue;
5528 tree sprime = eliminate_avail (b, arg);
5529 if (sprime && may_propagate_copy (arg, sprime))
5530 propagate_value (use_p, sprime);
5531 }
5532
5533 vn_context_bb = NULL;
5534
5535 return NULL;
5536 }
5537
5538 /* Make no longer available leaders no longer available. */
5539
5540 void
5541 eliminate_dom_walker::after_dom_children (basic_block)
5542 {
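      /* Pop everything pushed onto avail_stack since the NULL_TREE marker
         from before_dom_children.  A popped entry that still is the current
         leader for its value is removed; otherwise it is a previously
         shadowed leader that gets restored.  */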
5543 tree entry;
5544 while ((entry = avail_stack.pop ()) != NULL_TREE)
5545 {
5546 tree valnum = VN_INFO (entry)->valnum;
5547 tree old = avail[SSA_NAME_VERSION (valnum)];
5548 if (old == entry)
5549 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5550 else
5551 avail[SSA_NAME_VERSION (valnum)] = entry;
5552 }
5553 }
5554
5555 /* Remove queued stmts and perform delayed cleanups. */
5556
5557 unsigned
5558 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5559 {
5560 statistics_counter_event (cfun, "Eliminated", eliminations);
5561 statistics_counter_event (cfun, "Insertions", insertions);
5562
5563 /* We cannot remove stmts during BB walk, especially not release SSA
5564 names there as this confuses the VN machinery. The stmts ending
5565 up in to_remove are either stores or simple copies.
5566 Remove stmts in reverse order to make debug stmt creation possible. */
5567 while (!to_remove.is_empty ())
5568 {
5569 bool do_release_defs = true;
5570 gimple *stmt = to_remove.pop ();
5571
5572 /* When we are value-numbering a region we do not require exit PHIs to
5573 be present so we have to make sure to deal with uses outside of the
5574 region of stmts that we thought were eliminated.
5575 ??? Note we may be confused by uses in dead regions we didn't run
5576 elimination on. Rather than checking individual uses we accept
5577 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5578 contains such an example). */
5579 if (region_p)
5580 {
5581 if (gphi *phi = dyn_cast <gphi *> (stmt))
5582 {
5583 tree lhs = gimple_phi_result (phi);
5584 if (!has_zero_uses (lhs))
5585 {
5586 if (dump_file && (dump_flags & TDF_DETAILS))
5587 fprintf (dump_file, "Keeping eliminated stmt live "
5588 "as copy because of out-of-region uses\n");
5589 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5590 gimple *copy = gimple_build_assign (lhs, sprime);
5591 gimple_stmt_iterator gsi
5592 = gsi_after_labels (gimple_bb (stmt));
5593 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5594 do_release_defs = false;
5595 }
5596 }
5597 else if (tree lhs = gimple_get_lhs (stmt))
5598 if (TREE_CODE (lhs) == SSA_NAME
5599 && !has_zero_uses (lhs))
5600 {
5601 if (dump_file && (dump_flags & TDF_DETAILS))
5602 fprintf (dump_file, "Keeping eliminated stmt live "
5603 "as copy because of out-of-region uses\n");
5604 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5605 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5606 if (is_gimple_assign (stmt))
5607 {
5608 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5609 stmt = gsi_stmt (gsi);
5610 update_stmt (stmt);
5611 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
5612 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
5613 continue;
5614 }
5615 else
5616 {
5617 gimple *copy = gimple_build_assign (lhs, sprime);
5618 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5619 do_release_defs = false;
5620 }
5621 }
5622 }
5623
5624 if (dump_file && (dump_flags & TDF_DETAILS))
5625 {
5626 fprintf (dump_file, "Removing dead stmt ");
5627 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5628 }
5629
5630 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5631 if (gimple_code (stmt) == GIMPLE_PHI)
5632 remove_phi_node (&gsi, do_release_defs);
5633 else
5634 {
5635 basic_block bb = gimple_bb (stmt);
5636 unlink_stmt_vdef (stmt);
5637 if (gsi_remove (&gsi, true))
5638 bitmap_set_bit (need_eh_cleanup, bb->index);
5639 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5640 bitmap_set_bit (need_ab_cleanup, bb->index);
5641 if (do_release_defs)
5642 release_defs (stmt);
5643 }
5644
5645 /* Removing a stmt may expose a forwarder block. */
5646 el_todo |= TODO_cleanup_cfg;
5647 }
5648
5649 /* Fix up stmts that became noreturn calls. This may require splitting
5650 blocks and thus isn't possible during the dominator walk. Do this
5651 in reverse order so we don't inadvertently remove a stmt we want to
5652 fix up by visiting a dominating now-noreturn call first. */
5653 while (!to_fixup.is_empty ())
5654 {
5655 gimple *stmt = to_fixup.pop ();
5656
5657 if (dump_file && (dump_flags & TDF_DETAILS))
5658 {
5659 fprintf (dump_file, "Fixing up noreturn call ");
5660 print_gimple_stmt (dump_file, stmt, 0);
5661 }
5662
5663 if (fixup_noreturn_call (stmt))
5664 el_todo |= TODO_cleanup_cfg;
5665 }
5666
5667 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
5668 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
5669
5670 if (do_eh_cleanup)
5671 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
5672
5673 if (do_ab_cleanup)
5674 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
5675
5676 if (do_eh_cleanup || do_ab_cleanup)
5677 el_todo |= TODO_cleanup_cfg;
5678
5679 return el_todo;
5680 }
5681
5682 /* Eliminate fully redundant computations. */
5683
5684 unsigned
5685 eliminate_with_rpo_vn (bitmap inserted_exprs)
5686 {
5687 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
5688
5689 walker.walk (cfun->cfg->x_entry_block_ptr);
5690 return walker.eliminate_cleanup ();
5691 }
5692
5693 static unsigned
5694 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
5695 bool iterate, bool eliminate);
5696
5697 void
5698 run_rpo_vn (vn_lookup_kind kind)
5699 {
5700 default_vn_walk_kind = kind;
5701 do_rpo_vn (cfun, NULL, NULL, true, false);
5702
5703 /* ??? Prune requirement of these. */
5704 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
5705 constant_value_ids = BITMAP_ALLOC (NULL);
5706
5707 /* Initialize the value ids and prune out remaining VN_TOPs
5708 from dead code. */
5709 tree name;
5710 unsigned i;
5711 FOR_EACH_SSA_NAME (i, name, cfun)
5712 {
5713 vn_ssa_aux_t info = VN_INFO (name);
5714 if (!info->visited
5715 || info->valnum == VN_TOP)
5716 info->valnum = name;
5717 if (info->valnum == name)
5718 info->value_id = get_next_value_id ();
5719 else if (is_gimple_min_invariant (info->valnum))
5720 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5721 }
5722
5723 /* Propagate value ids from value leaders to the names valued to them. */
5724 FOR_EACH_SSA_NAME (i, name, cfun)
5725 {
5726 vn_ssa_aux_t info = VN_INFO (name);
5727 if (TREE_CODE (info->valnum) == SSA_NAME
5728 && info->valnum != name
5729 && info->value_id != VN_INFO (info->valnum)->value_id)
5730 info->value_id = VN_INFO (info->valnum)->value_id;
5731 }
5732
5733 set_hashtable_value_ids ();
5734
5735 if (dump_file && (dump_flags & TDF_DETAILS))
5736 {
5737 fprintf (dump_file, "Value numbers:\n");
5738 FOR_EACH_SSA_NAME (i, name, cfun)
5739 {
5740 if (VN_INFO (name)->visited
5741 && SSA_VAL (name) != name)
5742 {
5743 print_generic_expr (dump_file, name);
5744 fprintf (dump_file, " = ");
5745 print_generic_expr (dump_file, SSA_VAL (name));
5746 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
5747 }
5748 }
5749 }
5750 }
5751
5752 /* Free VN associated data structures. */
5753
5754 void
5755 free_rpo_vn (void)
5756 {
5757 free_vn_table (valid_info);
5758 XDELETE (valid_info);
5759 obstack_free (&vn_tables_obstack, NULL);
5760 obstack_free (&vn_tables_insert_obstack, NULL);
5761
5762 vn_ssa_aux_iterator_type it;
5763 vn_ssa_aux_t info;
5764 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
5765 if (info->needs_insertion)
5766 release_ssa_name (info->name);
5767 obstack_free (&vn_ssa_aux_obstack, NULL);
5768 delete vn_ssa_aux_hash;
5769
5770 delete constant_to_value_id;
5771 constant_to_value_id = NULL;
5772 BITMAP_FREE (constant_value_ids);
5773 }
5774
5775 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
5776
5777 static tree
5778 vn_lookup_simplify_result (gimple_match_op *res_op)
5779 {
5780 if (!res_op->code.is_tree_code ())
5781 return NULL_TREE;
5782 tree *ops = res_op->ops;
5783 unsigned int length = res_op->num_ops;
5784 if (res_op->code == CONSTRUCTOR
5785 /* ??? We arrive here with SCCVN's view, a decomposed CONSTRUCTOR,
5786 while GIMPLE / match-and-simplify sees CONSTRUCTOR as a GENERIC tree. */
5787 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
5788 {
5789 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
5790 ops = XALLOCAVEC (tree, length);
5791 for (unsigned i = 0; i < length; ++i)
5792 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
5793 }
5794 vn_nary_op_t vnresult = NULL;
5795 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
5796 res_op->type, ops, &vnresult);
5797 /* If this is used from expression simplification make sure to
5798 return an available expression. */
5799 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
5800 res = rpo_avail->eliminate_avail (vn_context_bb, res);
5801 return res;
5802 }
5803
5804 rpo_elim::~rpo_elim ()
5805 {
5806 /* Release the avail vectors. */
5807 for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
5808 i != m_rpo_avail.end (); ++i)
5809 (*i).second.release ();
5810 }
5811
5812 /* Return a leader for OP's value that is valid at BB. */
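     /* m_rpo_avail maps a value (its valnum SSA name) to a vector of
        (basic-block index, SSA name version) pairs recording where leaders
        for that value were pushed during the RPO walk; the lookup scans the
        vector from the back for the first entry whose block dominates BB
        (via dominated_by_p_w_unex).  */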
5813
5814 tree
5815 rpo_elim::eliminate_avail (basic_block bb, tree op)
5816 {
5817 bool visited;
5818 tree valnum = SSA_VAL (op, &visited);
5819 /* If we didn't visit OP then it must be defined outside of the
5820 region we process and also dominate it. So it is available. */
5821 if (!visited)
5822 return op;
5823 if (TREE_CODE (valnum) == SSA_NAME)
5824 {
5825 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5826 return valnum;
5827 vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
5828 if (!av || av->is_empty ())
5829 return NULL_TREE;
5830 int i = av->length () - 1;
5831 if ((*av)[i].first == bb->index)
5832 /* On tramp3d 90% of the cases are here. */
5833 return ssa_name ((*av)[i].second);
5834 do
5835 {
5836 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
5837 /* ??? During elimination we have to use availability at the
5838 definition site of a use we try to replace. This
5839 is required to not run into inconsistencies because
5840 of dominated_by_p_w_unex behavior and removing a definition
5841 while not replacing all uses.
5842 ??? We could try to consistently walk dominators
5843 ignoring non-executable regions. The nearest common
5844 dominator of bb and abb is where we can stop walking. We
5845 may also be able to "pre-compute" (bits of) the next immediate
5846 (non-)dominator during the RPO walk when marking edges as
5847 executable. */
5848 if (dominated_by_p_w_unex (bb, abb))
5849 {
5850 tree leader = ssa_name ((*av)[i].second);
5851 /* Prevent eliminations that break loop-closed SSA. */
5852 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
5853 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
5854 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
5855 (leader))->loop_father,
5856 bb))
5857 return NULL_TREE;
5858 if (dump_file && (dump_flags & TDF_DETAILS))
5859 {
5860 print_generic_expr (dump_file, leader);
5861 fprintf (dump_file, " is available for ");
5862 print_generic_expr (dump_file, valnum);
5863 fprintf (dump_file, "\n");
5864 }
5865 /* On tramp3d 99% of the _remaining_ cases succeed at
5866 the first entry. */
5867 return leader;
5868 }
5869 /* ??? Can we somehow skip to the immediate dominator
5870 RPO index (bb_to_rpo)? Again, maybe not worth it; on
5871 tramp3d the worst number of elements in the vector is 9. */
5872 }
5873 while (--i >= 0);
5874 }
5875 else if (valnum != VN_TOP)
5876 /* valnum satisfies is_gimple_min_invariant. */
5877 return valnum;
5878 return NULL_TREE;
5879 }
5880
5881 /* Make LEADER a leader for its value at BB. */
5882
5883 void
5884 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
5885 {
5886 tree valnum = VN_INFO (leader)->valnum;
5887 if (valnum == VN_TOP)
5888 return;
5889 if (dump_file && (dump_flags & TDF_DETAILS))
5890 {
5891 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
5892 print_generic_expr (dump_file, leader);
5893 fprintf (dump_file, " for value ");
5894 print_generic_expr (dump_file, valnum);
5895 fprintf (dump_file, "\n");
5896 }
5897 bool existed;
5898 vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
5899 if (!existed)
5900 {
5901 new (&av) vec<std::pair<int, int> >;
5902 av = vNULL;
5903 av.reserve_exact (2);
5904 }
5905 av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
5906 }
5907
5908 /* Valueization hook for RPO VN plus required state. */
5909
5910 tree
5911 rpo_vn_valueize (tree name)
5912 {
5913 if (TREE_CODE (name) == SSA_NAME)
5914 {
5915 vn_ssa_aux_t val = VN_INFO (name);
5916 if (val)
5917 {
5918 tree tem = val->valnum;
5919 if (tem != VN_TOP && tem != name)
5920 {
5921 if (TREE_CODE (tem) != SSA_NAME)
5922 return tem;
5923 /* For all values we only valueize to an available leader
5924 which means we can use SSA name info without restriction. */
5925 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
5926 if (tem)
5927 return tem;
5928 }
5929 }
5930 }
5931 return name;
5932 }
5933
5934 /* Insert on PRED_E predicates derived from CODE OPS being true, besides the
5935 inverted condition (which the caller inserts). */
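     /* For example, on the true edge of if (a < b) this records a != b and
        a <= b as true and a > b and a == b as false, which lets a later
        dominated compare be simplified via the predicated lookup done for
        GIMPLE_CONDs in process_bb.  */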
5936
5937 static void
5938 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
5939 {
5940 switch (code)
5941 {
5942 case LT_EXPR:
5943 /* a < b -> a {!,<}= b */
5944 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5945 ops, boolean_true_node, 0, pred_e);
5946 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
5947 ops, boolean_true_node, 0, pred_e);
5948 /* a < b -> ! a {>,=} b */
5949 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5950 ops, boolean_false_node, 0, pred_e);
5951 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5952 ops, boolean_false_node, 0, pred_e);
5953 break;
5954 case GT_EXPR:
5955 /* a > b -> a {!,>}= b */
5956 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5957 ops, boolean_true_node, 0, pred_e);
5958 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
5959 ops, boolean_true_node, 0, pred_e);
5960 /* a > b -> ! a {<,=} b */
5961 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5962 ops, boolean_false_node, 0, pred_e);
5963 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5964 ops, boolean_false_node, 0, pred_e);
5965 break;
5966 case EQ_EXPR:
5967 /* a == b -> ! a {<,>} b */
5968 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5969 ops, boolean_false_node, 0, pred_e);
5970 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5971 ops, boolean_false_node, 0, pred_e);
5972 break;
5973 case LE_EXPR:
5974 case GE_EXPR:
5975 case NE_EXPR:
5976 /* Nothing besides inverted condition. */
5977 break;
5978 default:;
5979 }
5980 }
5981
5982 /* Main stmt worker for RPO VN, process BB. */
5983
5984 static unsigned
5985 process_bb (rpo_elim &avail, basic_block bb,
5986 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
5987 bool do_region, bitmap exit_bbs)
5988 {
5989 unsigned todo = 0;
5990 edge_iterator ei;
5991 edge e;
5992
5993 vn_context_bb = bb;
5994
5995 /* If we are in loop-closed SSA preserve this state. This is
5996 relevant when called on regions from outside of FRE/PRE. */
5997 bool lc_phi_nodes = false;
5998 if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
5999 FOR_EACH_EDGE (e, ei, bb->preds)
6000 if (e->src->loop_father != e->dest->loop_father
6001 && flow_loop_nested_p (e->dest->loop_father,
6002 e->src->loop_father))
6003 {
6004 lc_phi_nodes = true;
6005 break;
6006 }
6007
6008 /* When we visit a loop header substitute into loop info. */
6009 if (!iterate && eliminate && bb->loop_father->header == bb)
6010 {
6011 /* Keep fields in sync with substitute_in_loop_info. */
6012 if (bb->loop_father->nb_iterations)
6013 bb->loop_father->nb_iterations
6014 = simplify_replace_tree (bb->loop_father->nb_iterations,
6015 NULL_TREE, NULL_TREE, vn_valueize);
6016 }
6017
6018 /* Value-number all defs in the basic-block. */
6019 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6020 gsi_next (&gsi))
6021 {
6022 gphi *phi = gsi.phi ();
6023 tree res = PHI_RESULT (phi);
6024 vn_ssa_aux_t res_info = VN_INFO (res);
6025 if (!bb_visited)
6026 {
6027 gcc_assert (!res_info->visited);
6028 res_info->valnum = VN_TOP;
6029 res_info->visited = true;
6030 }
6031
6032 /* When not iterating force backedge values to varying. */
6033 visit_stmt (phi, !iterate_phis);
6034 if (virtual_operand_p (res))
6035 continue;
6036
6037 /* Eliminate */
6038 /* The interesting case for correctness of how we handle backedges
6039 and availability is gcc.dg/tree-ssa/pr22230.c, and
6040 gcc.dg/tree-ssa/ssa-sccvn-2.c for the optimization. */
6041 tree val = res_info->valnum;
6042 if (res != val && !iterate && eliminate)
6043 {
6044 if (tree leader = avail.eliminate_avail (bb, res))
6045 {
6046 if (leader != res
6047 /* Preserve loop-closed SSA form. */
6048 && (! lc_phi_nodes
6049 || is_gimple_min_invariant (leader)))
6050 {
6051 if (dump_file && (dump_flags & TDF_DETAILS))
6052 {
6053 fprintf (dump_file, "Replaced redundant PHI node "
6054 "defining ");
6055 print_generic_expr (dump_file, res);
6056 fprintf (dump_file, " with ");
6057 print_generic_expr (dump_file, leader);
6058 fprintf (dump_file, "\n");
6059 }
6060 avail.eliminations++;
6061
6062 if (may_propagate_copy (res, leader))
6063 {
6064 /* Schedule for removal. */
6065 avail.to_remove.safe_push (phi);
6066 continue;
6067 }
6068 /* ??? Else generate a copy stmt. */
6069 }
6070 }
6071 }
6072 /* Only make defs available that are not already. But make
6073 sure loop-closed SSA PHI node defs are picked up for
6074 downstream uses. */
6075 if (lc_phi_nodes
6076 || res == val
6077 || ! avail.eliminate_avail (bb, res))
6078 avail.eliminate_push_avail (bb, res);
6079 }
6080
6081 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6082 we do this when processing the last stmt, as we have to do this
6083 before elimination, which otherwise forces GIMPLE_CONDs to
6084 if (1 != 0) style when seeing non-executable edges. */
6085 if (gsi_end_p (gsi_start_bb (bb)))
6086 {
6087 FOR_EACH_EDGE (e, ei, bb->succs)
6088 {
6089 if (!(e->flags & EDGE_EXECUTABLE))
6090 {
6091 if (dump_file && (dump_flags & TDF_DETAILS))
6092 fprintf (dump_file,
6093 "marking outgoing edge %d -> %d executable\n",
6094 e->src->index, e->dest->index);
6095 e->flags |= EDGE_EXECUTABLE;
6096 e->dest->flags |= BB_EXECUTABLE;
6097 }
6098 else if (!(e->dest->flags & BB_EXECUTABLE))
6099 {
6100 if (dump_file && (dump_flags & TDF_DETAILS))
6101 fprintf (dump_file,
6102 "marking destination block %d reachable\n",
6103 e->dest->index);
6104 e->dest->flags |= BB_EXECUTABLE;
6105 }
6106 }
6107 }
6108 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6109 !gsi_end_p (gsi); gsi_next (&gsi))
6110 {
6111 ssa_op_iter i;
6112 tree op;
6113 if (!bb_visited)
6114 {
6115 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6116 {
6117 vn_ssa_aux_t op_info = VN_INFO (op);
6118 gcc_assert (!op_info->visited);
6119 op_info->valnum = VN_TOP;
6120 op_info->visited = true;
6121 }
6122
6123 /* We somehow have to deal with uses that are not defined
6124 in the processed region. Forcing unvisited uses to
6125 varying here doesn't play well with def-use following during
6126 expression simplification, so we deal with this by checking
6127 the visited flag in SSA_VAL. */
6128 }
6129
6130 visit_stmt (gsi_stmt (gsi));
6131
6132 gimple *last = gsi_stmt (gsi);
6133 e = NULL;
6134 switch (gimple_code (last))
6135 {
6136 case GIMPLE_SWITCH:
6137 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6138 (as_a <gswitch *> (last))));
6139 break;
6140 case GIMPLE_COND:
6141 {
6142 tree lhs = vn_valueize (gimple_cond_lhs (last));
6143 tree rhs = vn_valueize (gimple_cond_rhs (last));
6144 tree val = gimple_simplify (gimple_cond_code (last),
6145 boolean_type_node, lhs, rhs,
6146 NULL, vn_valueize);
6147 /* If the condition didn't simplify, see if we have recorded
6148 an expression from edges taken so far. */
6149 if (! val || TREE_CODE (val) != INTEGER_CST)
6150 {
6151 vn_nary_op_t vnresult;
6152 tree ops[2];
6153 ops[0] = lhs;
6154 ops[1] = rhs;
6155 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6156 boolean_type_node, ops,
6157 &vnresult);
6158 /* Did we get a predicated value? */
6159 if (! val && vnresult && vnresult->predicated_values)
6160 {
6161 val = vn_nary_op_get_predicated_value (vnresult, bb);
6162 if (val && dump_file && (dump_flags & TDF_DETAILS))
6163 {
6164 fprintf (dump_file, "Got predicated value ");
6165 print_generic_expr (dump_file, val, TDF_NONE);
6166 fprintf (dump_file, " for ");
6167 print_gimple_stmt (dump_file, last, TDF_SLIM);
6168 }
6169 }
6170 }
6171 if (val)
6172 e = find_taken_edge (bb, val);
6173 if (! e)
6174 {
6175 /* If we didn't manage to compute the taken edge then
6176 push predicated expressions for the condition itself
6177 and related conditions to the hashtables. This allows
6178 simplification of redundant conditions which is
6179 important as early cleanup. */
6180 edge true_e, false_e;
6181 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6182 enum tree_code code = gimple_cond_code (last);
6183 enum tree_code icode
6184 = invert_tree_comparison (code, HONOR_NANS (lhs));
6185 tree ops[2];
6186 ops[0] = lhs;
6187 ops[1] = rhs;
6188 if (do_region
6189 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6190 true_e = NULL;
6191 if (do_region
6192 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6193 false_e = NULL;
6194 if (true_e)
6195 vn_nary_op_insert_pieces_predicated
6196 (2, code, boolean_type_node, ops,
6197 boolean_true_node, 0, true_e);
6198 if (false_e)
6199 vn_nary_op_insert_pieces_predicated
6200 (2, code, boolean_type_node, ops,
6201 boolean_false_node, 0, false_e);
6202 if (icode != ERROR_MARK)
6203 {
6204 if (true_e)
6205 vn_nary_op_insert_pieces_predicated
6206 (2, icode, boolean_type_node, ops,
6207 boolean_false_node, 0, true_e);
6208 if (false_e)
6209 vn_nary_op_insert_pieces_predicated
6210 (2, icode, boolean_type_node, ops,
6211 boolean_true_node, 0, false_e);
6212 }
6213 /* Relax for non-integers, inverted condition handled
6214 above. */
6215 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6216 {
6217 if (true_e)
6218 insert_related_predicates_on_edge (code, ops, true_e);
6219 if (false_e)
6220 insert_related_predicates_on_edge (icode, ops, false_e);
6221 }
6222 }
6223 break;
6224 }
6225 case GIMPLE_GOTO:
6226 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6227 break;
6228 default:
6229 e = NULL;
6230 }
6231 if (e)
6232 {
6233 todo = TODO_cleanup_cfg;
6234 if (!(e->flags & EDGE_EXECUTABLE))
6235 {
6236 if (dump_file && (dump_flags & TDF_DETAILS))
6237 fprintf (dump_file,
6238 "marking known outgoing %sedge %d -> %d executable\n",
6239 e->flags & EDGE_DFS_BACK ? "back-" : "",
6240 e->src->index, e->dest->index);
6241 e->flags |= EDGE_EXECUTABLE;
6242 e->dest->flags |= BB_EXECUTABLE;
6243 }
6244 else if (!(e->dest->flags & BB_EXECUTABLE))
6245 {
6246 if (dump_file && (dump_flags & TDF_DETAILS))
6247 fprintf (dump_file,
6248 "marking destination block %d reachable\n",
6249 e->dest->index);
6250 e->dest->flags |= BB_EXECUTABLE;
6251 }
6252 }
6253 else if (gsi_one_before_end_p (gsi))
6254 {
6255 FOR_EACH_EDGE (e, ei, bb->succs)
6256 {
6257 if (!(e->flags & EDGE_EXECUTABLE))
6258 {
6259 if (dump_file && (dump_flags & TDF_DETAILS))
6260 fprintf (dump_file,
6261 "marking outgoing edge %d -> %d executable\n",
6262 e->src->index, e->dest->index);
6263 e->flags |= EDGE_EXECUTABLE;
6264 e->dest->flags |= BB_EXECUTABLE;
6265 }
6266 else if (!(e->dest->flags & BB_EXECUTABLE))
6267 {
6268 if (dump_file && (dump_flags & TDF_DETAILS))
6269 fprintf (dump_file,
6270 "marking destination block %d reachable\n",
6271 e->dest->index);
6272 e->dest->flags |= BB_EXECUTABLE;
6273 }
6274 }
6275 }
6276
6277 /* Eliminate. That also pushes to avail. */
6278 if (eliminate && ! iterate)
6279 avail.eliminate_stmt (bb, &gsi);
6280 else
6281 /* If not eliminating, make all not already available defs
6282 available. */
6283 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6284 if (! avail.eliminate_avail (bb, op))
6285 avail.eliminate_push_avail (bb, op);
6286 }
6287
6288 /* Eliminate in destination PHI arguments. Always substitute in dest
6289 PHIs, even for non-executable edges. This handles region
6290 exits PHIs. */
6291 if (!iterate && eliminate)
6292 FOR_EACH_EDGE (e, ei, bb->succs)
6293 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6294 !gsi_end_p (gsi); gsi_next (&gsi))
6295 {
6296 gphi *phi = gsi.phi ();
6297 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6298 tree arg = USE_FROM_PTR (use_p);
6299 if (TREE_CODE (arg) != SSA_NAME
6300 || virtual_operand_p (arg))
6301 continue;
6302 tree sprime;
6303 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6304 {
6305 sprime = SSA_VAL (arg);
6306 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6307 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6308 }
6309 else
6310 /* Look for something available at the definition block of the argument.
6311 This avoids inconsistencies between availability there which
6312 decides if the stmt can be removed and availability at the
6313 use site. The SSA property ensures that things available
6314 at the definition are also available at uses. */
6315 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6316 arg);
6317 if (sprime
6318 && sprime != arg
6319 && may_propagate_copy (arg, sprime))
6320 propagate_value (use_p, sprime);
6321 }
6322
6323 vn_context_bb = NULL;
6324 return todo;
6325 }
6326
6327 /* Unwind state per basic-block. */
6328
6329 struct unwind_state
6330 {
6331 /* Times this block has been visited. */
6332 unsigned visited;
6333 /* Whether to handle this as iteration point or whether to treat
6334 incoming backedge PHI values as varying. */
6335 bool iterate;
6336 /* Maximum RPO index this block is reachable from. */
6337 int max_rpo;
6338 /* Unwind state. */
6339 void *ob_top;
6340 vn_reference_t ref_top;
6341 vn_phi_t phi_top;
6342 vn_nary_op_t nary_top;
6343 };
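     /* When iterating, do_rpo_vn snapshots the obstack and the
        last_inserted_{nary,phi,ref} chain tops here before processing a
        block marked for iteration; if a later visit changes a PHI value
        flowing over an executable backedge, do_unwind discards everything
        inserted since that snapshot and the walk restarts at the backedge
        destination.  */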
6344
6345 /* Unwind the RPO VN state for iteration. */
6346
6347 static void
6348 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6349 {
6350 gcc_assert (to->iterate);
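       /* Hashtable entries inserted since the snapshot are found by walking
          the last_inserted_* chains down to the recorded tops and removing
          them; nary entries that recorded an unwind_to (because predication
          overwrote a previous entry) are restored to it instead of being
          cleared.  */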
6351 for (; last_inserted_nary != to->nary_top;
6352 last_inserted_nary = last_inserted_nary->next)
6353 {
6354 vn_nary_op_t *slot;
6355 slot = valid_info->nary->find_slot_with_hash
6356 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6357 /* Predication causes the need to restore previous state. */
6358 if ((*slot)->unwind_to)
6359 *slot = (*slot)->unwind_to;
6360 else
6361 valid_info->nary->clear_slot (slot);
6362 }
6363 for (; last_inserted_phi != to->phi_top;
6364 last_inserted_phi = last_inserted_phi->next)
6365 {
6366 vn_phi_t *slot;
6367 slot = valid_info->phis->find_slot_with_hash
6368 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6369 valid_info->phis->clear_slot (slot);
6370 }
6371 for (; last_inserted_ref != to->ref_top;
6372 last_inserted_ref = last_inserted_ref->next)
6373 {
6374 vn_reference_t *slot;
6375 slot = valid_info->references->find_slot_with_hash
6376 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6377 (*slot)->operands.release ();
6378 valid_info->references->clear_slot (slot);
6379 }
6380 obstack_free (&vn_tables_obstack, to->ob_top);
6381
6382 /* Prune [rpo_idx, ] from avail. */
6383 /* ??? This is O(number-of-values-in-region) which is
6384 O(region-size) rather than O(iteration-piece). */
6385 for (rpo_elim::rpo_avail_t::iterator i
6386 = avail.m_rpo_avail.begin ();
6387 i != avail.m_rpo_avail.end (); ++i)
6388 {
6389 while (! (*i).second.is_empty ())
6390 {
6391 if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
6392 break;
6393 (*i).second.pop ();
6394 }
6395 }
6396 }
6397
6398 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6399 If ITERATE is true then treat backedges optimistically as not
6400 executed and iterate. If ELIMINATE is true then perform
6401 elimination, otherwise leave that to the caller. */
6402
6403 static unsigned
6404 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6405 bool iterate, bool eliminate)
6406 {
6407 unsigned todo = 0;
6408
6409 /* We currently do not support region-based iteration when
6410 elimination is requested. */
6411 gcc_assert (!entry || !iterate || !eliminate);
6412 /* When iterating we need loop info up-to-date. */
6413 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6414
6415 bool do_region = entry != NULL;
6416 if (!do_region)
6417 {
6418 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6419 exit_bbs = BITMAP_ALLOC (NULL);
6420 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6421 }
6422
6423 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6424 int n = rev_post_order_and_mark_dfs_back_seme
6425 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
6426 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6427 for (int i = 0; i < n / 2; ++i)
6428 std::swap (rpo[i], rpo[n-i-1]);
6429
6430 if (!do_region)
6431 BITMAP_FREE (exit_bbs);
6432
6433 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6434 for (int i = 0; i < n; ++i)
6435 bb_to_rpo[rpo[i]] = i;
6436
6437 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6438
6439 rpo_elim avail (entry->dest);
6440 rpo_avail = &avail;
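     /* rpo_avail is also consulted by rpo_vn_valueize and
        vn_lookup_simplify_result so that valueization and simplification
        only ever return leaders available at the current vn_context_bb.  */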
6441
6442 /* Verify we have no extra entries into the region. */
6443 if (flag_checking && do_region)
6444 {
6445 auto_bb_flag bb_in_region (fn);
6446 for (int i = 0; i < n; ++i)
6447 {
6448 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6449 bb->flags |= bb_in_region;
6450 }
6451 /* We can't merge the first two loops because we cannot rely
6452 on EDGE_DFS_BACK for edges not within the region. But if
6453 we decide to always have the bb_in_region flag we can
6454 do the checking during the RPO walk itself (but then it's
6455 also easy to handle MEME conservatively). */
6456 for (int i = 0; i < n; ++i)
6457 {
6458 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6459 edge e;
6460 edge_iterator ei;
6461 FOR_EACH_EDGE (e, ei, bb->preds)
6462 gcc_assert (e == entry || (e->src->flags & bb_in_region));
6463 }
6464 for (int i = 0; i < n; ++i)
6465 {
6466 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6467 bb->flags &= ~bb_in_region;
6468 }
6469 }
6470
6471 /* Create the VN state. For the initial size of the various hashtables
6472 use a heuristic based on region size and number of SSA names. */
6473 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6474 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6475 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
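     /* VN_TOP is the optimistic "not yet known" lattice value; defs start
        out as VN_TOP when their block is first visited, and remaining
        VN_TOPs from dead code are pruned in run_rpo_vn.  */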
6476
6477 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6478 gcc_obstack_init (&vn_ssa_aux_obstack);
6479
6480 gcc_obstack_init (&vn_tables_obstack);
6481 gcc_obstack_init (&vn_tables_insert_obstack);
6482 valid_info = XCNEW (struct vn_tables_s);
6483 allocate_vn_table (valid_info, region_size);
6484 last_inserted_ref = NULL;
6485 last_inserted_phi = NULL;
6486 last_inserted_nary = NULL;
6487
6488 vn_valueize = rpo_vn_valueize;
6489
6490 /* Initialize the unwind state and edge/BB executable state. */
6491 bool need_max_rpo_iterate = false;
6492 for (int i = 0; i < n; ++i)
6493 {
6494 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6495 rpo_state[i].visited = 0;
6496 rpo_state[i].max_rpo = i;
6497 bb->flags &= ~BB_EXECUTABLE;
6498 bool has_backedges = false;
6499 edge e;
6500 edge_iterator ei;
6501 FOR_EACH_EDGE (e, ei, bb->preds)
6502 {
6503 if (e->flags & EDGE_DFS_BACK)
6504 has_backedges = true;
6505 e->flags &= ~EDGE_EXECUTABLE;
6506 if (iterate || e == entry)
6507 continue;
6508 if (bb_to_rpo[e->src->index] > i)
6509 {
6510 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6511 bb_to_rpo[e->src->index]);
6512 need_max_rpo_iterate = true;
6513 }
6514 else
6515 rpo_state[i].max_rpo
6516 = MAX (rpo_state[i].max_rpo,
6517 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6518 }
6519 rpo_state[i].iterate = iterate && has_backedges;
6520 }
6521 entry->flags |= EDGE_EXECUTABLE;
6522 entry->dest->flags |= BB_EXECUTABLE;
6523
6524 /* When there are irreducible regions the simplistic max_rpo computation
6525 above for the case of backedges doesn't work and we need to iterate
6526 until there are no more changes. */
6527 unsigned nit = 0;
6528 while (need_max_rpo_iterate)
6529 {
6530 nit++;
6531 need_max_rpo_iterate = false;
6532 for (int i = 0; i < n; ++i)
6533 {
6534 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6535 edge e;
6536 edge_iterator ei;
6537 FOR_EACH_EDGE (e, ei, bb->preds)
6538 {
6539 if (e == entry)
6540 continue;
6541 int max_rpo = MAX (rpo_state[i].max_rpo,
6542 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6543 if (rpo_state[i].max_rpo != max_rpo)
6544 {
6545 rpo_state[i].max_rpo = max_rpo;
6546 need_max_rpo_iterate = true;
6547 }
6548 }
6549 }
6550 }
6551 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
6552
6553 /* As a heuristic to improve compile time we handle only the N innermost
6554 loops and the outermost one optimistically. */
6555 if (iterate)
6556 {
6557 loop_p loop;
6558 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6559 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6560 if (loop_depth (loop) > max_depth)
6561 for (unsigned i = 2;
6562 i < loop_depth (loop) - max_depth; ++i)
6563 {
6564 basic_block header = superloop_at_depth (loop, i)->header;
6565 bool non_latch_backedge = false;
6566 edge e;
6567 edge_iterator ei;
6568 FOR_EACH_EDGE (e, ei, header->preds)
6569 if (e->flags & EDGE_DFS_BACK)
6570 {
6571 /* There can be a non-latch backedge into the header
6572 which is part of an outer irreducible region. We
6573 cannot avoid iterating this block then. */
6574 if (!dominated_by_p (CDI_DOMINATORS,
6575 e->src, e->dest))
6576 {
6577 if (dump_file && (dump_flags & TDF_DETAILS))
6578 fprintf (dump_file, "non-latch backedge %d -> %d "
6579 "forces iteration of loop %d\n",
6580 e->src->index, e->dest->index, loop->num);
6581 non_latch_backedge = true;
6582 }
6583 else
6584 e->flags |= EDGE_EXECUTABLE;
6585 }
6586 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
6587 }
6588 }
6589
6590 uint64_t nblk = 0;
6591 int idx = 0;
6592 if (iterate)
6593 /* Go and process all blocks, iterating as necessary. */
6594 do
6595 {
6596 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6597
6598 /* If the block has incoming backedges remember unwind state. This
6599 is required even for non-executable blocks since in irreducible
6600 regions we might reach them via the backedge and re-start iterating
6601 from there.
6602 Note we can individually mark blocks with incoming backedges to
6603 not iterate where we then handle PHIs conservatively. We do that
6604 heuristically to reduce compile-time for degenerate cases. */
6605 if (rpo_state[idx].iterate)
6606 {
6607 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
6608 rpo_state[idx].ref_top = last_inserted_ref;
6609 rpo_state[idx].phi_top = last_inserted_phi;
6610 rpo_state[idx].nary_top = last_inserted_nary;
6611 }
6612
6613 if (!(bb->flags & BB_EXECUTABLE))
6614 {
6615 if (dump_file && (dump_flags & TDF_DETAILS))
6616 fprintf (dump_file, "Block %d: BB%d found not executable\n",
6617 idx, bb->index);
6618 idx++;
6619 continue;
6620 }
6621
6622 if (dump_file && (dump_flags & TDF_DETAILS))
6623 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6624 nblk++;
6625 todo |= process_bb (avail, bb,
6626 rpo_state[idx].visited != 0,
6627 rpo_state[idx].iterate,
6628 iterate, eliminate, do_region, exit_bbs);
6629 rpo_state[idx].visited++;
6630
6631 /* Check whether changed values flow over executable outgoing backedges
6632 and change destination PHI values (that's the thing we
6633 can easily verify). Reduce over all such edges to the farthest
6634 away PHI. */
6635 int iterate_to = -1;
6636 edge_iterator ei;
6637 edge e;
6638 FOR_EACH_EDGE (e, ei, bb->succs)
6639 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
6640 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
6641 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
6642 {
6643 int destidx = bb_to_rpo[e->dest->index];
6644 if (!rpo_state[destidx].visited)
6645 {
6646 if (dump_file && (dump_flags & TDF_DETAILS))
6647 fprintf (dump_file, "Unvisited destination %d\n",
6648 e->dest->index);
6649 if (iterate_to == -1 || destidx < iterate_to)
6650 iterate_to = destidx;
6651 continue;
6652 }
6653 if (dump_file && (dump_flags & TDF_DETAILS))
6654 fprintf (dump_file, "Looking for changed values of backedge"
6655 " %d->%d destination PHIs\n",
6656 e->src->index, e->dest->index);
6657 vn_context_bb = e->dest;
6658 gphi_iterator gsi;
6659 for (gsi = gsi_start_phis (e->dest);
6660 !gsi_end_p (gsi); gsi_next (&gsi))
6661 {
6662 bool inserted = false;
6663 /* While we'd ideally just iterate on value changes
6664 we CSE PHIs and do that even across basic-block
6665 boundaries. So even hashtable state changes can
6666 be important (which is roughly equivalent to
6667 PHI argument value changes). To not excessively
6668 iterate because of that we track whether a PHI
6669 was CSEd to with GF_PLF_1. */
6670 bool phival_changed;
6671 if ((phival_changed = visit_phi (gsi.phi (),
6672 &inserted, false))
6673 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
6674 {
6675 if (!phival_changed
6676 && dump_file && (dump_flags & TDF_DETAILS))
6677 fprintf (dump_file, "PHI was CSEd and hashtable "
6678 "state (changed)\n");
6679 if (iterate_to == -1 || destidx < iterate_to)
6680 iterate_to = destidx;
6681 break;
6682 }
6683 }
6684 vn_context_bb = NULL;
6685 }
6686 if (iterate_to != -1)
6687 {
6688 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
6689 idx = iterate_to;
6690 if (dump_file && (dump_flags & TDF_DETAILS))
6691 fprintf (dump_file, "Iterating to %d BB%d\n",
6692 iterate_to, rpo[iterate_to]);
6693 continue;
6694 }
6695
6696 idx++;
6697 }
6698 while (idx < n);
6699
6700 else /* !iterate */
6701 {
6702 /* Process all blocks greedily with a worklist that enforces RPO
6703 processing of reachable blocks. */
6704 auto_bitmap worklist;
6705 bitmap_set_bit (worklist, 0);
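         /* Bits in WORKLIST are RPO indexes, so extracting the lowest set
            bit processes ready blocks in RPO order; index 0 is the region
            entry block.  */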
6706 while (!bitmap_empty_p (worklist))
6707 {
6708 int idx = bitmap_first_set_bit (worklist);
6709 bitmap_clear_bit (worklist, idx);
6710 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6711 gcc_assert ((bb->flags & BB_EXECUTABLE)
6712 && !rpo_state[idx].visited);
6713
6714 if (dump_file && (dump_flags & TDF_DETAILS))
6715 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6716
6717 /* When we run into predecessor edges where we cannot trust their
6718 executable state, mark them executable so PHI processing will
6719 be conservative.
6720 ??? Do we need to force arguments flowing over that edge
6721 to be varying or will they even always be? */
6722 edge_iterator ei;
6723 edge e;
6724 FOR_EACH_EDGE (e, ei, bb->preds)
6725 if (!(e->flags & EDGE_EXECUTABLE)
6726 && !rpo_state[bb_to_rpo[e->src->index]].visited
6727 && rpo_state[bb_to_rpo[e->src->index]].max_rpo >= (int)idx)
6728 {
6729 if (dump_file && (dump_flags & TDF_DETAILS))
6730 fprintf (dump_file, "Cannot trust state of predecessor "
6731 "edge %d -> %d, marking executable\n",
6732 e->src->index, e->dest->index);
6733 e->flags |= EDGE_EXECUTABLE;
6734 }
6735
6736 nblk++;
6737 todo |= process_bb (avail, bb, false, false, false, eliminate,
6738 do_region, exit_bbs);
6739 rpo_state[idx].visited++;
6740
6741 FOR_EACH_EDGE (e, ei, bb->succs)
6742 if ((e->flags & EDGE_EXECUTABLE)
6743 && e->dest->index != EXIT_BLOCK
6744 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
6745 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
6746 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
6747 }
6748 }
6749
6750 /* If statistics or dump file active. */
6751 int nex = 0;
6752 unsigned max_visited = 1;
6753 for (int i = 0; i < n; ++i)
6754 {
6755 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6756 if (bb->flags & BB_EXECUTABLE)
6757 nex++;
6758 statistics_histogram_event (cfun, "RPO block visited times",
6759 rpo_state[i].visited);
6760 if (rpo_state[i].visited > max_visited)
6761 max_visited = rpo_state[i].visited;
6762 }
6763 unsigned nvalues = 0, navail = 0;
6764 for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
6765 i != avail.m_rpo_avail.end (); ++i)
6766 {
6767 nvalues++;
6768 navail += (*i).second.length ();
6769 }
6770 statistics_counter_event (cfun, "RPO blocks", n);
6771 statistics_counter_event (cfun, "RPO blocks visited", nblk);
6772 statistics_counter_event (cfun, "RPO blocks executable", nex);
6773 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
6774 statistics_histogram_event (cfun, "RPO num values", nvalues);
6775 statistics_histogram_event (cfun, "RPO num avail", navail);
6776 statistics_histogram_event (cfun, "RPO num lattice",
6777 vn_ssa_aux_hash->elements ());
6778 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
6779 {
6780 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
6781 " blocks in total discovering %d executable blocks iterating "
6782 "%d.%d times, a block was visited max. %u times\n",
6783 n, nblk, nex,
6784 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
6785 max_visited);
6786 fprintf (dump_file, "RPO tracked %d values available at %d locations "
6787 "and %" PRIu64 " lattice elements\n",
6788 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
6789 }
6790
6791 if (eliminate)
6792 {
6793 /* When !iterate we already performed elimination during the RPO
6794 walk. */
6795 if (iterate)
6796 {
6797 /* Elimination for region-based VN needs to be done within the
6798 RPO walk. */
6799 gcc_assert (! do_region);
6800 /* Note we can't use avail.walk here because that gets confused
6801 by the existing availability and it will be less efficient
6802 as well. */
6803 todo |= eliminate_with_rpo_vn (NULL);
6804 }
6805 else
6806 todo |= avail.eliminate_cleanup (do_region);
6807 }
6808
6809 vn_valueize = NULL;
6810 rpo_avail = NULL;
6811
6812 XDELETEVEC (bb_to_rpo);
6813 XDELETEVEC (rpo);
6814 XDELETEVEC (rpo_state);
6815
6816 return todo;
6817 }
6818
6819 /* Region-based entry for RPO VN. Performs value-numbering and elimination
6820 on the SEME region specified by ENTRY and EXIT_BBS. */
6821
6822 unsigned
6823 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
6824 {
6825 default_vn_walk_kind = VN_WALKREWRITE;
6826 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
6827 free_rpo_vn ();
6828 return todo;
6829 }
6830
6831
6832 namespace {
6833
6834 const pass_data pass_data_fre =
6835 {
6836 GIMPLE_PASS, /* type */
6837 "fre", /* name */
6838 OPTGROUP_NONE, /* optinfo_flags */
6839 TV_TREE_FRE, /* tv_id */
6840 ( PROP_cfg | PROP_ssa ), /* properties_required */
6841 0, /* properties_provided */
6842 0, /* properties_destroyed */
6843 0, /* todo_flags_start */
6844 0, /* todo_flags_finish */
6845 };
6846
6847 class pass_fre : public gimple_opt_pass
6848 {
6849 public:
6850 pass_fre (gcc::context *ctxt)
6851 : gimple_opt_pass (pass_data_fre, ctxt)
6852 {}
6853
6854 /* opt_pass methods: */
6855 opt_pass * clone () { return new pass_fre (m_ctxt); }
6856 virtual bool gate (function *) { return flag_tree_fre != 0; }
6857 virtual unsigned int execute (function *);
6858
6859 }; // class pass_fre
6860
6861 unsigned int
6862 pass_fre::execute (function *fun)
6863 {
6864 unsigned todo = 0;
6865
6866 /* At -O[1g] use the cheap non-iterating mode. */
6867 calculate_dominance_info (CDI_DOMINATORS);
6868 if (optimize > 1)
6869 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6870
6871 default_vn_walk_kind = VN_WALKREWRITE;
6872 todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
6873 free_rpo_vn ();
6874
6875 if (optimize > 1)
6876 loop_optimizer_finalize ();
6877
6878 return todo;
6879 }
6880
6881 } // anon namespace
6882
6883 gimple_opt_pass *
6884 make_pass_fre (gcc::context *ctxt)
6885 {
6886 return new pass_fre (ctxt);
6887 }
6888
6889 #undef BB_EXECUTABLE