1 /* SCC value numbering for trees
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "insn-config.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "tree-ssa-sccvn.h"
73
74 /* This algorithm is based on the SCC algorithm presented by Keith
75 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76 (http://citeseer.ist.psu.edu/41805.html). In
77 straight line code, it is equivalent to a regular hash based value
78 numbering that is performed in reverse postorder.
79
80 For code with cycles, there are two alternatives, both of which
81 require keeping the hashtables separate from the actual list of
82 value numbers for SSA names.
83
84 1. Iterate value numbering in an RPO walk of the blocks, removing
85 all the entries from the hashtable after each iteration (but
86 keeping the SSA name->value number mapping between iterations).
87 Iterate until it does not change.
88
89 2. Perform value numbering as part of an SCC walk on the SSA graph,
90 iterating only the cycles in the SSA graph until they do not change
91 (using a separate, optimistic hashtable for value numbering the SCC
92 operands).
93
94 The second is not just faster in practice (because most SSA graph
95 cycles do not involve all the variables in the graph), it also has
96 some nice properties.
97
98 One of these nice properties is that when we pop an SCC off the
99 stack, we are guaranteed to have processed all the operands coming from
100 *outside of that SCC*, so we do not need to do anything special to
101 ensure they have value numbers.
102
103 Another nice property is that the SCC walk is done as part of a DFS
104 of the SSA graph, which makes it easy to perform combining and
105 simplifying operations at the same time.
106
107 The code below is deliberately written in a way that makes it easy
108 to separate the SCC walk from the other work it does.
109
110 In order to propagate constants through the code, we track which
111 expressions contain constants, and use those while folding. In
112 theory, we could also track expressions whose value numbers are
113 replaced, in case we end up folding based on expression
114 identities.
115
116 In order to value number memory, we assign value numbers to vuses.
117 This enables us to note that, for example, stores to the same
118 address of the same value from the same starting memory states are
119 equivalent.
120 TODO:
121
122 1. We can iterate only the changing portions of the SCCs, but
123 I have not seen an SCC big enough for this to be a win.
124 2. If you differentiate between phi nodes for loops and phi nodes
125 for if-then-else, you can properly consider phi nodes in different
126 blocks for equivalence.
127 3. We could value number vuses in more cases, particularly, whole
128 structure copies.
129 */
130
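/* As a concrete (hypothetical) illustration of the SCC walk described
   above, consider the SSA cycle of a simple counted loop:

     # i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 1;

   i_1 and i_2 form an SCC in the SSA graph.  The optimistic iteration
   of alternative 2 value-numbers just this cycle until the value
   numbers of i_1 and i_2 stop changing; operands defined outside the
   SCC (here the constants 0 and 1) already have their final value
   numbers by the time the SCC is popped off the stack.  */
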
131 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
132 #define BB_EXECUTABLE BB_VISITED
133
134 static tree *last_vuse_ptr;
135 static vn_lookup_kind vn_walk_kind;
136 static vn_lookup_kind default_vn_walk_kind;
137
138 /* vn_nary_op hashtable helpers. */
139
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 {
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145 };
146
147 /* Return the computed hashcode for nary operation P1. */
148
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 {
152 return vno1->hashcode;
153 }
154
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
157
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 {
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162 }
163
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166
167
168 /* vn_phi hashtable helpers. */
169
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 {
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 };
178
179 /* Return the computed hashcode for phi operation P1. */
180
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 {
184 return vp1->hashcode;
185 }
186
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 {
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193 }
194
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197
198
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
201
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
204 {
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
217 }
218
219 /* Free a reference operation structure VR. */
220
221 static inline void
222 free_reference (vn_reference_s *vr)
223 {
224 vr->operands.release ();
225 }
226
227
228 /* vn_reference hashtable helpers. */
229
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 {
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 };
235
236 /* Return the hashcode for a given reference operation P1. */
237
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 {
241 return vr1->hashcode;
242 }
243
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 {
247 return v == c || vn_reference_eq (v, c);
248 }
249
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252
253
254 /* The set of VN hashtables. */
255
256 typedef struct vn_tables_s
257 {
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
262
263
264 /* vn_constant hashtable helpers. */
265
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
267 {
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
270 };
271
272 /* Hash table hash function for vn_constant_t. */
273
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
276 {
277 return vc1->hashcode;
278 }
279
280 /* Hash table equality function for vn_constant_t. */
281
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
284 {
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
287
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289 }
290
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
293
294
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
299
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
303
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
307
308
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
312
313
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
316
317 tree VN_TOP;
318
319 /* Unique counter for our value ids. */
320
321 static unsigned int next_value_id;
322
323
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
327
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
329 {
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
338 };
339
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
342 {
343 return SSA_NAME_VERSION (entry->name);
344 }
345
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
348 {
349 return name == entry->name;
350 }
351
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
355
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
365
366 /* Return whether there is value numbering information for a given SSA name. */
367
368 bool
369 has_VN_INFO (tree name)
370 {
371 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
372 }
373
374 vn_ssa_aux_t
375 VN_INFO (tree name)
376 {
377 vn_ssa_aux_t *res
378 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
379 INSERT);
380 if (*res != NULL)
381 return *res;
382
383 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
384 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
385 newinfo->name = name;
386 newinfo->valnum = VN_TOP;
387 /* We are using the visited flag to handle uses with defs not within the
388 region being value-numbered. */
389 newinfo->visited = false;
390
391 /* Given we now create the VN_INFOs on demand, we have to do initialization
392 different from VN_TOP here. */
393 if (SSA_NAME_IS_DEFAULT_DEF (name))
394 switch (TREE_CODE (SSA_NAME_VAR (name)))
395 {
396 case VAR_DECL:
397 /* All undefined vars are VARYING. */
398 newinfo->valnum = name;
399 newinfo->visited = true;
400 break;
401
402 case PARM_DECL:
403 /* Parameters are VARYING but we can record a condition
404 if we know it is a non-NULL pointer. */
405 newinfo->visited = true;
406 newinfo->valnum = name;
407 if (POINTER_TYPE_P (TREE_TYPE (name))
408 && nonnull_arg_p (SSA_NAME_VAR (name)))
409 {
410 tree ops[2];
411 ops[0] = name;
412 ops[1] = build_int_cst (TREE_TYPE (name), 0);
413 vn_nary_op_t nary;
414 /* Allocate from non-unwinding stack. */
415 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
416 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
417 boolean_type_node, ops);
418 nary->predicated_values = 0;
419 nary->u.result = boolean_true_node;
420 vn_nary_op_insert_into (nary, valid_info->nary, true);
421 gcc_assert (nary->unwind_to == NULL);
422 /* Also do not link it into the undo chain. */
423 last_inserted_nary = nary->next;
424 nary->next = (vn_nary_op_t)(void *)-1;
425 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
426 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
427 boolean_type_node, ops);
428 nary->predicated_values = 0;
429 nary->u.result = boolean_false_node;
430 vn_nary_op_insert_into (nary, valid_info->nary, true);
431 gcc_assert (nary->unwind_to == NULL);
432 last_inserted_nary = nary->next;
433 nary->next = (vn_nary_op_t)(void *)-1;
434 if (dump_file && (dump_flags & TDF_DETAILS))
435 {
436 fprintf (dump_file, "Recording ");
437 print_generic_expr (dump_file, name, TDF_SLIM);
438 fprintf (dump_file, " != 0\n");
439 }
440 }
441 break;
442
443 case RESULT_DECL:
444 /* If the result is passed by invisible reference the default
445 def is initialized, otherwise it's uninitialized. Either way
446 it is treated as varying. */
447 newinfo->visited = true;
448 newinfo->valnum = name;
449 break;
450
451 default:
452 gcc_unreachable ();
453 }
454 return newinfo;
455 }
456
457 /* Return the SSA value of X. */
458
459 inline tree
460 SSA_VAL (tree x, bool *visited = NULL)
461 {
462 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
463 if (visited)
464 *visited = tem && tem->visited;
465 return tem && tem->visited ? tem->valnum : x;
466 }
467
468 /* Return the SSA value of the VUSE x, supporting released VDEFs
469 during elimination which will value-number the VDEF to the
470 associated VUSE (but not substitute in the whole lattice). */
471
472 static inline tree
473 vuse_ssa_val (tree x)
474 {
475 if (!x)
476 return NULL_TREE;
477
478 do
479 {
480 x = SSA_VAL (x);
481 gcc_assert (x != VN_TOP);
482 }
483 while (SSA_NAME_IN_FREE_LIST (x));
484
485 return x;
486 }
487
488 /* Similar to the above but used as callback for walk_non_aliases_vuses
489 and thus should stop at unvisited VUSE to not walk across region
490 boundaries. */
491
492 static tree
493 vuse_valueize (tree vuse)
494 {
495 do
496 {
497 bool visited;
498 vuse = SSA_VAL (vuse, &visited);
499 if (!visited)
500 return NULL_TREE;
501 gcc_assert (vuse != VN_TOP);
502 }
503 while (SSA_NAME_IN_FREE_LIST (vuse));
504 return vuse;
505 }
506
507
508 /* Return the vn_kind the expression computed by the stmt should be
509 associated with. */
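/* For example (illustrative only): _1 = x_2 + 3 is VN_NARY,
   _1 = MEM[p_2] is VN_REFERENCE, a PHI definition is VN_PHI and
   _1 = &global_var is VN_CONSTANT when the address is invariant.  */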
510
511 enum vn_kind
512 vn_get_stmt_kind (gimple *stmt)
513 {
514 switch (gimple_code (stmt))
515 {
516 case GIMPLE_CALL:
517 return VN_REFERENCE;
518 case GIMPLE_PHI:
519 return VN_PHI;
520 case GIMPLE_ASSIGN:
521 {
522 enum tree_code code = gimple_assign_rhs_code (stmt);
523 tree rhs1 = gimple_assign_rhs1 (stmt);
524 switch (get_gimple_rhs_class (code))
525 {
526 case GIMPLE_UNARY_RHS:
527 case GIMPLE_BINARY_RHS:
528 case GIMPLE_TERNARY_RHS:
529 return VN_NARY;
530 case GIMPLE_SINGLE_RHS:
531 switch (TREE_CODE_CLASS (code))
532 {
533 case tcc_reference:
534 /* VOP-less references can go through unary case. */
535 if ((code == REALPART_EXPR
536 || code == IMAGPART_EXPR
537 || code == VIEW_CONVERT_EXPR
538 || code == BIT_FIELD_REF)
539 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
540 return VN_NARY;
541
542 /* Fallthrough. */
543 case tcc_declaration:
544 return VN_REFERENCE;
545
546 case tcc_constant:
547 return VN_CONSTANT;
548
549 default:
550 if (code == ADDR_EXPR)
551 return (is_gimple_min_invariant (rhs1)
552 ? VN_CONSTANT : VN_REFERENCE);
553 else if (code == CONSTRUCTOR)
554 return VN_NARY;
555 return VN_NONE;
556 }
557 default:
558 return VN_NONE;
559 }
560 }
561 default:
562 return VN_NONE;
563 }
564 }
565
566 /* Lookup a value id for CONSTANT and return it. If it does not
567 exist returns 0. */
568
569 unsigned int
570 get_constant_value_id (tree constant)
571 {
572 vn_constant_s **slot;
573 struct vn_constant_s vc;
574
575 vc.hashcode = vn_hash_constant_with_type (constant);
576 vc.constant = constant;
577 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
578 if (slot)
579 return (*slot)->value_id;
580 return 0;
581 }
582
583 /* Lookup a value id for CONSTANT, and if it does not exist, create a
584 new one and return it. If it does exist, return it. */
585
586 unsigned int
587 get_or_alloc_constant_value_id (tree constant)
588 {
589 vn_constant_s **slot;
590 struct vn_constant_s vc;
591 vn_constant_t vcp;
592
593 /* If the hashtable isn't initialized we're not running from PRE and thus
594 do not need value-ids. */
595 if (!constant_to_value_id)
596 return 0;
597
598 vc.hashcode = vn_hash_constant_with_type (constant);
599 vc.constant = constant;
600 slot = constant_to_value_id->find_slot (&vc, INSERT);
601 if (*slot)
602 return (*slot)->value_id;
603
604 vcp = XNEW (struct vn_constant_s);
605 vcp->hashcode = vc.hashcode;
606 vcp->constant = constant;
607 vcp->value_id = get_next_value_id ();
608 *slot = vcp;
609 bitmap_set_bit (constant_value_ids, vcp->value_id);
610 return vcp->value_id;
611 }
612
613 /* Return true if V is a value id for a constant. */
614
615 bool
616 value_id_constant_p (unsigned int v)
617 {
618 return bitmap_bit_p (constant_value_ids, v);
619 }
620
621 /* Compute the hash for a reference operand VRO1. */
622
623 static void
624 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
625 {
626 hstate.add_int (vro1->opcode);
627 if (vro1->op0)
628 inchash::add_expr (vro1->op0, hstate);
629 if (vro1->op1)
630 inchash::add_expr (vro1->op1, hstate);
631 if (vro1->op2)
632 inchash::add_expr (vro1->op2, hstate);
633 }
634
635 /* Compute a hash for the reference operation VR1 and return it. */
636
637 static hashval_t
638 vn_reference_compute_hash (const vn_reference_t vr1)
639 {
640 inchash::hash hstate;
641 hashval_t result;
642 int i;
643 vn_reference_op_t vro;
644 poly_int64 off = -1;
645 bool deref = false;
646
647 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
648 {
649 if (vro->opcode == MEM_REF)
650 deref = true;
651 else if (vro->opcode != ADDR_EXPR)
652 deref = false;
653 if (maybe_ne (vro->off, -1))
654 {
655 if (known_eq (off, -1))
656 off = 0;
657 off += vro->off;
658 }
659 else
660 {
661 if (maybe_ne (off, -1)
662 && maybe_ne (off, 0))
663 hstate.add_poly_int (off);
664 off = -1;
665 if (deref
666 && vro->opcode == ADDR_EXPR)
667 {
668 if (vro->op0)
669 {
670 tree op = TREE_OPERAND (vro->op0, 0);
671 hstate.add_int (TREE_CODE (op));
672 inchash::add_expr (op, hstate);
673 }
674 }
675 else
676 vn_reference_op_compute_hash (vro, hstate);
677 }
678 }
679 result = hstate.end ();
680 /* ??? We would ICE later if we hash instead of adding that in. */
681 if (vr1->vuse)
682 result += SSA_NAME_VERSION (vr1->vuse);
683
684 return result;
685 }
686
687 /* Return true if reference operations VR1 and VR2 are equivalent. This
688 means they have the same set of operands and vuses. */
689
690 bool
691 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
692 {
693 unsigned i, j;
694
695 /* Early out if this is not a hash collision. */
696 if (vr1->hashcode != vr2->hashcode)
697 return false;
698
699 /* The VOP needs to be the same. */
700 if (vr1->vuse != vr2->vuse)
701 return false;
702
703 /* If the operands are the same we are done. */
704 if (vr1->operands == vr2->operands)
705 return true;
706
707 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
708 return false;
709
710 if (INTEGRAL_TYPE_P (vr1->type)
711 && INTEGRAL_TYPE_P (vr2->type))
712 {
713 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
714 return false;
715 }
716 else if (INTEGRAL_TYPE_P (vr1->type)
717 && (TYPE_PRECISION (vr1->type)
718 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
719 return false;
720 else if (INTEGRAL_TYPE_P (vr2->type)
721 && (TYPE_PRECISION (vr2->type)
722 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
723 return false;
724
725 i = 0;
726 j = 0;
727 do
728 {
729 poly_int64 off1 = 0, off2 = 0;
730 vn_reference_op_t vro1, vro2;
731 vn_reference_op_s tem1, tem2;
732 bool deref1 = false, deref2 = false;
733 for (; vr1->operands.iterate (i, &vro1); i++)
734 {
735 if (vro1->opcode == MEM_REF)
736 deref1 = true;
737 /* Do not look through a storage order barrier. */
738 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
739 return false;
740 if (known_eq (vro1->off, -1))
741 break;
742 off1 += vro1->off;
743 }
744 for (; vr2->operands.iterate (j, &vro2); j++)
745 {
746 if (vro2->opcode == MEM_REF)
747 deref2 = true;
748 /* Do not look through a storage order barrier. */
749 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
750 return false;
751 if (known_eq (vro2->off, -1))
752 break;
753 off2 += vro2->off;
754 }
755 if (maybe_ne (off1, off2))
756 return false;
757 if (deref1 && vro1->opcode == ADDR_EXPR)
758 {
759 memset (&tem1, 0, sizeof (tem1));
760 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
761 tem1.type = TREE_TYPE (tem1.op0);
762 tem1.opcode = TREE_CODE (tem1.op0);
763 vro1 = &tem1;
764 deref1 = false;
765 }
766 if (deref2 && vro2->opcode == ADDR_EXPR)
767 {
768 memset (&tem2, 0, sizeof (tem2));
769 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
770 tem2.type = TREE_TYPE (tem2.op0);
771 tem2.opcode = TREE_CODE (tem2.op0);
772 vro2 = &tem2;
773 deref2 = false;
774 }
775 if (deref1 != deref2)
776 return false;
777 if (!vn_reference_op_eq (vro1, vro2))
778 return false;
779 ++j;
780 ++i;
781 }
782 while (vr1->operands.length () != i
783 || vr2->operands.length () != j);
784
785 return true;
786 }
787
788 /* Copy the operations present in load/store REF into RESULT, a vector of
789 vn_reference_op_s's. */
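/* A small, purely illustrative example: a load from s.f, where s is a
   plain VAR_DECL of type struct S, is decomposed outermost-first into

     { COMPONENT_REF, op0 = FIELD_DECL f }
     { MEM_REF,       op0 = (struct S *) 0, off = 0 }
     { ADDR_EXPR,     op0 = &s }

   with the decl itself canonicalized to MEM[&s] as described in the
   VAR_DECL/PARM_DECL cases below.  */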
790
791 static void
792 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
793 {
794 if (TREE_CODE (ref) == TARGET_MEM_REF)
795 {
796 vn_reference_op_s temp;
797
798 result->reserve (3);
799
800 memset (&temp, 0, sizeof (temp));
801 temp.type = TREE_TYPE (ref);
802 temp.opcode = TREE_CODE (ref);
803 temp.op0 = TMR_INDEX (ref);
804 temp.op1 = TMR_STEP (ref);
805 temp.op2 = TMR_OFFSET (ref);
806 temp.off = -1;
807 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
808 temp.base = MR_DEPENDENCE_BASE (ref);
809 result->quick_push (temp);
810
811 memset (&temp, 0, sizeof (temp));
812 temp.type = NULL_TREE;
813 temp.opcode = ERROR_MARK;
814 temp.op0 = TMR_INDEX2 (ref);
815 temp.off = -1;
816 result->quick_push (temp);
817
818 memset (&temp, 0, sizeof (temp));
819 temp.type = NULL_TREE;
820 temp.opcode = TREE_CODE (TMR_BASE (ref));
821 temp.op0 = TMR_BASE (ref);
822 temp.off = -1;
823 result->quick_push (temp);
824 return;
825 }
826
827 /* For non-calls, store the information that makes up the address. */
828 tree orig = ref;
829 while (ref)
830 {
831 vn_reference_op_s temp;
832
833 memset (&temp, 0, sizeof (temp));
834 temp.type = TREE_TYPE (ref);
835 temp.opcode = TREE_CODE (ref);
836 temp.off = -1;
837
838 switch (temp.opcode)
839 {
840 case MODIFY_EXPR:
841 temp.op0 = TREE_OPERAND (ref, 1);
842 break;
843 case WITH_SIZE_EXPR:
844 temp.op0 = TREE_OPERAND (ref, 1);
845 temp.off = 0;
846 break;
847 case MEM_REF:
848 /* The base address gets its own vn_reference_op_s structure. */
849 temp.op0 = TREE_OPERAND (ref, 1);
850 if (!mem_ref_offset (ref).to_shwi (&temp.off))
851 temp.off = -1;
852 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
853 temp.base = MR_DEPENDENCE_BASE (ref);
854 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
855 break;
856 case BIT_FIELD_REF:
857 /* Record bits, position and storage order. */
858 temp.op0 = TREE_OPERAND (ref, 1);
859 temp.op1 = TREE_OPERAND (ref, 2);
860 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
861 temp.off = -1;
862 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
863 break;
864 case COMPONENT_REF:
865 /* The field decl is enough to unambiguously specify the field,
866 a matching type is not necessary and a mismatching type
867 is always a spurious difference. */
868 temp.type = NULL_TREE;
869 temp.op0 = TREE_OPERAND (ref, 1);
870 temp.op1 = TREE_OPERAND (ref, 2);
871 {
872 tree this_offset = component_ref_field_offset (ref);
873 if (this_offset
874 && poly_int_tree_p (this_offset))
875 {
876 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
877 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
878 {
879 poly_offset_int off
880 = (wi::to_poly_offset (this_offset)
881 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
882 /* Prohibit value-numbering zero offset components
883 of addresses the same before the pass folding
884 __builtin_object_size had a chance to run
885 (checking cfun->after_inlining does the
886 trick here). */
887 if (TREE_CODE (orig) != ADDR_EXPR
888 || maybe_ne (off, 0)
889 || cfun->after_inlining)
890 off.to_shwi (&temp.off);
891 }
892 }
893 }
894 break;
895 case ARRAY_RANGE_REF:
896 case ARRAY_REF:
897 {
898 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
899 /* Record index as operand. */
900 temp.op0 = TREE_OPERAND (ref, 1);
901 /* Always record lower bounds and element size. */
902 temp.op1 = array_ref_low_bound (ref);
903 /* But record element size in units of the type alignment. */
904 temp.op2 = TREE_OPERAND (ref, 3);
905 temp.align = eltype->type_common.align;
906 if (! temp.op2)
907 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
908 size_int (TYPE_ALIGN_UNIT (eltype)));
909 if (poly_int_tree_p (temp.op0)
910 && poly_int_tree_p (temp.op1)
911 && TREE_CODE (temp.op2) == INTEGER_CST)
912 {
913 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
914 - wi::to_poly_offset (temp.op1))
915 * wi::to_offset (temp.op2)
916 * vn_ref_op_align_unit (&temp));
917 off.to_shwi (&temp.off);
918 }
919 }
920 break;
921 case VAR_DECL:
922 if (DECL_HARD_REGISTER (ref))
923 {
924 temp.op0 = ref;
925 break;
926 }
927 /* Fallthru. */
928 case PARM_DECL:
929 case CONST_DECL:
930 case RESULT_DECL:
931 /* Canonicalize decls to MEM[&decl] which is what we end up with
932 when valueizing MEM[ptr] with ptr = &decl. */
933 temp.opcode = MEM_REF;
934 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
935 temp.off = 0;
936 result->safe_push (temp);
937 temp.opcode = ADDR_EXPR;
938 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
939 temp.type = TREE_TYPE (temp.op0);
940 temp.off = -1;
941 break;
942 case STRING_CST:
943 case INTEGER_CST:
944 case COMPLEX_CST:
945 case VECTOR_CST:
946 case REAL_CST:
947 case FIXED_CST:
948 case CONSTRUCTOR:
949 case SSA_NAME:
950 temp.op0 = ref;
951 break;
952 case ADDR_EXPR:
953 if (is_gimple_min_invariant (ref))
954 {
955 temp.op0 = ref;
956 break;
957 }
958 break;
959 /* These are only interesting for their operands, their
960 existence, and their type. They will never be the last
961 ref in the chain of references (i.e. they require an
962 operand), so we don't have to put anything
963 for op* as it will be handled by the iteration. */
964 case REALPART_EXPR:
965 temp.off = 0;
966 break;
967 case VIEW_CONVERT_EXPR:
968 temp.off = 0;
969 temp.reverse = storage_order_barrier_p (ref);
970 break;
971 case IMAGPART_EXPR:
972 /* This is only interesting for its constant offset. */
973 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
974 break;
975 default:
976 gcc_unreachable ();
977 }
978 result->safe_push (temp);
979
980 if (REFERENCE_CLASS_P (ref)
981 || TREE_CODE (ref) == MODIFY_EXPR
982 || TREE_CODE (ref) == WITH_SIZE_EXPR
983 || (TREE_CODE (ref) == ADDR_EXPR
984 && !is_gimple_min_invariant (ref)))
985 ref = TREE_OPERAND (ref, 0);
986 else
987 ref = NULL_TREE;
988 }
989 }
990
991 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
992 operands in *OPS, the reference alias set SET and the reference type TYPE.
993 Return true if something useful was produced. */
994
995 bool
996 ao_ref_init_from_vn_reference (ao_ref *ref,
997 alias_set_type set, tree type,
998 vec<vn_reference_op_s> ops)
999 {
1000 vn_reference_op_t op;
1001 unsigned i;
1002 tree base = NULL_TREE;
1003 tree *op0_p = &base;
1004 poly_offset_int offset = 0;
1005 poly_offset_int max_size;
1006 poly_offset_int size = -1;
1007 tree size_tree = NULL_TREE;
1008 alias_set_type base_alias_set = -1;
1009
1010 /* First get the final access size from just the outermost expression. */
1011 op = &ops[0];
1012 if (op->opcode == COMPONENT_REF)
1013 size_tree = DECL_SIZE (op->op0);
1014 else if (op->opcode == BIT_FIELD_REF)
1015 size_tree = op->op0;
1016 else
1017 {
1018 machine_mode mode = TYPE_MODE (type);
1019 if (mode == BLKmode)
1020 size_tree = TYPE_SIZE (type);
1021 else
1022 size = GET_MODE_BITSIZE (mode);
1023 }
1024 if (size_tree != NULL_TREE
1025 && poly_int_tree_p (size_tree))
1026 size = wi::to_poly_offset (size_tree);
1027
1028 /* Initially, maxsize is the same as the accessed element size.
1029 In the following it will only grow (or become -1). */
1030 max_size = size;
1031
1032 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1033 and find the ultimate containing object. */
1034 FOR_EACH_VEC_ELT (ops, i, op)
1035 {
1036 switch (op->opcode)
1037 {
1038 /* These may be in the reference ops, but we cannot do anything
1039 sensible with them here. */
1040 case ADDR_EXPR:
1041 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1042 if (base != NULL_TREE
1043 && TREE_CODE (base) == MEM_REF
1044 && op->op0
1045 && DECL_P (TREE_OPERAND (op->op0, 0)))
1046 {
1047 vn_reference_op_t pop = &ops[i-1];
1048 base = TREE_OPERAND (op->op0, 0);
1049 if (known_eq (pop->off, -1))
1050 {
1051 max_size = -1;
1052 offset = 0;
1053 }
1054 else
1055 offset += pop->off * BITS_PER_UNIT;
1056 op0_p = NULL;
1057 break;
1058 }
1059 /* Fallthru. */
1060 case CALL_EXPR:
1061 return false;
1062
1063 /* Record the base objects. */
1064 case MEM_REF:
1065 base_alias_set = get_deref_alias_set (op->op0);
1066 *op0_p = build2 (MEM_REF, op->type,
1067 NULL_TREE, op->op0);
1068 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1069 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1070 op0_p = &TREE_OPERAND (*op0_p, 0);
1071 break;
1072
1073 case VAR_DECL:
1074 case PARM_DECL:
1075 case RESULT_DECL:
1076 case SSA_NAME:
1077 *op0_p = op->op0;
1078 op0_p = NULL;
1079 break;
1080
1081 /* And now the usual component-reference style ops. */
1082 case BIT_FIELD_REF:
1083 offset += wi::to_poly_offset (op->op1);
1084 break;
1085
1086 case COMPONENT_REF:
1087 {
1088 tree field = op->op0;
1089 /* We do not have a complete COMPONENT_REF tree here so we
1090 cannot use component_ref_field_offset. Do the interesting
1091 parts manually. */
1092 tree this_offset = DECL_FIELD_OFFSET (field);
1093
1094 if (op->op1 || !poly_int_tree_p (this_offset))
1095 max_size = -1;
1096 else
1097 {
1098 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1099 << LOG2_BITS_PER_UNIT);
1100 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1101 offset += woffset;
1102 }
1103 break;
1104 }
1105
1106 case ARRAY_RANGE_REF:
1107 case ARRAY_REF:
1108 /* We recorded the lower bound and the element size. */
1109 if (!poly_int_tree_p (op->op0)
1110 || !poly_int_tree_p (op->op1)
1111 || TREE_CODE (op->op2) != INTEGER_CST)
1112 max_size = -1;
1113 else
1114 {
1115 poly_offset_int woffset
1116 = wi::sext (wi::to_poly_offset (op->op0)
1117 - wi::to_poly_offset (op->op1),
1118 TYPE_PRECISION (TREE_TYPE (op->op0)));
1119 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1120 woffset <<= LOG2_BITS_PER_UNIT;
1121 offset += woffset;
1122 }
1123 break;
1124
1125 case REALPART_EXPR:
1126 break;
1127
1128 case IMAGPART_EXPR:
1129 offset += size;
1130 break;
1131
1132 case VIEW_CONVERT_EXPR:
1133 break;
1134
1135 case STRING_CST:
1136 case INTEGER_CST:
1137 case COMPLEX_CST:
1138 case VECTOR_CST:
1139 case REAL_CST:
1140 case CONSTRUCTOR:
1141 case CONST_DECL:
1142 return false;
1143
1144 default:
1145 return false;
1146 }
1147 }
1148
1149 if (base == NULL_TREE)
1150 return false;
1151
1152 ref->ref = NULL_TREE;
1153 ref->base = base;
1154 ref->ref_alias_set = set;
1155 if (base_alias_set != -1)
1156 ref->base_alias_set = base_alias_set;
1157 else
1158 ref->base_alias_set = get_alias_set (base);
1159 /* We discount volatiles from value-numbering elsewhere. */
1160 ref->volatile_p = false;
1161
1162 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1163 {
1164 ref->offset = 0;
1165 ref->size = -1;
1166 ref->max_size = -1;
1167 return true;
1168 }
1169
1170 if (!offset.to_shwi (&ref->offset))
1171 {
1172 ref->offset = 0;
1173 ref->max_size = -1;
1174 return true;
1175 }
1176
1177 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1178 ref->max_size = -1;
1179
1180 return true;
1181 }
1182
1183 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1184 vn_reference_op_s's. */
1185
1186 static void
1187 copy_reference_ops_from_call (gcall *call,
1188 vec<vn_reference_op_s> *result)
1189 {
1190 vn_reference_op_s temp;
1191 unsigned i;
1192 tree lhs = gimple_call_lhs (call);
1193 int lr;
1194
1195 /* If two calls have a different non-SSA lhs, their vdef value numbers
1196 should be different. By adding the lhs to the vector here, we ensure
1197 that the hashcode is different, guaranteeing a different value number. */
1198 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1199 {
1200 memset (&temp, 0, sizeof (temp));
1201 temp.opcode = MODIFY_EXPR;
1202 temp.type = TREE_TYPE (lhs);
1203 temp.op0 = lhs;
1204 temp.off = -1;
1205 result->safe_push (temp);
1206 }
1207
1208 /* Copy the type, opcode, function, static chain and EH region, if any. */
1209 memset (&temp, 0, sizeof (temp));
1210 temp.type = gimple_call_fntype (call);
1211 temp.opcode = CALL_EXPR;
1212 temp.op0 = gimple_call_fn (call);
1213 temp.op1 = gimple_call_chain (call);
1214 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1215 temp.op2 = size_int (lr);
1216 temp.off = -1;
1217 result->safe_push (temp);
1218
1219 /* Copy the call arguments. As they can be references as well,
1220 just chain them together. */
1221 for (i = 0; i < gimple_call_num_args (call); ++i)
1222 {
1223 tree callarg = gimple_call_arg (call, i);
1224 copy_reference_ops_from_ref (callarg, result);
1225 }
1226 }
1227
1228 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1229 *I_P to point to the last element of the replacement. */
1230 static bool
1231 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1232 unsigned int *i_p)
1233 {
1234 unsigned int i = *i_p;
1235 vn_reference_op_t op = &(*ops)[i];
1236 vn_reference_op_t mem_op = &(*ops)[i - 1];
1237 tree addr_base;
1238 poly_int64 addr_offset = 0;
1239
1240 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1241 from .foo.bar to the preceding MEM_REF offset and replace the
1242 address with &OBJ. */
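/* For instance (hypothetical operands): with a preceding MEM_REF offset
   of 4 and op->op0 == &s.a.b, where .a.b starts at byte offset 8 within
   s, the pair becomes a MEM_REF offset of 12 with op->op0 == &s.  */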
1243 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1244 &addr_offset);
1245 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1246 if (addr_base != TREE_OPERAND (op->op0, 0))
1247 {
1248 poly_offset_int off
1249 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1250 SIGNED)
1251 + addr_offset);
1252 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1253 op->op0 = build_fold_addr_expr (addr_base);
1254 if (tree_fits_shwi_p (mem_op->op0))
1255 mem_op->off = tree_to_shwi (mem_op->op0);
1256 else
1257 mem_op->off = -1;
1258 return true;
1259 }
1260 return false;
1261 }
1262
1263 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1264 *I_P to point to the last element of the replacement. */
1265 static bool
1266 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1267 unsigned int *i_p)
1268 {
1269 unsigned int i = *i_p;
1270 vn_reference_op_t op = &(*ops)[i];
1271 vn_reference_op_t mem_op = &(*ops)[i - 1];
1272 gimple *def_stmt;
1273 enum tree_code code;
1274 poly_offset_int off;
1275
1276 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1277 if (!is_gimple_assign (def_stmt))
1278 return false;
1279
1280 code = gimple_assign_rhs_code (def_stmt);
1281 if (code != ADDR_EXPR
1282 && code != POINTER_PLUS_EXPR)
1283 return false;
1284
1285 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1286
1287 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1288 from .foo.bar to the preceding MEM_REF offset and replace the
1289 address with &OBJ. */
1290 if (code == ADDR_EXPR)
1291 {
1292 tree addr, addr_base;
1293 poly_int64 addr_offset;
1294
1295 addr = gimple_assign_rhs1 (def_stmt);
1296 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1297 &addr_offset);
1298 /* If that didn't work because the address isn't invariant, propagate
1299 the reference tree from the address operation in case the current
1300 dereference has no offset. */
1301 if (!addr_base
1302 && *i_p == ops->length () - 1
1303 && known_eq (off, 0)
1304 /* This makes us disable this transform for PRE where the
1305 reference ops might be also used for code insertion which
1306 is invalid. */
1307 && default_vn_walk_kind == VN_WALKREWRITE)
1308 {
1309 auto_vec<vn_reference_op_s, 32> tem;
1310 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1311 /* Make sure to preserve TBAA info. The only objects not
1312 wrapped in MEM_REFs that can have their address taken are
1313 STRING_CSTs. */
1314 if (tem.length () >= 2
1315 && tem[tem.length () - 2].opcode == MEM_REF)
1316 {
1317 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1318 new_mem_op->op0
1319 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1320 wi::to_poly_wide (new_mem_op->op0));
1321 }
1322 else
1323 gcc_assert (tem.last ().opcode == STRING_CST);
1324 ops->pop ();
1325 ops->pop ();
1326 ops->safe_splice (tem);
1327 --*i_p;
1328 return true;
1329 }
1330 if (!addr_base
1331 || TREE_CODE (addr_base) != MEM_REF
1332 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1334 return false;
1335
1336 off += addr_offset;
1337 off += mem_ref_offset (addr_base);
1338 op->op0 = TREE_OPERAND (addr_base, 0);
1339 }
1340 else
1341 {
1342 tree ptr, ptroff;
1343 ptr = gimple_assign_rhs1 (def_stmt);
1344 ptroff = gimple_assign_rhs2 (def_stmt);
1345 if (TREE_CODE (ptr) != SSA_NAME
1346 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1347 /* Make sure to not endlessly recurse.
1348 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1349 happen when we value-number a PHI to its backedge value. */
1350 || SSA_VAL (ptr) == op->op0
1351 || !poly_int_tree_p (ptroff))
1352 return false;
1353
1354 off += wi::to_poly_offset (ptroff);
1355 op->op0 = ptr;
1356 }
1357
1358 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1359 if (tree_fits_shwi_p (mem_op->op0))
1360 mem_op->off = tree_to_shwi (mem_op->op0);
1361 else
1362 mem_op->off = -1;
1363 /* ??? Can end up with endless recursion here!?
1364 gcc.c-torture/execute/strcmp-1.c */
1365 if (TREE_CODE (op->op0) == SSA_NAME)
1366 op->op0 = SSA_VAL (op->op0);
1367 if (TREE_CODE (op->op0) != SSA_NAME)
1368 op->opcode = TREE_CODE (op->op0);
1369
1370 /* And recurse. */
1371 if (TREE_CODE (op->op0) == SSA_NAME)
1372 vn_reference_maybe_forwprop_address (ops, i_p);
1373 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1374 vn_reference_fold_indirect (ops, i_p);
1375 return true;
1376 }
1377
1378 /* Optimize the reference REF to a constant if possible or return
1379 NULL_TREE if not. */
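/* For instance (illustrative only): a call with constant arguments such
   as __builtin_ffs (16) folds via build_call_expr below, and a read like
   cst_array[2] from a const-qualified variable with a known initializer
   folds to the corresponding initializer element.  */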
1380
1381 tree
1382 fully_constant_vn_reference_p (vn_reference_t ref)
1383 {
1384 vec<vn_reference_op_s> operands = ref->operands;
1385 vn_reference_op_t op;
1386
1387 /* Try to simplify the translated expression if it is
1388 a call to a builtin function with at most two arguments. */
1389 op = &operands[0];
1390 if (op->opcode == CALL_EXPR
1391 && TREE_CODE (op->op0) == ADDR_EXPR
1392 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1393 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1394 && operands.length () >= 2
1395 && operands.length () <= 3)
1396 {
1397 vn_reference_op_t arg0, arg1 = NULL;
1398 bool anyconst = false;
1399 arg0 = &operands[1];
1400 if (operands.length () > 2)
1401 arg1 = &operands[2];
1402 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1403 || (arg0->opcode == ADDR_EXPR
1404 && is_gimple_min_invariant (arg0->op0)))
1405 anyconst = true;
1406 if (arg1
1407 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1408 || (arg1->opcode == ADDR_EXPR
1409 && is_gimple_min_invariant (arg1->op0))))
1410 anyconst = true;
1411 if (anyconst)
1412 {
1413 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1414 arg1 ? 2 : 1,
1415 arg0->op0,
1416 arg1 ? arg1->op0 : NULL);
1417 if (folded
1418 && TREE_CODE (folded) == NOP_EXPR)
1419 folded = TREE_OPERAND (folded, 0);
1420 if (folded
1421 && is_gimple_min_invariant (folded))
1422 return folded;
1423 }
1424 }
1425
1426 /* Simplify reads from constants or constant initializers. */
1427 else if (BITS_PER_UNIT == 8
1428 && COMPLETE_TYPE_P (ref->type)
1429 && is_gimple_reg_type (ref->type))
1430 {
1431 poly_int64 off = 0;
1432 HOST_WIDE_INT size;
1433 if (INTEGRAL_TYPE_P (ref->type))
1434 size = TYPE_PRECISION (ref->type);
1435 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1436 size = tree_to_shwi (TYPE_SIZE (ref->type));
1437 else
1438 return NULL_TREE;
1439 if (size % BITS_PER_UNIT != 0
1440 || size > MAX_BITSIZE_MODE_ANY_MODE)
1441 return NULL_TREE;
1442 size /= BITS_PER_UNIT;
1443 unsigned i;
1444 for (i = 0; i < operands.length (); ++i)
1445 {
1446 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1447 {
1448 ++i;
1449 break;
1450 }
1451 if (known_eq (operands[i].off, -1))
1452 return NULL_TREE;
1453 off += operands[i].off;
1454 if (operands[i].opcode == MEM_REF)
1455 {
1456 ++i;
1457 break;
1458 }
1459 }
1460 vn_reference_op_t base = &operands[--i];
1461 tree ctor = error_mark_node;
1462 tree decl = NULL_TREE;
1463 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1464 ctor = base->op0;
1465 else if (base->opcode == MEM_REF
1466 && base[1].opcode == ADDR_EXPR
1467 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1468 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1469 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1470 {
1471 decl = TREE_OPERAND (base[1].op0, 0);
1472 if (TREE_CODE (decl) == STRING_CST)
1473 ctor = decl;
1474 else
1475 ctor = ctor_for_folding (decl);
1476 }
1477 if (ctor == NULL_TREE)
1478 return build_zero_cst (ref->type);
1479 else if (ctor != error_mark_node)
1480 {
1481 HOST_WIDE_INT const_off;
1482 if (decl)
1483 {
1484 tree res = fold_ctor_reference (ref->type, ctor,
1485 off * BITS_PER_UNIT,
1486 size * BITS_PER_UNIT, decl);
1487 if (res)
1488 {
1489 STRIP_USELESS_TYPE_CONVERSION (res);
1490 if (is_gimple_min_invariant (res))
1491 return res;
1492 }
1493 }
1494 else if (off.is_constant (&const_off))
1495 {
1496 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1497 int len = native_encode_expr (ctor, buf, size, const_off);
1498 if (len > 0)
1499 return native_interpret_expr (ref->type, buf, len);
1500 }
1501 }
1502 }
1503
1504 return NULL_TREE;
1505 }
1506
1507 /* Return true if OPS contain a storage order barrier. */
1508
1509 static bool
1510 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1511 {
1512 vn_reference_op_t op;
1513 unsigned i;
1514
1515 FOR_EACH_VEC_ELT (ops, i, op)
1516 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1517 return true;
1518
1519 return false;
1520 }
1521
1522 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1523 structures into their value numbers. This is done in-place, and
1524 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1525 whether any operands were valueized. */
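/* For instance (a hypothetical operand): if the index i_3 of an ARRAY_REF
   operand has value number 2, op0 is replaced by the constant 2 and, the
   lower bound and element size being constant as well, the operand's
   constant byte offset (off) is filled in too.  */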
1526
1527 static vec<vn_reference_op_s>
1528 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1529 bool with_avail = false)
1530 {
1531 vn_reference_op_t vro;
1532 unsigned int i;
1533
1534 *valueized_anything = false;
1535
1536 FOR_EACH_VEC_ELT (orig, i, vro)
1537 {
1538 if (vro->opcode == SSA_NAME
1539 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1540 {
1541 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1542 if (tem != vro->op0)
1543 {
1544 *valueized_anything = true;
1545 vro->op0 = tem;
1546 }
1547 /* If it transforms from an SSA_NAME to a constant, update
1548 the opcode. */
1549 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1550 vro->opcode = TREE_CODE (vro->op0);
1551 }
1552 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1553 {
1554 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1555 if (tem != vro->op1)
1556 {
1557 *valueized_anything = true;
1558 vro->op1 = tem;
1559 }
1560 }
1561 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1562 {
1563 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1564 if (tem != vro->op2)
1565 {
1566 *valueized_anything = true;
1567 vro->op2 = tem;
1568 }
1569 }
1570 /* If it transforms from an SSA_NAME to an address, fold with
1571 a preceding indirect reference. */
1572 if (i > 0
1573 && vro->op0
1574 && TREE_CODE (vro->op0) == ADDR_EXPR
1575 && orig[i - 1].opcode == MEM_REF)
1576 {
1577 if (vn_reference_fold_indirect (&orig, &i))
1578 *valueized_anything = true;
1579 }
1580 else if (i > 0
1581 && vro->opcode == SSA_NAME
1582 && orig[i - 1].opcode == MEM_REF)
1583 {
1584 if (vn_reference_maybe_forwprop_address (&orig, &i))
1585 *valueized_anything = true;
1586 }
1587 /* If it transforms a non-constant ARRAY_REF into a constant
1588 one, adjust the constant offset. */
1589 else if (vro->opcode == ARRAY_REF
1590 && known_eq (vro->off, -1)
1591 && poly_int_tree_p (vro->op0)
1592 && poly_int_tree_p (vro->op1)
1593 && TREE_CODE (vro->op2) == INTEGER_CST)
1594 {
1595 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1596 - wi::to_poly_offset (vro->op1))
1597 * wi::to_offset (vro->op2)
1598 * vn_ref_op_align_unit (vro));
1599 off.to_shwi (&vro->off);
1600 }
1601 }
1602
1603 return orig;
1604 }
1605
1606 static vec<vn_reference_op_s>
1607 valueize_refs (vec<vn_reference_op_s> orig)
1608 {
1609 bool tem;
1610 return valueize_refs_1 (orig, &tem);
1611 }
1612
1613 static vec<vn_reference_op_s> shared_lookup_references;
1614
1615 /* Create a vector of vn_reference_op_s structures from REF, a
1616 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1617 this function. *VALUEIZED_ANYTHING will specify whether any
1618 operands were valueized. */
1619
1620 static vec<vn_reference_op_s>
1621 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1622 {
1623 if (!ref)
1624 return vNULL;
1625 shared_lookup_references.truncate (0);
1626 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1627 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1628 valueized_anything);
1629 return shared_lookup_references;
1630 }
1631
1632 /* Create a vector of vn_reference_op_s structures from CALL, a
1633 call statement. The vector is shared among all callers of
1634 this function. */
1635
1636 static vec<vn_reference_op_s>
1637 valueize_shared_reference_ops_from_call (gcall *call)
1638 {
1639 if (!call)
1640 return vNULL;
1641 shared_lookup_references.truncate (0);
1642 copy_reference_ops_from_call (call, &shared_lookup_references);
1643 shared_lookup_references = valueize_refs (shared_lookup_references);
1644 return shared_lookup_references;
1645 }
1646
1647 /* Lookup a SCCVN reference operation VR in the current hash table.
1648 Returns the resulting value number if it exists in the hash table,
1649 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1650 vn_reference_t stored in the hashtable if something is found. */
1651
1652 static tree
1653 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1654 {
1655 vn_reference_s **slot;
1656 hashval_t hash;
1657
1658 hash = vr->hashcode;
1659 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1660 if (slot)
1661 {
1662 if (vnresult)
1663 *vnresult = (vn_reference_t)*slot;
1664 return ((vn_reference_t)*slot)->result;
1665 }
1666
1667 return NULL_TREE;
1668 }
1669
1670 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1671 with the current VUSE and performs the expression lookup. */
1672
1673 static void *
1674 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1675 {
1676 vn_reference_t vr = (vn_reference_t)vr_;
1677 vn_reference_s **slot;
1678 hashval_t hash;
1679
1680 if (last_vuse_ptr)
1681 *last_vuse_ptr = vuse;
1682
1683 /* Fixup vuse and hash. */
1684 if (vr->vuse)
1685 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1686 vr->vuse = vuse_ssa_val (vuse);
1687 if (vr->vuse)
1688 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1689
1690 hash = vr->hashcode;
1691 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1692 if (slot)
1693 return *slot;
1694
1695 return NULL;
1696 }
1697
1698 /* Lookup an existing or insert a new vn_reference entry into the
1699 value table for the VUSE, SET, TYPE, OPERANDS reference, which
1700 has the value VALUE, either a constant or an SSA name. */
1701
1702 static vn_reference_t
1703 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1704 alias_set_type set,
1705 tree type,
1706 vec<vn_reference_op_s,
1707 va_heap> operands,
1708 tree value)
1709 {
1710 vn_reference_s vr1;
1711 vn_reference_t result;
1712 unsigned value_id;
1713 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1714 vr1.operands = operands;
1715 vr1.type = type;
1716 vr1.set = set;
1717 vr1.hashcode = vn_reference_compute_hash (&vr1);
1718 if (vn_reference_lookup_1 (&vr1, &result))
1719 return result;
1720 if (TREE_CODE (value) == SSA_NAME)
1721 value_id = VN_INFO (value)->value_id;
1722 else
1723 value_id = get_or_alloc_constant_value_id (value);
1724 return vn_reference_insert_pieces (vuse, set, type,
1725 operands.copy (), value, value_id);
1726 }
1727
1728 /* Return a value-number for RCODE OPS... either by looking up an existing
1729 value-number for the simplified result or by inserting the operation if
1730 INSERT is true. */
1731
1732 static tree
1733 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1734 {
1735 tree result = NULL_TREE;
1736 /* We will be creating a value number for
1737 RCODE (OPS...).
1738 So first simplify and lookup this expression to see if it
1739 is already available. */
1740 mprts_hook = vn_lookup_simplify_result;
1741 bool res = false;
1742 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1743 {
1744 case 1:
1745 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1746 break;
1747 case 2:
1748 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1749 break;
1750 case 3:
1751 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1752 break;
1753 }
1754 mprts_hook = NULL;
1755 gimple *new_stmt = NULL;
1756 if (res
1757 && gimple_simplified_result_is_gimple_val (res_op))
1758 {
1759 /* The expression is already available. */
1760 result = res_op->ops[0];
1761 /* Valueize it, simplification returns something in AVAIL only. */
1762 if (TREE_CODE (result) == SSA_NAME)
1763 result = SSA_VAL (result);
1764 }
1765 else
1766 {
1767 tree val = vn_lookup_simplify_result (res_op);
1768 if (!val && insert)
1769 {
1770 gimple_seq stmts = NULL;
1771 result = maybe_push_res_to_seq (res_op, &stmts);
1772 if (result)
1773 {
1774 gcc_assert (gimple_seq_singleton_p (stmts));
1775 new_stmt = gimple_seq_first_stmt (stmts);
1776 }
1777 }
1778 else
1779 /* The expression is already available. */
1780 result = val;
1781 }
1782 if (new_stmt)
1783 {
1784 /* The expression is not yet available, value-number lhs to
1785 the new SSA_NAME we created. */
1786 /* Initialize value-number information properly. */
1787 vn_ssa_aux_t result_info = VN_INFO (result);
1788 result_info->valnum = result;
1789 result_info->value_id = get_next_value_id ();
1790 result_info->visited = 1;
1791 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1792 new_stmt);
1793 result_info->needs_insertion = true;
1794 /* ??? PRE phi-translation inserts NARYs without corresponding
1795 SSA name result. Re-use those but set their result according
1796 to the stmt we just built. */
1797 vn_nary_op_t nary = NULL;
1798 vn_nary_op_lookup_stmt (new_stmt, &nary);
1799 if (nary)
1800 {
1801 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
1802 nary->u.result = gimple_assign_lhs (new_stmt);
1803 }
1804 /* As all "inserted" statements are singleton SCCs, insert
1805 to the valid table. This is strictly needed to
1806 avoid re-generating new value SSA_NAMEs for the same
1807 expression during SCC iteration over and over (the
1808 optimistic table gets cleared after each iteration).
1809 We do not need to insert into the optimistic table, as
1810 lookups there will fall back to the valid table. */
1811 else
1812 {
1813 unsigned int length = vn_nary_length_from_stmt (new_stmt);
1814 vn_nary_op_t vno1
1815 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
1816 vno1->value_id = result_info->value_id;
1817 vno1->length = length;
1818 vno1->predicated_values = 0;
1819 vno1->u.result = result;
1820 init_vn_nary_op_from_stmt (vno1, new_stmt);
1821 vn_nary_op_insert_into (vno1, valid_info->nary, true);
1822 /* Also do not link it into the undo chain. */
1823 last_inserted_nary = vno1->next;
1824 vno1->next = (vn_nary_op_t)(void *)-1;
1825 }
1826 if (dump_file && (dump_flags & TDF_DETAILS))
1827 {
1828 fprintf (dump_file, "Inserting name ");
1829 print_generic_expr (dump_file, result);
1830 fprintf (dump_file, " for expression ");
1831 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1832 fprintf (dump_file, "\n");
1833 }
1834 }
1835 return result;
1836 }
1837
1838 /* Return a value-number for RCODE OPS... either by looking up an existing
1839 value-number for the simplified result or by inserting the operation. */
1840
1841 static tree
1842 vn_nary_build_or_lookup (gimple_match_op *res_op)
1843 {
1844 return vn_nary_build_or_lookup_1 (res_op, true);
1845 }
1846
1847 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1848 its value if present. */
1849
1850 tree
1851 vn_nary_simplify (vn_nary_op_t nary)
1852 {
1853 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1854 return NULL_TREE;
1855 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1856 nary->type, nary->length);
1857 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1858 return vn_nary_build_or_lookup_1 (&op, false);
1859 }
1860
1861 /* Elimination engine. */
1862
1863 class eliminate_dom_walker : public dom_walker
1864 {
1865 public:
1866 eliminate_dom_walker (cdi_direction, bitmap);
1867 ~eliminate_dom_walker ();
1868
1869 virtual edge before_dom_children (basic_block);
1870 virtual void after_dom_children (basic_block);
1871
1872 virtual tree eliminate_avail (basic_block, tree op);
1873 virtual void eliminate_push_avail (basic_block, tree op);
1874 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
1875
1876 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
1877
1878 unsigned eliminate_cleanup (bool region_p = false);
1879
1880 bool do_pre;
1881 unsigned int el_todo;
1882 unsigned int eliminations;
1883 unsigned int insertions;
1884
1885 /* SSA names that had their defs inserted by PRE if do_pre. */
1886 bitmap inserted_exprs;
1887
1888 /* Blocks with statements that have had their EH properties changed. */
1889 bitmap need_eh_cleanup;
1890
1891 /* Blocks with statements that have had their AB properties changed. */
1892 bitmap need_ab_cleanup;
1893
1894 /* Local state for the eliminate domwalk. */
1895 auto_vec<gimple *> to_remove;
1896 auto_vec<gimple *> to_fixup;
1897 auto_vec<tree> avail;
1898 auto_vec<tree> avail_stack;
1899 };
1900
1901 /* Adaptor to the elimination engine using RPO availability. */
1902
1903 class rpo_elim : public eliminate_dom_walker
1904 {
1905 public:
1906 rpo_elim(basic_block entry_)
1907 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
1908 ~rpo_elim();
1909
1910 virtual tree eliminate_avail (basic_block, tree op);
1911
1912 virtual void eliminate_push_avail (basic_block, tree);
1913
1914 basic_block entry;
1915   /* Instead of having a local availability lattice for each
1916      basic-block and defining availability at X as the union of
1917      the local availabilities at X and its dominators, we turn
1918      this upside down and track availability per value, since
1919      values are usually made available at very few points
1920      (at least one).
1921      So we have a value -> vec<location, leader> map where
1922      LOCATION specifies the basic-block in which LEADER is made
1923      available for VALUE.  We push to this vector in RPO
1924      order, so for iteration we can simply pop the last
1925      entries.
1926      LOCATION is the basic-block index and LEADER is its
1927      SSA name version.  */
1928   /* ??? We'd like to use auto_vec here with embedded storage
1929      but that doesn't play well until we can provide move
1930      constructors and use std::move on hash-table expansion.
1931      So for now this is a bit more expensive than necessary.
1932      We eventually want to switch to a chaining scheme like the one
1933      used for hashtable entries for unwinding, which would make it
1934      possible to make the vector part of the vn_ssa_aux structure.  */
1935 typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
1936 rpo_avail_t m_rpo_avail;
1937 };
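/* An illustrative sketch of the availability map above (not part of the
   original source): after value-numbering

     bb3:  x_2 = a_1 + 1;

   the map conceptually gains an entry
     value-of (x_2) -> [(3, SSA version of x_2)]
   recording that x_2 is the leader for its value from basic-block 3 on.
   Entries are pushed in RPO order and popped from the back when
   iteration unwinds.  */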
1938
1939 /* Global RPO state for access from hooks. */
1940 static rpo_elim *rpo_avail;
1941 basic_block vn_context_bb;
1942
1943 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1944    from the statement defining VUSE and if not successful tries to
1945    translate *REF and *VR through an aggregate copy at the definition
1946    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1947    of *REF and *VR.  If only disambiguation was performed then
1948    *DISAMBIGUATE_ONLY is set to true.  */
1949
1950 static void *
1951 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1952 bool *disambiguate_only)
1953 {
1954 vn_reference_t vr = (vn_reference_t)vr_;
1955 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1956 tree base = ao_ref_base (ref);
1957 HOST_WIDE_INT offseti, maxsizei;
1958 static vec<vn_reference_op_s> lhs_ops;
1959 ao_ref lhs_ref;
1960 bool lhs_ref_ok = false;
1961 poly_int64 copy_size;
1962
1963   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1964 if (is_gimple_assign (def_stmt))
1965 {
1966 tree lhs = gimple_assign_lhs (def_stmt);
1967 bool valueized_anything = false;
1968 /* Avoid re-allocation overhead. */
1969 lhs_ops.truncate (0);
1970 basic_block saved_rpo_bb = vn_context_bb;
1971 vn_context_bb = gimple_bb (def_stmt);
1972 copy_reference_ops_from_ref (lhs, &lhs_ops);
1973 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
1974 vn_context_bb = saved_rpo_bb;
1975 if (valueized_anything)
1976 {
1977 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1978 get_alias_set (lhs),
1979 TREE_TYPE (lhs), lhs_ops);
1980 if (lhs_ref_ok
1981 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1982 {
1983 *disambiguate_only = true;
1984 return NULL;
1985 }
1986 }
1987 else
1988 {
1989 ao_ref_init (&lhs_ref, lhs);
1990 lhs_ref_ok = true;
1991 }
1992
1993 /* If we reach a clobbering statement try to skip it and see if
1994 we find a VN result with exactly the same value as the
1995 possible clobber. In this case we can ignore the clobber
1996 and return the found value.
1997 	 Note that we don't need to worry about partially overlapping
1998 	 accesses, as we can then use TBAA to disambiguate against the
1999 clobbering statement when looking up a load (thus the
2000 VN_WALKREWRITE guard). */
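      /* A minimal illustrative example (not from the original source):

	   x_1 = a;		<- earlier load, already value-numbered
	   ...
	   a = x_1;		<- clobber that stores the same value back
	   y_2 = a;		<- this lookup may skip the store and reuse x_1

	 The store is only skipped when the value found before it is
	 exactly the value being stored.  */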
2001 if (vn_walk_kind == VN_WALKREWRITE
2002 && is_gimple_reg_type (TREE_TYPE (lhs))
2003 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2004 /* The overlap restriction breaks down when either access
2005 alias-set is zero. Still for accesses of the size of
2006 an addressable unit there can be no overlaps. Overlaps
2007 between different union members are not an issue since
2008 activation of a union member via a store makes the
2009 values of untouched bytes unspecified. */
2010 && (known_eq (ref->size, BITS_PER_UNIT)
2011 || (get_alias_set (lhs) != 0
2012 && ao_ref_alias_set (ref) != 0)))
2013 {
2014 tree *saved_last_vuse_ptr = last_vuse_ptr;
2015 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2016 last_vuse_ptr = NULL;
2017 tree saved_vuse = vr->vuse;
2018 hashval_t saved_hashcode = vr->hashcode;
2019 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), vr);
2020 /* Need to restore vr->vuse and vr->hashcode. */
2021 vr->vuse = saved_vuse;
2022 vr->hashcode = saved_hashcode;
2023 last_vuse_ptr = saved_last_vuse_ptr;
2024 if (res && res != (void *)-1)
2025 {
2026 vn_reference_t vnresult = (vn_reference_t) res;
2027 if (vnresult->result
2028 && operand_equal_p (vnresult->result,
2029 gimple_assign_rhs1 (def_stmt), 0))
2030 return res;
2031 }
2032 }
2033 }
2034 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2035 && gimple_call_num_args (def_stmt) <= 4)
2036 {
2037       /* For builtin calls, valueize their arguments and call the
2038 	 alias oracle again.  Valueization may improve points-to
2039 	 info of pointers and constify size and position arguments.
2040 	 Originally this was motivated by PR61034 which has
2041 	 conditional calls to free falsely clobbering ref because
2042 	 of imprecise points-to info of the argument.  */
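      /* Illustrative sketch (hypothetical GIMPLE, not from the original
	 source): for a guarded "free (p_2)" where p_2 valueizes to a
	 pointer with more precise points-to information,
	 call_may_clobber_ref_p_1 below may now be able to prove the call
	 does not clobber REF; the original arguments are restored right
	 after the query.  */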
2043 tree oldargs[4];
2044 bool valueized_anything = false;
2045 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2046 {
2047 oldargs[i] = gimple_call_arg (def_stmt, i);
2048 tree val = vn_valueize (oldargs[i]);
2049 if (val != oldargs[i])
2050 {
2051 gimple_call_set_arg (def_stmt, i, val);
2052 valueized_anything = true;
2053 }
2054 }
2055 if (valueized_anything)
2056 {
2057 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2058 ref);
2059 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2060 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2061 if (!res)
2062 {
2063 *disambiguate_only = true;
2064 return NULL;
2065 }
2066 }
2067 }
2068
2069 if (*disambiguate_only)
2070 return (void *)-1;
2071
2072 /* If we cannot constrain the size of the reference we cannot
2073 test if anything kills it. */
2074 if (!ref->max_size_known_p ())
2075 return (void *)-1;
2076
2077 poly_int64 offset = ref->offset;
2078 poly_int64 maxsize = ref->max_size;
2079
2080 /* We can't deduce anything useful from clobbers. */
2081 if (gimple_clobber_p (def_stmt))
2082 return (void *)-1;
2083
2084 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2085 from that definition.
2086 1) Memset. */
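  /* Illustrative sketch (not from the original source):

       memset (&a, 0, sizeof (a));
       ...
       x_1 = a.f;		-> can be value-numbered to 0

     provided the read has register type and is fully covered by the
     memset destination range, as checked below.  */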
2087 if (is_gimple_reg_type (vr->type)
2088 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2089 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2090 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2091 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2092 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2093 && offset.is_constant (&offseti)
2094 && offseti % BITS_PER_UNIT == 0))
2095 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2096 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2097 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2098 {
2099 tree base2;
2100 poly_int64 offset2, size2, maxsize2;
2101 bool reverse;
2102 tree ref2 = gimple_call_arg (def_stmt, 0);
2103 if (TREE_CODE (ref2) == SSA_NAME)
2104 {
2105 ref2 = SSA_VAL (ref2);
2106 if (TREE_CODE (ref2) == SSA_NAME
2107 && (TREE_CODE (base) != MEM_REF
2108 || TREE_OPERAND (base, 0) != ref2))
2109 {
2110 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2111 if (gimple_assign_single_p (def_stmt)
2112 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2113 ref2 = gimple_assign_rhs1 (def_stmt);
2114 }
2115 }
2116 if (TREE_CODE (ref2) == ADDR_EXPR)
2117 {
2118 ref2 = TREE_OPERAND (ref2, 0);
2119 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2120 &reverse);
2121 if (!known_size_p (maxsize2)
2122 || !known_eq (maxsize2, size2)
2123 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2124 return (void *)-1;
2125 }
2126 else if (TREE_CODE (ref2) == SSA_NAME)
2127 {
2128 poly_int64 soff;
2129 if (TREE_CODE (base) != MEM_REF
2130 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2131 return (void *)-1;
2132 offset += soff;
2133 offset2 = 0;
2134 if (TREE_OPERAND (base, 0) != ref2)
2135 {
2136 gimple *def = SSA_NAME_DEF_STMT (ref2);
2137 if (is_gimple_assign (def)
2138 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2139 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2140 && poly_int_tree_p (gimple_assign_rhs2 (def))
2141 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2142 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2143 {
2144 ref2 = gimple_assign_rhs1 (def);
2145 if (TREE_CODE (ref2) == SSA_NAME)
2146 ref2 = SSA_VAL (ref2);
2147 }
2148 else
2149 return (void *)-1;
2150 }
2151 }
2152 else
2153 return (void *)-1;
2154 tree len = gimple_call_arg (def_stmt, 2);
2155 if (known_subrange_p (offset, maxsize, offset2,
2156 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2157 {
2158 tree val;
2159 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2160 val = build_zero_cst (vr->type);
2161 else if (INTEGRAL_TYPE_P (vr->type)
2162 && known_eq (ref->size, 8))
2163 {
2164 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2165 vr->type, gimple_call_arg (def_stmt, 1));
2166 val = vn_nary_build_or_lookup (&res_op);
2167 if (!val
2168 || (TREE_CODE (val) == SSA_NAME
2169 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2170 return (void *)-1;
2171 }
2172 else
2173 {
2174 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2175 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2176 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2177 len);
2178 val = native_interpret_expr (vr->type, buf, len);
2179 if (!val)
2180 return (void *)-1;
2181 }
2182 return vn_reference_lookup_or_insert_for_pieces
2183 (vuse, vr->set, vr->type, vr->operands, val);
2184 }
2185 }
2186
2187 /* 2) Assignment from an empty CONSTRUCTOR. */
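  /* Illustrative sketch (not from the original source):

       a = {};			<- clearing via an empty CONSTRUCTOR
       x_1 = a.f;		-> can be value-numbered to 0

     when the read is fully covered by the cleared object.  */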
2188 else if (is_gimple_reg_type (vr->type)
2189 && gimple_assign_single_p (def_stmt)
2190 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2191 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2192 {
2193 tree base2;
2194 poly_int64 offset2, size2, maxsize2;
2195 bool reverse;
2196 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2197 &offset2, &size2, &maxsize2, &reverse);
2198 if (known_size_p (maxsize2)
2199 && known_eq (maxsize2, size2)
2200 && operand_equal_p (base, base2, 0)
2201 && known_subrange_p (offset, maxsize, offset2, size2))
2202 {
2203 tree val = build_zero_cst (vr->type);
2204 return vn_reference_lookup_or_insert_for_pieces
2205 (vuse, vr->set, vr->type, vr->operands, val);
2206 }
2207 }
2208
2209 /* 3) Assignment from a constant.  We can use fold's native encode/interpret
2210    routines to extract the assigned bits.  */
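  /* Illustrative sketch (not from the original source): for

       *(int *)p_1 = 0x11223344;
       x_2 = *(short *)p_1;

     the stored constant is encoded to bytes with native_encode_expr and
     the covered bytes are re-interpreted in the type of the read with
     native_interpret_expr, so the result follows the target's byte
     order.  */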
2211 else if (known_eq (ref->size, maxsize)
2212 && is_gimple_reg_type (vr->type)
2213 && !contains_storage_order_barrier_p (vr->operands)
2214 && gimple_assign_single_p (def_stmt)
2215 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2216 /* native_encode and native_decode operate on arrays of bytes
2217 and so fundamentally need a compile-time size and offset. */
2218 && maxsize.is_constant (&maxsizei)
2219 && maxsizei % BITS_PER_UNIT == 0
2220 && offset.is_constant (&offseti)
2221 && offseti % BITS_PER_UNIT == 0
2222 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2223 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2224 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2225 {
2226 tree base2;
2227 HOST_WIDE_INT offset2, size2;
2228 bool reverse;
2229 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2230 &offset2, &size2, &reverse);
2231 if (base2
2232 && !reverse
2233 && size2 % BITS_PER_UNIT == 0
2234 && offset2 % BITS_PER_UNIT == 0
2235 && operand_equal_p (base, base2, 0)
2236 && known_subrange_p (offseti, maxsizei, offset2, size2))
2237 {
2238 /* We support up to 512-bit values (for V8DFmode). */
2239 unsigned char buffer[64];
2240 int len;
2241
2242 tree rhs = gimple_assign_rhs1 (def_stmt);
2243 if (TREE_CODE (rhs) == SSA_NAME)
2244 rhs = SSA_VAL (rhs);
2245 	  len = native_encode_expr (rhs,
2246 buffer, sizeof (buffer),
2247 (offseti - offset2) / BITS_PER_UNIT);
2248 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2249 {
2250 tree type = vr->type;
2251 /* Make sure to interpret in a type that has a range
2252 covering the whole access size. */
2253 if (INTEGRAL_TYPE_P (vr->type)
2254 && maxsizei != TYPE_PRECISION (vr->type))
2255 type = build_nonstandard_integer_type (maxsizei,
2256 TYPE_UNSIGNED (type));
2257 tree val = native_interpret_expr (type, buffer,
2258 maxsizei / BITS_PER_UNIT);
2259 	      /* If we chop off bits because the type's precision doesn't
2260 		 match the memory access size, this is OK when optimizing
2261 		 reads but not when called from the DSE code during
2262 		 elimination.  */
2263 if (val
2264 && type != vr->type)
2265 {
2266 if (! int_fits_type_p (val, vr->type))
2267 val = NULL_TREE;
2268 else
2269 val = fold_convert (vr->type, val);
2270 }
2271
2272 if (val)
2273 return vn_reference_lookup_or_insert_for_pieces
2274 (vuse, vr->set, vr->type, vr->operands, val);
2275 }
2276 }
2277 }
2278
2279 /* 4) Assignment from an SSA name whose definition we may be able
2280    to access pieces of.  */
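  /* Illustrative sketch (not from the original source):

       a = v_1;			<- whole-object store of an SSA name
       x_2 = a.f;		-> looked up as BIT_FIELD_REF <v_1, size, pos>

     using the size of the read and its offset relative to the store.  */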
2281 else if (known_eq (ref->size, maxsize)
2282 && is_gimple_reg_type (vr->type)
2283 && !contains_storage_order_barrier_p (vr->operands)
2284 && gimple_assign_single_p (def_stmt)
2285 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2286 {
2287 tree base2;
2288 poly_int64 offset2, size2, maxsize2;
2289 bool reverse;
2290 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2291 &offset2, &size2, &maxsize2,
2292 &reverse);
2293 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2294 if (!reverse
2295 && known_size_p (maxsize2)
2296 && known_eq (maxsize2, size2)
2297 && operand_equal_p (base, base2, 0)
2298 && known_subrange_p (offset, maxsize, offset2, size2)
2299 /* ??? We can't handle bitfield precision extracts without
2300 either using an alternate type for the BIT_FIELD_REF and
2301 then doing a conversion or possibly adjusting the offset
2302 according to endianness. */
2303 && (! INTEGRAL_TYPE_P (vr->type)
2304 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2305 && multiple_p (ref->size, BITS_PER_UNIT)
2306 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2307 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2308 {
2309 gimple_match_op op (gimple_match_cond::UNCOND,
2310 BIT_FIELD_REF, vr->type,
2311 vn_valueize (def_rhs),
2312 bitsize_int (ref->size),
2313 bitsize_int (offset - offset2));
2314 tree val = vn_nary_build_or_lookup (&op);
2315 if (val
2316 && (TREE_CODE (val) != SSA_NAME
2317 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2318 {
2319 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2320 (vuse, vr->set, vr->type, vr->operands, val);
2321 return res;
2322 }
2323 }
2324 }
2325
2326 /* 5) For aggregate copies translate the reference through them if
2327 the copy kills ref. */
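  /* Illustrative sketch (not from the original source):

       a = b;			<- aggregate copy that kills the looked-up ref
       x_1 = a.f;		-> the lookup continues as if it were b.f

     by rewriting the reference operands to be based on the RHS of the
     copy.  */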
2328 else if (vn_walk_kind == VN_WALKREWRITE
2329 && gimple_assign_single_p (def_stmt)
2330 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2331 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2332 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2333 {
2334 tree base2;
2335 int i, j, k;
2336 auto_vec<vn_reference_op_s> rhs;
2337 vn_reference_op_t vro;
2338 ao_ref r;
2339
2340 if (!lhs_ref_ok)
2341 return (void *)-1;
2342
2343 /* See if the assignment kills REF. */
2344 base2 = ao_ref_base (&lhs_ref);
2345 if (!lhs_ref.max_size_known_p ()
2346 || (base != base2
2347 && (TREE_CODE (base) != MEM_REF
2348 || TREE_CODE (base2) != MEM_REF
2349 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2350 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2351 TREE_OPERAND (base2, 1))))
2352 || !stmt_kills_ref_p (def_stmt, ref))
2353 return (void *)-1;
2354
2355 /* Find the common base of ref and the lhs. lhs_ops already
2356 contains valueized operands for the lhs. */
2357 i = vr->operands.length () - 1;
2358 j = lhs_ops.length () - 1;
2359 while (j >= 0 && i >= 0
2360 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2361 {
2362 i--;
2363 j--;
2364 }
2365
2366 /* ??? The innermost op should always be a MEM_REF and we already
2367 checked that the assignment to the lhs kills vr. Thus for
2368 aggregate copies using char[] types the vn_reference_op_eq
2369 may fail when comparing types for compatibility. But we really
2370 don't care here - further lookups with the rewritten operands
2371 will simply fail if we messed up types too badly. */
2372 poly_int64 extra_off = 0;
2373 if (j == 0 && i >= 0
2374 && lhs_ops[0].opcode == MEM_REF
2375 && maybe_ne (lhs_ops[0].off, -1))
2376 {
2377 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2378 i--, j--;
2379 else if (vr->operands[i].opcode == MEM_REF
2380 && maybe_ne (vr->operands[i].off, -1))
2381 {
2382 extra_off = vr->operands[i].off - lhs_ops[0].off;
2383 i--, j--;
2384 }
2385 }
2386
2387 /* i now points to the first additional op.
2388 ??? LHS may not be completely contained in VR, one or more
2389 VIEW_CONVERT_EXPRs could be in its way. We could at least
2390 try handling outermost VIEW_CONVERT_EXPRs. */
2391 if (j != -1)
2392 return (void *)-1;
2393
2394 /* Punt if the additional ops contain a storage order barrier. */
2395 for (k = i; k >= 0; k--)
2396 {
2397 vro = &vr->operands[k];
2398 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2399 return (void *)-1;
2400 }
2401
2402 /* Now re-write REF to be based on the rhs of the assignment. */
2403 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2404
2405 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2406 if (maybe_ne (extra_off, 0))
2407 {
2408 if (rhs.length () < 2)
2409 return (void *)-1;
2410 int ix = rhs.length () - 2;
2411 if (rhs[ix].opcode != MEM_REF
2412 || known_eq (rhs[ix].off, -1))
2413 return (void *)-1;
2414 rhs[ix].off += extra_off;
2415 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2416 build_int_cst (TREE_TYPE (rhs[ix].op0),
2417 extra_off));
2418 }
2419
2420 /* We need to pre-pend vr->operands[0..i] to rhs. */
2421 vec<vn_reference_op_s> old = vr->operands;
2422 if (i + 1 + rhs.length () > vr->operands.length ())
2423 vr->operands.safe_grow (i + 1 + rhs.length ());
2424 else
2425 vr->operands.truncate (i + 1 + rhs.length ());
2426 FOR_EACH_VEC_ELT (rhs, j, vro)
2427 vr->operands[i + 1 + j] = *vro;
2428 vr->operands = valueize_refs (vr->operands);
2429 if (old == shared_lookup_references)
2430 shared_lookup_references = vr->operands;
2431 vr->hashcode = vn_reference_compute_hash (vr);
2432
2433 /* Try folding the new reference to a constant. */
2434 tree val = fully_constant_vn_reference_p (vr);
2435 if (val)
2436 return vn_reference_lookup_or_insert_for_pieces
2437 (vuse, vr->set, vr->type, vr->operands, val);
2438
2439 /* Adjust *ref from the new operands. */
2440 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2441 return (void *)-1;
2442 /* This can happen with bitfields. */
2443 if (maybe_ne (ref->size, r.size))
2444 return (void *)-1;
2445 *ref = r;
2446
2447 /* Do not update last seen VUSE after translating. */
2448 last_vuse_ptr = NULL;
2449
2450 /* Keep looking for the adjusted *REF / VR pair. */
2451 return NULL;
2452 }
2453
2454 /* 6) For memcpy copies translate the reference through them if
2455 the copy kills ref. */
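  /* Illustrative sketch (not from the original source):

       memcpy (&a, &b, sizeof (a));
       x_1 = a.f;		-> rewritten to a MEM_REF based on &b at the
				   corresponding offset and looked up again

     provided the access is fully contained in the copied region.  */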
2456 else if (vn_walk_kind == VN_WALKREWRITE
2457 && is_gimple_reg_type (vr->type)
2458 /* ??? Handle BCOPY as well. */
2459 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2460 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2461 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2462 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2463 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2464 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2465 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2466 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2467 {
2468 tree lhs, rhs;
2469 ao_ref r;
2470 poly_int64 rhs_offset, lhs_offset;
2471 vn_reference_op_s op;
2472 poly_uint64 mem_offset;
2473 poly_int64 at, byte_maxsize;
2474
2475 /* Only handle non-variable, addressable refs. */
2476 if (maybe_ne (ref->size, maxsize)
2477 || !multiple_p (offset, BITS_PER_UNIT, &at)
2478 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2479 return (void *)-1;
2480
2481 /* Extract a pointer base and an offset for the destination. */
2482 lhs = gimple_call_arg (def_stmt, 0);
2483 lhs_offset = 0;
2484 if (TREE_CODE (lhs) == SSA_NAME)
2485 {
2486 lhs = vn_valueize (lhs);
2487 if (TREE_CODE (lhs) == SSA_NAME)
2488 {
2489 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2490 if (gimple_assign_single_p (def_stmt)
2491 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2492 lhs = gimple_assign_rhs1 (def_stmt);
2493 }
2494 }
2495 if (TREE_CODE (lhs) == ADDR_EXPR)
2496 {
2497 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2498 &lhs_offset);
2499 if (!tem)
2500 return (void *)-1;
2501 if (TREE_CODE (tem) == MEM_REF
2502 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2503 {
2504 lhs = TREE_OPERAND (tem, 0);
2505 if (TREE_CODE (lhs) == SSA_NAME)
2506 lhs = vn_valueize (lhs);
2507 lhs_offset += mem_offset;
2508 }
2509 else if (DECL_P (tem))
2510 lhs = build_fold_addr_expr (tem);
2511 else
2512 return (void *)-1;
2513 }
2514 if (TREE_CODE (lhs) != SSA_NAME
2515 && TREE_CODE (lhs) != ADDR_EXPR)
2516 return (void *)-1;
2517
2518 /* Extract a pointer base and an offset for the source. */
2519 rhs = gimple_call_arg (def_stmt, 1);
2520 rhs_offset = 0;
2521 if (TREE_CODE (rhs) == SSA_NAME)
2522 rhs = vn_valueize (rhs);
2523 if (TREE_CODE (rhs) == ADDR_EXPR)
2524 {
2525 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2526 &rhs_offset);
2527 if (!tem)
2528 return (void *)-1;
2529 if (TREE_CODE (tem) == MEM_REF
2530 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2531 {
2532 rhs = TREE_OPERAND (tem, 0);
2533 rhs_offset += mem_offset;
2534 }
2535 else if (DECL_P (tem)
2536 || TREE_CODE (tem) == STRING_CST)
2537 rhs = build_fold_addr_expr (tem);
2538 else
2539 return (void *)-1;
2540 }
2541 if (TREE_CODE (rhs) != SSA_NAME
2542 && TREE_CODE (rhs) != ADDR_EXPR)
2543 return (void *)-1;
2544
2545   /* The bases of the destination and the reference have to agree.  */
2546 if (TREE_CODE (base) == MEM_REF)
2547 {
2548 if (TREE_OPERAND (base, 0) != lhs
2549 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2550 return (void *) -1;
2551 at += mem_offset;
2552 }
2553 else if (!DECL_P (base)
2554 || TREE_CODE (lhs) != ADDR_EXPR
2555 || TREE_OPERAND (lhs, 0) != base)
2556 return (void *)-1;
2557
2558 /* If the access is completely outside of the memcpy destination
2559 area there is no aliasing. */
2560 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2561 return NULL;
2562 /* And the access has to be contained within the memcpy destination. */
2563 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2564 return (void *)-1;
2565
2566 /* Make room for 2 operands in the new reference. */
2567 if (vr->operands.length () < 2)
2568 {
2569 vec<vn_reference_op_s> old = vr->operands;
2570 vr->operands.safe_grow_cleared (2);
2571 if (old == shared_lookup_references)
2572 shared_lookup_references = vr->operands;
2573 }
2574 else
2575 vr->operands.truncate (2);
2576
2577 /* The looked-through reference is a simple MEM_REF. */
2578 memset (&op, 0, sizeof (op));
2579 op.type = vr->type;
2580 op.opcode = MEM_REF;
2581 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2582 op.off = at - lhs_offset + rhs_offset;
2583 vr->operands[0] = op;
2584 op.type = TREE_TYPE (rhs);
2585 op.opcode = TREE_CODE (rhs);
2586 op.op0 = rhs;
2587 op.off = -1;
2588 vr->operands[1] = op;
2589 vr->hashcode = vn_reference_compute_hash (vr);
2590
2591 /* Try folding the new reference to a constant. */
2592 tree val = fully_constant_vn_reference_p (vr);
2593 if (val)
2594 return vn_reference_lookup_or_insert_for_pieces
2595 (vuse, vr->set, vr->type, vr->operands, val);
2596
2597 /* Adjust *ref from the new operands. */
2598 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2599 return (void *)-1;
2600 /* This can happen with bitfields. */
2601 if (maybe_ne (ref->size, r.size))
2602 return (void *)-1;
2603 *ref = r;
2604
2605 /* Do not update last seen VUSE after translating. */
2606 last_vuse_ptr = NULL;
2607
2608 /* Keep looking for the adjusted *REF / VR pair. */
2609 return NULL;
2610 }
2611
2612 /* Bail out and stop walking. */
2613 return (void *)-1;
2614 }
2615
2616 /* Return a reference op vector from OP that can be used for
2617 vn_reference_lookup_pieces. The caller is responsible for releasing
2618 the vector. */
2619
2620 vec<vn_reference_op_s>
2621 vn_reference_operands_for_lookup (tree op)
2622 {
2623 bool valueized;
2624 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2625 }
2626
2627 /* Lookup a reference operation by its parts, in the current hash table.
2628 Returns the resulting value number if it exists in the hash table,
2629 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2630 vn_reference_t stored in the hashtable if something is found. */
2631
2632 tree
2633 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2634 vec<vn_reference_op_s> operands,
2635 vn_reference_t *vnresult, vn_lookup_kind kind)
2636 {
2637 struct vn_reference_s vr1;
2638 vn_reference_t tmp;
2639 tree cst;
2640
2641 if (!vnresult)
2642 vnresult = &tmp;
2643 *vnresult = NULL;
2644
2645 vr1.vuse = vuse_ssa_val (vuse);
2646 shared_lookup_references.truncate (0);
2647 shared_lookup_references.safe_grow (operands.length ());
2648 memcpy (shared_lookup_references.address (),
2649 operands.address (),
2650 sizeof (vn_reference_op_s)
2651 * operands.length ());
2652 vr1.operands = operands = shared_lookup_references
2653 = valueize_refs (shared_lookup_references);
2654 vr1.type = type;
2655 vr1.set = set;
2656 vr1.hashcode = vn_reference_compute_hash (&vr1);
2657 if ((cst = fully_constant_vn_reference_p (&vr1)))
2658 return cst;
2659
2660 vn_reference_lookup_1 (&vr1, vnresult);
2661 if (!*vnresult
2662 && kind != VN_NOWALK
2663 && vr1.vuse)
2664 {
2665 ao_ref r;
2666 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
2667 vn_walk_kind = kind;
2668 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2669 *vnresult =
2670 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2671 vn_reference_lookup_2,
2672 vn_reference_lookup_3,
2673 vuse_valueize, limit, &vr1);
2674 gcc_checking_assert (vr1.operands == shared_lookup_references);
2675 }
2676
2677 if (*vnresult)
2678 return (*vnresult)->result;
2679
2680 return NULL_TREE;
2681 }
2682
2683 /* Lookup OP in the current hash table, and return the resulting value
2684 number if it exists in the hash table. Return NULL_TREE if it does
2685 not exist in the hash table or if the result field of the structure
2686    was NULL.  VNRESULT will be filled in with the vn_reference_t
2687 stored in the hashtable if one exists. When TBAA_P is false assume
2688 we are looking up a store and treat it as having alias-set zero. */
2689
2690 tree
2691 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2692 vn_reference_t *vnresult, bool tbaa_p)
2693 {
2694 vec<vn_reference_op_s> operands;
2695 struct vn_reference_s vr1;
2696 tree cst;
2697 bool valuezied_anything;
2698
2699 if (vnresult)
2700 *vnresult = NULL;
2701
2702 vr1.vuse = vuse_ssa_val (vuse);
2703 vr1.operands = operands
2704 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2705 vr1.type = TREE_TYPE (op);
2706 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2707 vr1.hashcode = vn_reference_compute_hash (&vr1);
2708 if ((cst = fully_constant_vn_reference_p (&vr1)))
2709 return cst;
2710
2711 if (kind != VN_NOWALK
2712 && vr1.vuse)
2713 {
2714 vn_reference_t wvnresult;
2715 ao_ref r;
2716 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
2717 /* Make sure to use a valueized reference if we valueized anything.
2718 Otherwise preserve the full reference for advanced TBAA. */
2719 if (!valuezied_anything
2720 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2721 vr1.operands))
2722 ao_ref_init (&r, op);
2723 if (! tbaa_p)
2724 r.ref_alias_set = r.base_alias_set = 0;
2725 vn_walk_kind = kind;
2726 wvnresult =
2727 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2728 vn_reference_lookup_2,
2729 vn_reference_lookup_3,
2730 vuse_valueize, limit, &vr1);
2731 gcc_checking_assert (vr1.operands == shared_lookup_references);
2732 if (wvnresult)
2733 {
2734 if (vnresult)
2735 *vnresult = wvnresult;
2736 return wvnresult->result;
2737 }
2738
2739 return NULL_TREE;
2740 }
2741
2742 return vn_reference_lookup_1 (&vr1, vnresult);
2743 }
2744
2745 /* Lookup CALL in the current hash table and return the entry in
2746 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2747
2748 void
2749 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2750 vn_reference_t vr)
2751 {
2752 if (vnresult)
2753 *vnresult = NULL;
2754
2755 tree vuse = gimple_vuse (call);
2756
2757 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2758 vr->operands = valueize_shared_reference_ops_from_call (call);
2759 vr->type = gimple_expr_type (call);
2760 vr->set = 0;
2761 vr->hashcode = vn_reference_compute_hash (vr);
2762 vn_reference_lookup_1 (vr, vnresult);
2763 }
2764
2765 /* Insert OP into the current hash table with a value number of RESULT. */
2766
2767 static void
2768 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2769 {
2770 vn_reference_s **slot;
2771 vn_reference_t vr1;
2772 bool tem;
2773
2774 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2775 if (TREE_CODE (result) == SSA_NAME)
2776 vr1->value_id = VN_INFO (result)->value_id;
2777 else
2778 vr1->value_id = get_or_alloc_constant_value_id (result);
2779 vr1->vuse = vuse_ssa_val (vuse);
2780 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2781 vr1->type = TREE_TYPE (op);
2782 vr1->set = get_alias_set (op);
2783 vr1->hashcode = vn_reference_compute_hash (vr1);
2784 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2785 vr1->result_vdef = vdef;
2786
2787 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2788 INSERT);
2789
2790   /* Because IL walking during reference lookup can end up visiting
2791      a def that is only to be visited later in iteration order
2792      when we are about to make an irreducible region reducible,
2793      the def can be effectively processed and its ref already
2794      inserted by vn_reference_lookup_3.  So we cannot assert (!*slot)
2795      but instead save a lookup if we deal with already inserted refs here.  */
2796 if (*slot)
2797 {
2798 /* We cannot assert that we have the same value either because
2799 when disentangling an irreducible region we may end up visiting
2800 a use before the corresponding def. That's a missed optimization
2801 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
2802 if (dump_file && (dump_flags & TDF_DETAILS)
2803 && !operand_equal_p ((*slot)->result, vr1->result, 0))
2804 {
2805 fprintf (dump_file, "Keeping old value ");
2806 print_generic_expr (dump_file, (*slot)->result);
2807 fprintf (dump_file, " because of collision\n");
2808 }
2809 free_reference (vr1);
2810 obstack_free (&vn_tables_obstack, vr1);
2811 return;
2812 }
2813
2814 *slot = vr1;
2815 vr1->next = last_inserted_ref;
2816 last_inserted_ref = vr1;
2817 }
2818
2819 /* Insert a reference by its pieces into the current hash table with
2820 a value number of RESULT. Return the resulting reference
2821 structure we created. */
2822
2823 vn_reference_t
2824 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2825 vec<vn_reference_op_s> operands,
2826 tree result, unsigned int value_id)
2827
2828 {
2829 vn_reference_s **slot;
2830 vn_reference_t vr1;
2831
2832 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2833 vr1->value_id = value_id;
2834 vr1->vuse = vuse_ssa_val (vuse);
2835 vr1->operands = valueize_refs (operands);
2836 vr1->type = type;
2837 vr1->set = set;
2838 vr1->hashcode = vn_reference_compute_hash (vr1);
2839 if (result && TREE_CODE (result) == SSA_NAME)
2840 result = SSA_VAL (result);
2841 vr1->result = result;
2842
2843 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2844 INSERT);
2845
2846 /* At this point we should have all the things inserted that we have
2847 seen before, and we should never try inserting something that
2848 already exists. */
2849 gcc_assert (!*slot);
2850
2851 *slot = vr1;
2852 vr1->next = last_inserted_ref;
2853 last_inserted_ref = vr1;
2854 return vr1;
2855 }
2856
2857 /* Compute and return the hash value for nary operation VNO1.  */
2858
2859 static hashval_t
2860 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2861 {
2862 inchash::hash hstate;
2863 unsigned i;
2864
2865 for (i = 0; i < vno1->length; ++i)
2866 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2867 vno1->op[i] = SSA_VAL (vno1->op[i]);
2868
2869 if (((vno1->length == 2
2870 && commutative_tree_code (vno1->opcode))
2871 || (vno1->length == 3
2872 && commutative_ternary_tree_code (vno1->opcode)))
2873 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2874 std::swap (vno1->op[0], vno1->op[1]);
2875 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2876 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2877 {
2878 std::swap (vno1->op[0], vno1->op[1]);
2879 vno1->opcode = swap_tree_comparison (vno1->opcode);
2880 }
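  /* Illustrative note (not part of the original source): the
     canonicalization above makes e.g. a_1 + b_2 and b_2 + a_1 hash (and
     later compare) the same, and likewise a_1 < b_2 and b_2 > a_1 via
     swap_tree_comparison.  */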
2881
2882 hstate.add_int (vno1->opcode);
2883 for (i = 0; i < vno1->length; ++i)
2884 inchash::add_expr (vno1->op[i], hstate);
2885
2886 return hstate.end ();
2887 }
2888
2889 /* Compare nary operations VNO1 and VNO2 and return true if they are
2890 equivalent. */
2891
2892 bool
2893 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2894 {
2895 unsigned i;
2896
2897 if (vno1->hashcode != vno2->hashcode)
2898 return false;
2899
2900 if (vno1->length != vno2->length)
2901 return false;
2902
2903 if (vno1->opcode != vno2->opcode
2904 || !types_compatible_p (vno1->type, vno2->type))
2905 return false;
2906
2907 for (i = 0; i < vno1->length; ++i)
2908 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2909 return false;
2910
2911   /* BIT_INSERT_EXPR has an implicit operand, the type precision
2912      of op1.  We need to check that they are the same.  */
2913 if (vno1->opcode == BIT_INSERT_EXPR
2914 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2915 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2916 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2917 return false;
2918
2919 return true;
2920 }
2921
2922 /* Initialize VNO from the pieces provided. */
2923
2924 static void
2925 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2926 enum tree_code code, tree type, tree *ops)
2927 {
2928 vno->opcode = code;
2929 vno->length = length;
2930 vno->type = type;
2931 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2932 }
2933
2934 /* Initialize VNO from OP. */
2935
2936 static void
2937 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2938 {
2939 unsigned i;
2940
2941 vno->opcode = TREE_CODE (op);
2942 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2943 vno->type = TREE_TYPE (op);
2944 for (i = 0; i < vno->length; ++i)
2945 vno->op[i] = TREE_OPERAND (op, i);
2946 }
2947
2948 /* Return the number of operands for a vn_nary ops structure from STMT. */
2949
2950 static unsigned int
2951 vn_nary_length_from_stmt (gimple *stmt)
2952 {
2953 switch (gimple_assign_rhs_code (stmt))
2954 {
2955 case REALPART_EXPR:
2956 case IMAGPART_EXPR:
2957 case VIEW_CONVERT_EXPR:
2958 return 1;
2959
2960 case BIT_FIELD_REF:
2961 return 3;
2962
2963 case CONSTRUCTOR:
2964 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2965
2966 default:
2967 return gimple_num_ops (stmt) - 1;
2968 }
2969 }
2970
2971 /* Initialize VNO from STMT. */
2972
2973 static void
2974 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2975 {
2976 unsigned i;
2977
2978 vno->opcode = gimple_assign_rhs_code (stmt);
2979 vno->type = gimple_expr_type (stmt);
2980 switch (vno->opcode)
2981 {
2982 case REALPART_EXPR:
2983 case IMAGPART_EXPR:
2984 case VIEW_CONVERT_EXPR:
2985 vno->length = 1;
2986 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2987 break;
2988
2989 case BIT_FIELD_REF:
2990 vno->length = 3;
2991 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2992 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2993 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2994 break;
2995
2996 case CONSTRUCTOR:
2997 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2998 for (i = 0; i < vno->length; ++i)
2999 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3000 break;
3001
3002 default:
3003 gcc_checking_assert (!gimple_assign_single_p (stmt));
3004 vno->length = gimple_num_ops (stmt) - 1;
3005 for (i = 0; i < vno->length; ++i)
3006 vno->op[i] = gimple_op (stmt, i + 1);
3007 }
3008 }
3009
3010 /* Compute the hashcode for VNO and look for it in the hash table;
3011 return the resulting value number if it exists in the hash table.
3012 Return NULL_TREE if it does not exist in the hash table or if the
3013 result field of the operation is NULL. VNRESULT will contain the
3014 vn_nary_op_t from the hashtable if it exists. */
3015
3016 static tree
3017 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3018 {
3019 vn_nary_op_s **slot;
3020
3021 if (vnresult)
3022 *vnresult = NULL;
3023
3024 vno->hashcode = vn_nary_op_compute_hash (vno);
3025 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3026 if (!slot)
3027 return NULL_TREE;
3028 if (vnresult)
3029 *vnresult = *slot;
3030 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3031 }
3032
3033 /* Lookup an n-ary operation by its pieces and return the resulting value
3034 number if it exists in the hash table. Return NULL_TREE if it does
3035 not exist in the hash table or if the result field of the operation
3036 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3037 if it exists. */
3038
3039 tree
3040 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3041 tree type, tree *ops, vn_nary_op_t *vnresult)
3042 {
3043 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3044 sizeof_vn_nary_op (length));
3045 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3046 return vn_nary_op_lookup_1 (vno1, vnresult);
3047 }
3048
3049 /* Lookup OP in the current hash table, and return the resulting value
3050 number if it exists in the hash table. Return NULL_TREE if it does
3051 not exist in the hash table or if the result field of the operation
3052 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3053 if it exists. */
3054
3055 tree
3056 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
3057 {
3058 vn_nary_op_t vno1
3059 = XALLOCAVAR (struct vn_nary_op_s,
3060 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3061 init_vn_nary_op_from_op (vno1, op);
3062 return vn_nary_op_lookup_1 (vno1, vnresult);
3063 }
3064
3065 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3066 value number if it exists in the hash table. Return NULL_TREE if
3067 it does not exist in the hash table. VNRESULT will contain the
3068 vn_nary_op_t from the hashtable if it exists. */
3069
3070 tree
3071 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3072 {
3073 vn_nary_op_t vno1
3074 = XALLOCAVAR (struct vn_nary_op_s,
3075 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3076 init_vn_nary_op_from_stmt (vno1, stmt);
3077 return vn_nary_op_lookup_1 (vno1, vnresult);
3078 }
3079
3080 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3081
3082 static vn_nary_op_t
3083 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3084 {
3085 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3086 }
3087
3088 /* Allocate and initialize a vn_nary_op_t on the
3089    vn_tables_obstack.  */
3090
3091 static vn_nary_op_t
3092 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3093 {
3094 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3095
3096 vno1->value_id = value_id;
3097 vno1->length = length;
3098 vno1->predicated_values = 0;
3099 vno1->u.result = result;
3100
3101 return vno1;
3102 }
3103
3104 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3105 VNO->HASHCODE first. */
3106
3107 static vn_nary_op_t
3108 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3109 bool compute_hash)
3110 {
3111 vn_nary_op_s **slot;
3112
3113 if (compute_hash)
3114 {
3115 vno->hashcode = vn_nary_op_compute_hash (vno);
3116 gcc_assert (! vno->predicated_values
3117 || (! vno->u.values->next
3118 && vno->u.values->n == 1));
3119 }
3120
3121 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3122 vno->unwind_to = *slot;
3123 if (*slot)
3124 {
3125 /* Prefer non-predicated values.
3126 ??? Only if those are constant, otherwise, with constant predicated
3127 value, turn them into predicated values with entry-block validity
3128 (??? but we always find the first valid result currently). */
3129 if ((*slot)->predicated_values
3130 && ! vno->predicated_values)
3131 {
3132 /* ??? We cannot remove *slot from the unwind stack list.
3133 For the moment we deal with this by skipping not found
3134 entries but this isn't ideal ... */
3135 *slot = vno;
3136 /* ??? Maintain a stack of states we can unwind in
3137 vn_nary_op_s? But how far do we unwind? In reality
3138 we need to push change records somewhere... Or not
3139 unwind vn_nary_op_s and linking them but instead
3140 unwind the results "list", linking that, which also
3141 doesn't move on hashtable resize. */
3142 /* We can also have a ->unwind_to recording *slot there.
3143 That way we can make u.values a fixed size array with
3144 recording the number of entries but of course we then
3145 have always N copies for each unwind_to-state. Or we
3146 make sure to only ever append and each unwinding will
3147 pop off one entry (but how to deal with predicated
3148 replaced with non-predicated here?) */
3149 vno->next = last_inserted_nary;
3150 last_inserted_nary = vno;
3151 return vno;
3152 }
3153 else if (vno->predicated_values
3154 && ! (*slot)->predicated_values)
3155 return *slot;
3156 else if (vno->predicated_values
3157 && (*slot)->predicated_values)
3158 {
3159 	  /* ??? Factor this all into an insert_single_predicated_value
3160 routine. */
3161 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3162 basic_block vno_bb
3163 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3164 vn_pval *nval = vno->u.values;
3165 vn_pval **next = &vno->u.values;
3166 bool found = false;
3167 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3168 {
3169 if (expressions_equal_p (val->result, vno->u.values->result))
3170 {
3171 found = true;
3172 for (unsigned i = 0; i < val->n; ++i)
3173 {
3174 basic_block val_bb
3175 = BASIC_BLOCK_FOR_FN (cfun,
3176 val->valid_dominated_by_p[i]);
3177 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3178 /* Value registered with more generic predicate. */
3179 return *slot;
3180 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3181 /* Shouldn't happen, we insert in RPO order. */
3182 gcc_unreachable ();
3183 }
3184 /* Append value. */
3185 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3186 sizeof (vn_pval)
3187 + val->n * sizeof (int));
3188 (*next)->next = NULL;
3189 (*next)->result = val->result;
3190 (*next)->n = val->n + 1;
3191 memcpy ((*next)->valid_dominated_by_p,
3192 val->valid_dominated_by_p,
3193 val->n * sizeof (int));
3194 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3195 next = &(*next)->next;
3196 if (dump_file && (dump_flags & TDF_DETAILS))
3197 fprintf (dump_file, "Appending predicate to value.\n");
3198 continue;
3199 }
3200 /* Copy other predicated values. */
3201 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3202 sizeof (vn_pval)
3203 + (val->n-1) * sizeof (int));
3204 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3205 (*next)->next = NULL;
3206 next = &(*next)->next;
3207 }
3208 if (!found)
3209 *next = nval;
3210
3211 *slot = vno;
3212 vno->next = last_inserted_nary;
3213 last_inserted_nary = vno;
3214 return vno;
3215 }
3216
3217 /* While we do not want to insert things twice it's awkward to
3218 avoid it in the case where visit_nary_op pattern-matches stuff
3219 and ends up simplifying the replacement to itself. We then
3220 get two inserts, one from visit_nary_op and one from
3221 vn_nary_build_or_lookup.
3222 So allow inserts with the same value number. */
3223 if ((*slot)->u.result == vno->u.result)
3224 return *slot;
3225 }
3226
3227 /* ??? There's also optimistic vs. previous committed state merging
3228 that is problematic for the case of unwinding. */
3229
3230 /* ??? We should return NULL if we do not use 'vno' and have the
3231 caller release it. */
3232 gcc_assert (!*slot);
3233
3234 *slot = vno;
3235 vno->next = last_inserted_nary;
3236 last_inserted_nary = vno;
3237 return vno;
3238 }
3239
3240 /* Insert an n-ary operation into the current hash table using its
3241 pieces. Return the vn_nary_op_t structure we created and put in
3242 the hashtable. */
3243
3244 vn_nary_op_t
3245 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3246 tree type, tree *ops,
3247 tree result, unsigned int value_id)
3248 {
3249 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3250 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3251 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3252 }
3253
3254 static vn_nary_op_t
3255 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3256 tree type, tree *ops,
3257 tree result, unsigned int value_id,
3258 edge pred_e)
3259 {
3260 /* ??? Currently tracking BBs. */
3261 if (! single_pred_p (pred_e->dest))
3262 {
3263 /* Never record for backedges. */
3264 if (pred_e->flags & EDGE_DFS_BACK)
3265 return NULL;
3266 edge_iterator ei;
3267 edge e;
3268 int cnt = 0;
3269 /* Ignore backedges. */
3270 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3271 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3272 cnt++;
3273 if (cnt != 1)
3274 return NULL;
3275 }
3276 if (dump_file && (dump_flags & TDF_DETAILS)
3277 /* ??? Fix dumping, but currently we only get comparisons. */
3278 && TREE_CODE_CLASS (code) == tcc_comparison)
3279 {
3280 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3281 pred_e->dest->index);
3282 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3283 fprintf (dump_file, " %s ", get_tree_code_name (code));
3284 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3285 fprintf (dump_file, " == %s\n",
3286 integer_zerop (result) ? "false" : "true");
3287 }
3288 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3289 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3290 vno1->predicated_values = 1;
3291 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3292 sizeof (vn_pval));
3293 vno1->u.values->next = NULL;
3294 vno1->u.values->result = result;
3295 vno1->u.values->n = 1;
3296 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3297 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3298 }
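/* Illustrative sketch (not from the original source): when value-numbering

     if (a_1 != 0)
       goto bb3;

   the fact "a_1 != 0 is true" can be recorded as a predicated value valid
   in blocks dominated by bb3 (the single-predecessor destination of the
   edge) and retrieved there via vn_nary_op_get_predicated_value below.  */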
3299
3300 static bool
3301 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3302
3303 static tree
3304 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3305 {
3306 if (! vno->predicated_values)
3307 return vno->u.result;
3308 for (vn_pval *val = vno->u.values; val; val = val->next)
3309 for (unsigned i = 0; i < val->n; ++i)
3310 if (dominated_by_p_w_unex (bb,
3311 BASIC_BLOCK_FOR_FN
3312 (cfun, val->valid_dominated_by_p[i])))
3313 return val->result;
3314 return NULL_TREE;
3315 }
3316
3317 /* Insert OP into the current hash table with a value number of
3318 RESULT. Return the vn_nary_op_t structure we created and put in
3319 the hashtable. */
3320
3321 vn_nary_op_t
3322 vn_nary_op_insert (tree op, tree result)
3323 {
3324 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3325 vn_nary_op_t vno1;
3326
3327 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3328 init_vn_nary_op_from_op (vno1, op);
3329 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3330 }
3331
3332 /* Insert the rhs of STMT into the current hash table with a value number of
3333 RESULT. */
3334
3335 static vn_nary_op_t
3336 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3337 {
3338 vn_nary_op_t vno1
3339 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3340 result, VN_INFO (result)->value_id);
3341 init_vn_nary_op_from_stmt (vno1, stmt);
3342 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3343 }
3344
3345 /* Compute a hashcode for PHI operation VP1 and return it. */
3346
3347 static inline hashval_t
3348 vn_phi_compute_hash (vn_phi_t vp1)
3349 {
3350 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3351 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3352 tree phi1op;
3353 tree type;
3354 edge e;
3355 edge_iterator ei;
3356
3357 /* If all PHI arguments are constants we need to distinguish
3358 the PHI node via its type. */
3359 type = vp1->type;
3360 hstate.merge_hash (vn_hash_type (type));
3361
3362 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3363 {
3364       /* Don't hash backedge values; they need to be handled as VN_TOP
3365 	 for optimistic value-numbering.  */
3366 if (e->flags & EDGE_DFS_BACK)
3367 continue;
3368
3369 phi1op = vp1->phiargs[e->dest_idx];
3370 if (phi1op == VN_TOP)
3371 continue;
3372 inchash::add_expr (phi1op, hstate);
3373 }
3374
3375 return hstate.end ();
3376 }
3377
3378
3379 /* Return true if COND1 and COND2 represent the same condition, set
3380 *INVERTED_P if one needs to be inverted to make it the same as
3381 the other. */
3382
3383 static bool
3384 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3385 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3386 {
3387 enum tree_code code1 = gimple_cond_code (cond1);
3388 enum tree_code code2 = gimple_cond_code (cond2);
3389
3390 *inverted_p = false;
3391 if (code1 == code2)
3392 ;
3393 else if (code1 == swap_tree_comparison (code2))
3394 std::swap (lhs2, rhs2);
3395 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3396 *inverted_p = true;
3397 else if (code1 == invert_tree_comparison
3398 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3399 {
3400 std::swap (lhs2, rhs2);
3401 *inverted_p = true;
3402 }
3403 else
3404 return false;
3405
3406 return ((expressions_equal_p (lhs1, lhs2)
3407 && expressions_equal_p (rhs1, rhs2))
3408 || (commutative_tree_code (code1)
3409 && expressions_equal_p (lhs1, rhs2)
3410 && expressions_equal_p (rhs1, lhs2)));
3411 }
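/* Illustrative note (not part of the original source): with the above,
   "a_1 < b_2" and "b_2 > a_1" are recognized as the same condition after
   swapping the operands, while "a_1 >= b_2" and "b_2 <= a_1" match it
   with *INVERTED_P set.  */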
3412
3413 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3414
3415 static int
3416 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3417 {
3418 if (vp1->hashcode != vp2->hashcode)
3419 return false;
3420
3421 if (vp1->block != vp2->block)
3422 {
3423 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3424 return false;
3425
3426 switch (EDGE_COUNT (vp1->block->preds))
3427 {
3428 case 1:
3429 /* Single-arg PHIs are just copies. */
3430 break;
3431
3432 case 2:
3433 {
3434 /* Rule out backedges into the PHI. */
3435 if (vp1->block->loop_father->header == vp1->block
3436 || vp2->block->loop_father->header == vp2->block)
3437 return false;
3438
3439 /* If the PHI nodes do not have compatible types
3440 they are not the same. */
3441 if (!types_compatible_p (vp1->type, vp2->type))
3442 return false;
3443
3444 basic_block idom1
3445 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3446 basic_block idom2
3447 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3448 	  /* If the immediate dominators end in switch stmts, multiple
3449 	     values may end up in the same PHI arg via intermediate
3450 	     CFG merges.  */
3451 if (EDGE_COUNT (idom1->succs) != 2
3452 || EDGE_COUNT (idom2->succs) != 2)
3453 return false;
3454
3455 /* Verify the controlling stmt is the same. */
3456 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3457 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3458 if (! last1 || ! last2)
3459 return false;
3460 bool inverted_p;
3461 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3462 last2, vp2->cclhs, vp2->ccrhs,
3463 &inverted_p))
3464 return false;
3465
3466 /* Get at true/false controlled edges into the PHI. */
3467 edge te1, te2, fe1, fe2;
3468 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3469 &te1, &fe1)
3470 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3471 &te2, &fe2))
3472 return false;
3473
3474 /* Swap edges if the second condition is the inverted of the
3475 first. */
3476 if (inverted_p)
3477 std::swap (te2, fe2);
3478
3479 /* ??? Handle VN_TOP specially. */
3480 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3481 vp2->phiargs[te2->dest_idx])
3482 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3483 vp2->phiargs[fe2->dest_idx]))
3484 return false;
3485
3486 return true;
3487 }
3488
3489 default:
3490 return false;
3491 }
3492 }
3493
3494 /* If the PHI nodes do not have compatible types
3495 they are not the same. */
3496 if (!types_compatible_p (vp1->type, vp2->type))
3497 return false;
3498
3499   /* Any phi in the same block will have its arguments in the
3500 same edge order, because of how we store phi nodes. */
3501 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3502 {
3503 tree phi1op = vp1->phiargs[i];
3504 tree phi2op = vp2->phiargs[i];
3505 if (phi1op == VN_TOP || phi2op == VN_TOP)
3506 continue;
3507 if (!expressions_equal_p (phi1op, phi2op))
3508 return false;
3509 }
3510
3511 return true;
3512 }
3513
3514 /* Lookup PHI in the current hash table, and return the resulting
3515 value number if it exists in the hash table. Return NULL_TREE if
3516 it does not exist in the hash table. */
3517
3518 static tree
3519 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3520 {
3521 vn_phi_s **slot;
3522 struct vn_phi_s *vp1;
3523 edge e;
3524 edge_iterator ei;
3525
3526 vp1 = XALLOCAVAR (struct vn_phi_s,
3527 sizeof (struct vn_phi_s)
3528 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3529
3530 /* Canonicalize the SSA_NAME's to their value number. */
3531 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3532 {
3533 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3534 if (TREE_CODE (def) == SSA_NAME
3535 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3536 def = SSA_VAL (def);
3537 vp1->phiargs[e->dest_idx] = def;
3538 }
3539 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3540 vp1->block = gimple_bb (phi);
3541 /* Extract values of the controlling condition. */
3542 vp1->cclhs = NULL_TREE;
3543 vp1->ccrhs = NULL_TREE;
3544 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3545 if (EDGE_COUNT (idom1->succs) == 2)
3546 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3547 {
3548 /* ??? We want to use SSA_VAL here. But possibly not
3549 allow VN_TOP. */
3550 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3551 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3552 }
3553 vp1->hashcode = vn_phi_compute_hash (vp1);
3554 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3555 if (!slot)
3556 return NULL_TREE;
3557 return (*slot)->result;
3558 }
3559
3560 /* Insert PHI into the current hash table with a value number of
3561 RESULT. */
3562
3563 static vn_phi_t
3564 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3565 {
3566 vn_phi_s **slot;
3567 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3568 sizeof (vn_phi_s)
3569 + ((gimple_phi_num_args (phi) - 1)
3570 * sizeof (tree)));
3571 edge e;
3572 edge_iterator ei;
3573
3574 /* Canonicalize the SSA_NAMEs to their value number. */
3575 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3576 {
3577 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3578 if (TREE_CODE (def) == SSA_NAME
3579 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3580 def = SSA_VAL (def);
3581 vp1->phiargs[e->dest_idx] = def;
3582 }
3583 vp1->value_id = VN_INFO (result)->value_id;
3584 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3585 vp1->block = gimple_bb (phi);
3586 /* Extract values of the controlling condition. */
3587 vp1->cclhs = NULL_TREE;
3588 vp1->ccrhs = NULL_TREE;
3589 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3590 if (EDGE_COUNT (idom1->succs) == 2)
3591 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3592 {
3593 /* ??? We want to use SSA_VAL here. But possibly not
3594 allow VN_TOP. */
3595 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3596 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3597 }
3598 vp1->result = result;
3599 vp1->hashcode = vn_phi_compute_hash (vp1);
3600
3601 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3602 gcc_assert (!*slot);
3603
3604 *slot = vp1;
3605 vp1->next = last_inserted_phi;
3606 last_inserted_phi = vp1;
3607 return vp1;
3608 }
3609
3610
3611 /* Return true if BB1 is dominated by BB2 taking into account edges
3612 that are not executable. */
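/* For example (illustrative): if bb0 conditionally branches to bb2 and
   bb3 which both fall through to bb1, bb1 is not dominated by bb2 in
   the CFG.  But if the edge bb3->bb1 was determined not executable,
   bb2 is the single executable predecessor of bb1 and we can answer
   true.  */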
3613
3614 static bool
3615 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3616 {
3617 edge_iterator ei;
3618 edge e;
3619
3620 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3621 return true;
3622
3623 /* Before iterating we'd like to know whether there exists an
3624 (executable) path from bb2 to bb1 at all; if not we can
3625 directly return false. For now simply iterate once. */
3626
3627 /* Iterate to the single executable bb1 predecessor. */
3628 if (EDGE_COUNT (bb1->preds) > 1)
3629 {
3630 edge prede = NULL;
3631 FOR_EACH_EDGE (e, ei, bb1->preds)
3632 if (e->flags & EDGE_EXECUTABLE)
3633 {
3634 if (prede)
3635 {
3636 prede = NULL;
3637 break;
3638 }
3639 prede = e;
3640 }
3641 if (prede)
3642 {
3643 bb1 = prede->src;
3644
3645 /* Re-do the dominance check with changed bb1. */
3646 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3647 return true;
3648 }
3649 }
3650
3651 /* Iterate to the single executable bb2 successor. */
3652 edge succe = NULL;
3653 FOR_EACH_EDGE (e, ei, bb2->succs)
3654 if (e->flags & EDGE_EXECUTABLE)
3655 {
3656 if (succe)
3657 {
3658 succe = NULL;
3659 break;
3660 }
3661 succe = e;
3662 }
3663 if (succe)
3664 {
3665 /* Verify the reached block is only reached through succe.
3666 If there is only one edge we can skip the dominator
3667 check and iterate directly. */
3668 if (EDGE_COUNT (succe->dest->preds) > 1)
3669 {
3670 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3671 if (e != succe
3672 && (e->flags & EDGE_EXECUTABLE))
3673 {
3674 succe = NULL;
3675 break;
3676 }
3677 }
3678 if (succe)
3679 {
3680 bb2 = succe->dest;
3681
3682 /* Re-do the dominance check with changed bb2. */
3683 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3684 return true;
3685 }
3686 }
3687
3688 /* We could now iterate updating bb1 / bb2. */
3689 return false;
3690 }
3691
3692 /* Set the value number of FROM to TO, return true if it has changed
3693 as a result. */
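/* For example (illustrative): set_ssa_val_to (x_1, 5) records 5 as the
   value of x_1 and returns true the first time; a later call with the
   same value returns false, and these "changed" results are what drive
   the value-numbering iteration to a fixpoint.  */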
3694
3695 static inline bool
3696 set_ssa_val_to (tree from, tree to)
3697 {
3698 vn_ssa_aux_t from_info = VN_INFO (from);
3699 tree currval = from_info->valnum; // SSA_VAL (from)
3700 poly_int64 toff, coff;
3701
3702 /* The only things we allow as value numbers are SSA names
3703 and invariants. So assert that here. We don't allow VN_TOP
3704 as visiting a stmt should produce a value number other than
3705 that.
3706 ??? Still VN_TOP can happen for unreachable code, so force
3707 it to varying in that case. Not all code is prepared to
3708 get VN_TOP on valueization. */
3709 if (to == VN_TOP)
3710 {
3711 /* ??? When iterating and visiting PHI <undef, backedge-value>
3712 for the first time we rightfully get VN_TOP and we need to
3713 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
3714 With SCCVN we were simply lucky we iterated the other PHI
3715 cycles first and thus visited the backedge-value DEF. */
3716 if (currval == VN_TOP)
3717 goto set_and_exit;
3718 if (dump_file && (dump_flags & TDF_DETAILS))
3719 fprintf (dump_file, "Forcing value number to varying on "
3720 "receiving VN_TOP\n");
3721 to = from;
3722 }
3723
3724 gcc_checking_assert (to != NULL_TREE
3725 && ((TREE_CODE (to) == SSA_NAME
3726 && (to == from || SSA_VAL (to) == to))
3727 || is_gimple_min_invariant (to)));
3728
3729 if (from != to)
3730 {
3731 if (currval == from)
3732 {
3733 if (dump_file && (dump_flags & TDF_DETAILS))
3734 {
3735 fprintf (dump_file, "Not changing value number of ");
3736 print_generic_expr (dump_file, from);
3737 fprintf (dump_file, " from VARYING to ");
3738 print_generic_expr (dump_file, to);
3739 fprintf (dump_file, "\n");
3740 }
3741 return false;
3742 }
3743 bool curr_invariant = is_gimple_min_invariant (currval);
3744 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
3745 && ssa_undefined_value_p (currval, false));
3746 if (currval != VN_TOP
3747 && !curr_invariant
3748 && !curr_undefined
3749 && is_gimple_min_invariant (to))
3750 {
3751 if (dump_file && (dump_flags & TDF_DETAILS))
3752 {
3753 fprintf (dump_file, "Forcing VARYING instead of changing "
3754 "value number of ");
3755 print_generic_expr (dump_file, from);
3756 fprintf (dump_file, " from ");
3757 print_generic_expr (dump_file, currval);
3758 fprintf (dump_file, " (non-constant) to ");
3759 print_generic_expr (dump_file, to);
3760 fprintf (dump_file, " (constant)\n");
3761 }
3762 to = from;
3763 }
3764 else if (currval != VN_TOP
3765 && !curr_undefined
3766 && TREE_CODE (to) == SSA_NAME
3767 && ssa_undefined_value_p (to, false))
3768 {
3769 if (dump_file && (dump_flags & TDF_DETAILS))
3770 {
3771 fprintf (dump_file, "Forcing VARYING instead of changing "
3772 "value number of ");
3773 print_generic_expr (dump_file, from);
3774 fprintf (dump_file, " from ");
3775 print_generic_expr (dump_file, currval);
3776 fprintf (dump_file, " (non-undefined) to ");
3777 print_generic_expr (dump_file, to);
3778 fprintf (dump_file, " (undefined)\n");
3779 }
3780 to = from;
3781 }
3782 else if (TREE_CODE (to) == SSA_NAME
3783 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3784 to = from;
3785 }
3786
3787 set_and_exit:
3788 if (dump_file && (dump_flags & TDF_DETAILS))
3789 {
3790 fprintf (dump_file, "Setting value number of ");
3791 print_generic_expr (dump_file, from);
3792 fprintf (dump_file, " to ");
3793 print_generic_expr (dump_file, to);
3794 }
3795
3796 if (currval != to
3797 && !operand_equal_p (currval, to, 0)
3798 /* Different undefined SSA names are not actually different. See
3799 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3800 && !(TREE_CODE (currval) == SSA_NAME
3801 && TREE_CODE (to) == SSA_NAME
3802 && ssa_undefined_value_p (currval, false)
3803 && ssa_undefined_value_p (to, false))
3804 /* ??? For addresses involving volatile objects or types operand_equal_p
3805 does not reliably detect ADDR_EXPRs as equal. We know we are only
3806 getting invariant gimple addresses here, so can use
3807 get_addr_base_and_unit_offset to do this comparison. */
3808 && !(TREE_CODE (currval) == ADDR_EXPR
3809 && TREE_CODE (to) == ADDR_EXPR
3810 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3811 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3812 && known_eq (coff, toff)))
3813 {
3814 if (dump_file && (dump_flags & TDF_DETAILS))
3815 fprintf (dump_file, " (changed)\n");
3816 from_info->valnum = to;
3817 return true;
3818 }
3819 if (dump_file && (dump_flags & TDF_DETAILS))
3820 fprintf (dump_file, "\n");
3821 return false;
3822 }
3823
3824 /* Value number all definitions in STMT to themselves.
3825 Return true if a value number changed. */
3826
3827 static bool
3828 defs_to_varying (gimple *stmt)
3829 {
3830 bool changed = false;
3831 ssa_op_iter iter;
3832 def_operand_p defp;
3833
3834 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3835 {
3836 tree def = DEF_FROM_PTR (defp);
3837 changed |= set_ssa_val_to (def, def);
3838 }
3839 return changed;
3840 }
3841
3842 /* Visit a copy between LHS and RHS, return true if the value number
3843 changed. */
3844
3845 static bool
3846 visit_copy (tree lhs, tree rhs)
3847 {
3848 /* Valueize. */
3849 rhs = SSA_VAL (rhs);
3850
3851 return set_ssa_val_to (lhs, rhs);
3852 }
3853
3854 /* Look up a value for OP in type WIDE_TYPE where the value in the
3855 type of OP is the same. */
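/* For example (illustrative), with a short OP and WIDE_TYPE int this
   returns an existing value-numbered (int) OP if there is one, or the
   untruncated source when OP itself was defined as (short) x_1 of an
   int x_1, or for an INTEGER_CST simply the constant extended to int.  */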
3856
3857 static tree
3858 valueized_wider_op (tree wide_type, tree op)
3859 {
3860 if (TREE_CODE (op) == SSA_NAME)
3861 op = vn_valueize (op);
3862
3863 /* Either the op is already available widened to WIDE_TYPE. */
3864 tree ops[3] = {};
3865 ops[0] = op;
3866 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3867 wide_type, ops, NULL);
3868 if (tem)
3869 return tem;
3870
3871 /* Or the op is truncated from some existing value. */
3872 if (TREE_CODE (op) == SSA_NAME)
3873 {
3874 gimple *def = SSA_NAME_DEF_STMT (op);
3875 if (is_gimple_assign (def)
3876 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3877 {
3878 tem = gimple_assign_rhs1 (def);
3879 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3880 {
3881 if (TREE_CODE (tem) == SSA_NAME)
3882 tem = vn_valueize (tem);
3883 return tem;
3884 }
3885 }
3886 }
3887
3888 /* For a constant simply extend it. */
3889 if (TREE_CODE (op) == INTEGER_CST)
3890 return wide_int_to_tree (wide_type, wi::to_wide (op));
3891
3892 return NULL_TREE;
3893 }
3894
3895 /* Visit a nary operator RHS, value number it, and return true if the
3896 value number of LHS has changed as a result. */
3897
3898 static bool
3899 visit_nary_op (tree lhs, gassign *stmt)
3900 {
3901 vn_nary_op_t vnresult;
3902 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
3903 if (! result && vnresult)
3904 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
3905 if (result)
3906 return set_ssa_val_to (lhs, result);
3907
3908 /* Do some special pattern matching for redundancies of operations
3909 in different types. */
3910 enum tree_code code = gimple_assign_rhs_code (stmt);
3911 tree type = TREE_TYPE (lhs);
3912 tree rhs1 = gimple_assign_rhs1 (stmt);
3913 switch (code)
3914 {
3915 CASE_CONVERT:
3916 /* Match arithmetic done in a different type where we can easily
3917 substitute the result from some earlier sign-changed or widened
3918 operation. */
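/* For example (illustrative):
     _1 = a_2 + b_3;           <- done in unsigned short
     _4 = (unsigned int) _1;
   can re-use an already value-numbered unsigned int addition of the
   widened operands, either directly when the precisions match or
   masked with 0xffff for the zero-extension case handled below.  */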
3919 if (INTEGRAL_TYPE_P (type)
3920 && TREE_CODE (rhs1) == SSA_NAME
3921 /* We only handle sign-changes or zero-extension -> & mask. */
3922 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3923 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3924 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3925 {
3926 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3927 if (def
3928 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3929 || gimple_assign_rhs_code (def) == MINUS_EXPR
3930 || gimple_assign_rhs_code (def) == MULT_EXPR))
3931 {
3932 tree ops[3] = {};
3933 /* Either we have the ops available widened. */
3934 ops[0] = valueized_wider_op (type,
3935 gimple_assign_rhs1 (def));
3936 if (ops[0])
3937 ops[1] = valueized_wider_op (type,
3938 gimple_assign_rhs2 (def));
3939 if (ops[0] && ops[1])
3940 {
3941 ops[0] = vn_nary_op_lookup_pieces
3942 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3943 /* We have the wider operation available. */
3944 if (ops[0]
3945 /* If the leader is a wrapping operation we can
3946 insert it for code hoisting without introducing
3947 undefined overflow. If it is not, it has to
3948 be available. See PR86554. */
3949 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3950 || (rpo_avail && vn_context_bb
3951 && rpo_avail->eliminate_avail (vn_context_bb,
3952 ops[0]))))
3953 {
3954 unsigned lhs_prec = TYPE_PRECISION (type);
3955 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3956 if (lhs_prec == rhs_prec)
3957 {
3958 gimple_match_op match_op (gimple_match_cond::UNCOND,
3959 NOP_EXPR, type, ops[0]);
3960 result = vn_nary_build_or_lookup (&match_op);
3961 if (result)
3962 {
3963 bool changed = set_ssa_val_to (lhs, result);
3964 vn_nary_op_insert_stmt (stmt, result);
3965 return changed;
3966 }
3967 }
3968 else
3969 {
3970 tree mask = wide_int_to_tree
3971 (type, wi::mask (rhs_prec, false, lhs_prec));
3972 gimple_match_op match_op (gimple_match_cond::UNCOND,
3973 BIT_AND_EXPR,
3974 TREE_TYPE (lhs),
3975 ops[0], mask);
3976 result = vn_nary_build_or_lookup (&match_op);
3977 if (result)
3978 {
3979 bool changed = set_ssa_val_to (lhs, result);
3980 vn_nary_op_insert_stmt (stmt, result);
3981 return changed;
3982 }
3983 }
3984 }
3985 }
3986 }
3987 }
3988 default:;
3989 }
3990
3991 bool changed = set_ssa_val_to (lhs, lhs);
3992 vn_nary_op_insert_stmt (stmt, lhs);
3993 return changed;
3994 }
3995
3996 /* Visit a call STMT storing into LHS. Return true if the value number
3997 of the LHS has changed as a result. */
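/* For example (illustrative), two calls y_1 = f (x_2) and y_3 = f (x_2)
   to a pure or const function f with the same valueized arguments and
   the same incoming memory state receive the same value number, so y_3
   is valued to y_1 and the second call becomes redundant.  */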
3998
3999 static bool
4000 visit_reference_op_call (tree lhs, gcall *stmt)
4001 {
4002 bool changed = false;
4003 struct vn_reference_s vr1;
4004 vn_reference_t vnresult = NULL;
4005 tree vdef = gimple_vdef (stmt);
4006
4007 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4008 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4009 lhs = NULL_TREE;
4010
4011 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4012 if (vnresult)
4013 {
4014 if (vnresult->result_vdef && vdef)
4015 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4016 else if (vdef)
4017 /* If the call was discovered to be pure or const, reflect
4018 that as far as possible. */
4019 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4020
4021 if (!vnresult->result && lhs)
4022 vnresult->result = lhs;
4023
4024 if (vnresult->result && lhs)
4025 changed |= set_ssa_val_to (lhs, vnresult->result);
4026 }
4027 else
4028 {
4029 vn_reference_t vr2;
4030 vn_reference_s **slot;
4031 tree vdef_val = vdef;
4032 if (vdef)
4033 {
4034 /* If we value numbered an indirect function to one not
4035 clobbering memory, value number its VDEF to its
4036 VUSE. */
4037 tree fn = gimple_call_fn (stmt);
4038 if (fn && TREE_CODE (fn) == SSA_NAME)
4039 {
4040 fn = SSA_VAL (fn);
4041 if (TREE_CODE (fn) == ADDR_EXPR
4042 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4043 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4044 & (ECF_CONST | ECF_PURE)))
4045 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4046 }
4047 changed |= set_ssa_val_to (vdef, vdef_val);
4048 }
4049 if (lhs)
4050 changed |= set_ssa_val_to (lhs, lhs);
4051 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4052 vr2->vuse = vr1.vuse;
4053 /* As we are not walking the virtual operand chain we know the
4054 shared_lookup_references are still original so we can re-use
4055 them here. */
4056 vr2->operands = vr1.operands.copy ();
4057 vr2->type = vr1.type;
4058 vr2->set = vr1.set;
4059 vr2->hashcode = vr1.hashcode;
4060 vr2->result = lhs;
4061 vr2->result_vdef = vdef_val;
4062 vr2->value_id = 0;
4063 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4064 INSERT);
4065 gcc_assert (!*slot);
4066 *slot = vr2;
4067 vr2->next = last_inserted_ref;
4068 last_inserted_ref = vr2;
4069 }
4070
4071 return changed;
4072 }
4073
4074 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4075 and return true if the value number of the LHS has changed as a result. */
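/* For example (illustrative), loading u.f from a union right after
   u.i was stored yields the stored value wrapped in a
   VIEW_CONVERT_EXPR <float> because offset and size of the accesses
   match while their types do not.  */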
4076
4077 static bool
4078 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4079 {
4080 bool changed = false;
4081 tree last_vuse;
4082 tree result;
4083
4084 last_vuse = gimple_vuse (stmt);
4085 last_vuse_ptr = &last_vuse;
4086 result = vn_reference_lookup (op, gimple_vuse (stmt),
4087 default_vn_walk_kind, NULL, true);
4088 last_vuse_ptr = NULL;
4089
4090 /* We handle type-punning through unions by value-numbering based
4091 on offset and size of the access. Be prepared to handle a
4092 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
4093 if (result
4094 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4095 {
4096 /* We will be setting the value number of lhs to the value number
4097 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4098 So first simplify and lookup this expression to see if it
4099 is already available. */
4100 gimple_match_op res_op (gimple_match_cond::UNCOND,
4101 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4102 result = vn_nary_build_or_lookup (&res_op);
4103 /* When building the conversion fails, avoid inserting the
4104 reference again. */
4105 if (!result)
4106 return set_ssa_val_to (lhs, lhs);
4107 }
4108
4109 if (result)
4110 changed = set_ssa_val_to (lhs, result);
4111 else
4112 {
4113 changed = set_ssa_val_to (lhs, lhs);
4114 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4115 }
4116
4117 return changed;
4118 }
4119
4120
4121 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4122 and return true if the value number of the LHS has changed as a result. */
4123
4124 static bool
4125 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4126 {
4127 bool changed = false;
4128 vn_reference_t vnresult = NULL;
4129 tree assign;
4130 bool resultsame = false;
4131 tree vuse = gimple_vuse (stmt);
4132 tree vdef = gimple_vdef (stmt);
4133
4134 if (TREE_CODE (op) == SSA_NAME)
4135 op = SSA_VAL (op);
4136
4137 /* First we want to look up using the *vuses* from the store and see
4138 if the last store to this location with the same address
4139 had the same value.
4140
4141 The vuses represent the memory state before the store. If the
4142 memory state, address, and value of the store is the same as the
4143 last store to this location, then this store will produce the
4144 same memory state as that store.
4145
4146 In this case the vdef versions for this store are value numbered to those
4147 vuse versions, since they represent the same memory state after
4148 this store.
4149
4150 Otherwise, the vdefs for the store are used when inserting into
4151 the table, since the store generates a new memory state. */
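/* For example (illustrative):
     # .MEM_4 = VDEF <.MEM_3>
     *p_1 = x_2;
     # .MEM_5 = VDEF <.MEM_4>
     *p_1 = x_2;
   the second store looks up *p_1 with VUSE .MEM_4, finds the first
   store's value x_2, and .MEM_5 is value numbered to the value of
   .MEM_4, which marks the second store as redundant.  */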
4152
4153 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4154 if (vnresult
4155 && vnresult->result)
4156 {
4157 tree result = vnresult->result;
4158 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4159 || result == SSA_VAL (result));
4160 resultsame = expressions_equal_p (result, op);
4161 if (resultsame)
4162 {
4163 /* If the TBAA state isn't compatible for downstream reads
4164 we cannot value-number the VDEFs the same. */
4165 alias_set_type set = get_alias_set (lhs);
4166 if (vnresult->set != set
4167 && ! alias_set_subset_of (set, vnresult->set))
4168 resultsame = false;
4169 }
4170 }
4171
4172 if (!resultsame)
4173 {
4174 /* Only perform the following when being called from PRE
4175 which embeds tail merging. */
4176 if (default_vn_walk_kind == VN_WALK)
4177 {
4178 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4179 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4180 if (vnresult)
4181 {
4182 VN_INFO (vdef)->visited = true;
4183 return set_ssa_val_to (vdef, vnresult->result_vdef);
4184 }
4185 }
4186
4187 if (dump_file && (dump_flags & TDF_DETAILS))
4188 {
4189 fprintf (dump_file, "No store match\n");
4190 fprintf (dump_file, "Value numbering store ");
4191 print_generic_expr (dump_file, lhs);
4192 fprintf (dump_file, " to ");
4193 print_generic_expr (dump_file, op);
4194 fprintf (dump_file, "\n");
4195 }
4196 /* Have to set value numbers before insert, since insert is
4197 going to valueize the references in-place. */
4198 if (vdef)
4199 changed |= set_ssa_val_to (vdef, vdef);
4200
4201 /* Do not insert structure copies into the tables. */
4202 if (is_gimple_min_invariant (op)
4203 || is_gimple_reg (op))
4204 vn_reference_insert (lhs, op, vdef, NULL);
4205
4206 /* Only perform the following when being called from PRE
4207 which embeds tail merging. */
4208 if (default_vn_walk_kind == VN_WALK)
4209 {
4210 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4211 vn_reference_insert (assign, lhs, vuse, vdef);
4212 }
4213 }
4214 else
4215 {
4216 /* We had a match, so value number the vdef to have the value
4217 number of the vuse it came from. */
4218
4219 if (dump_file && (dump_flags & TDF_DETAILS))
4220 fprintf (dump_file, "Store matched earlier value, "
4221 "value numbering store vdefs to matching vuses.\n");
4222
4223 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4224 }
4225
4226 return changed;
4227 }
4228
4229 /* Visit and value number PHI, return true if the value number
4230 changed. When BACKEDGES_VARYING_P is true then assume all
4231 backedge values are varying. When INSERTED is not NULL then
4232 this is just a look-ahead query for a possible iteration; set INSERTED
4233 to true if we'd insert into the hashtable. */
4234
4235 static bool
4236 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4237 {
4238 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4239 tree backedge_val = NULL_TREE;
4240 bool seen_non_backedge = false;
4241 tree sameval_base = NULL_TREE;
4242 poly_int64 soff, doff;
4243 unsigned n_executable = 0;
4244 edge_iterator ei;
4245 edge e;
4246
4247 /* TODO: We could check for this in initialization, and replace this
4248 with a gcc_assert. */
4249 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4250 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4251
4252 /* We track whether a PHI was CSEd to avoid excessive iterations
4253 that would be necessary only because the PHI changed arguments
4254 but not value. */
4255 if (!inserted)
4256 gimple_set_plf (phi, GF_PLF_1, false);
4257
4258 /* See if all non-TOP arguments have the same value. TOP is
4259 equivalent to everything, so we can ignore it. */
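/* For example (illustrative), x_1 = PHI <7(bb 2), 7(bb 3), x_4(D)(bb 4)>
   has sameval 7; the undefined argument x_4(D) is only recorded in
   seen_undef and, because 7 is a constant, the PHI still gets the
   value number 7 below.  */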
4260 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4261 if (e->flags & EDGE_EXECUTABLE)
4262 {
4263 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4264
4265 ++n_executable;
4266 if (TREE_CODE (def) == SSA_NAME)
4267 {
4268 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4269 def = SSA_VAL (def);
4270 if (e->flags & EDGE_DFS_BACK)
4271 backedge_val = def;
4272 }
4273 if (!(e->flags & EDGE_DFS_BACK))
4274 seen_non_backedge = true;
4275 if (def == VN_TOP)
4276 ;
4277 /* Ignore undefined defs for sameval but record one. */
4278 else if (TREE_CODE (def) == SSA_NAME
4279 && ! virtual_operand_p (def)
4280 && ssa_undefined_value_p (def, false))
4281 seen_undef = def;
4282 else if (sameval == VN_TOP)
4283 sameval = def;
4284 else if (!expressions_equal_p (def, sameval))
4285 {
4286 /* We know we're arriving only with invariant addresses here, so
4287 try harder comparing them. We can do some caching here
4288 which we cannot do in expressions_equal_p. */
4289 if (TREE_CODE (def) == ADDR_EXPR
4290 && TREE_CODE (sameval) == ADDR_EXPR
4291 && sameval_base != (void *)-1)
4292 {
4293 if (!sameval_base)
4294 sameval_base = get_addr_base_and_unit_offset
4295 (TREE_OPERAND (sameval, 0), &soff);
4296 if (!sameval_base)
4297 sameval_base = (tree)(void *)-1;
4298 else if ((get_addr_base_and_unit_offset
4299 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4300 && known_eq (soff, doff))
4301 continue;
4302 }
4303 sameval = NULL_TREE;
4304 break;
4305 }
4306 }
4307
4308 /* If the value we want to use is flowing over the backedge and we
4309 should take it as VARYING but it has a non-VARYING value, drop to
4310 VARYING.
4311 If we value-number a virtual operand, never value-number to the
4312 value from the backedge as that confuses the alias-walking code.
4313 See gcc.dg/torture/pr87176.c. If the value is the same on a
4314 non-backedge everything is OK though. */
4315 bool visited_p;
4316 if ((backedge_val
4317 && !seen_non_backedge
4318 && TREE_CODE (backedge_val) == SSA_NAME
4319 && sameval == backedge_val
4320 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4321 || SSA_VAL (backedge_val) != backedge_val))
4322 /* Do not value-number a virtual operand to something not visited
4323 though, as that would allow us to escape a region in alias walking. */
4324 || (sameval
4325 && TREE_CODE (sameval) == SSA_NAME
4326 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4327 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4328 && (SSA_VAL (sameval, &visited_p), !visited_p)))
4329 /* Note this just drops to VARYING without inserting the PHI into
4330 the hashes. */
4331 result = PHI_RESULT (phi);
4332 /* If none of the edges was executable keep the value-number at VN_TOP;
4333 if only a single edge is executable use its value. */
4334 else if (n_executable <= 1)
4335 result = seen_undef ? seen_undef : sameval;
4336 /* If we saw only undefined values and VN_TOP use one of the
4337 undefined values. */
4338 else if (sameval == VN_TOP)
4339 result = seen_undef ? seen_undef : sameval;
4340 /* First see if it is equivalent to a phi node in this block. We prefer
4341 this as it allows IV elimination - see PRs 66502 and 67167. */
4342 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4343 {
4344 if (!inserted
4345 && TREE_CODE (result) == SSA_NAME
4346 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4347 {
4348 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4349 if (dump_file && (dump_flags & TDF_DETAILS))
4350 {
4351 fprintf (dump_file, "Marking CSEd to PHI node ");
4352 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4353 0, TDF_SLIM);
4354 fprintf (dump_file, "\n");
4355 }
4356 }
4357 }
4358 /* If all values are the same use that, unless we've seen undefined
4359 values as well and the value isn't constant.
4360 CCP/copyprop have the same restriction to not remove uninit warnings. */
4361 else if (sameval
4362 && (! seen_undef || is_gimple_min_invariant (sameval)))
4363 result = sameval;
4364 else
4365 {
4366 result = PHI_RESULT (phi);
4367 /* Only insert PHIs that are varying; for constant value numbers
4368 we mess up equivalences otherwise as we are only comparing
4369 the immediate controlling predicates. */
4370 vn_phi_insert (phi, result, backedges_varying_p);
4371 if (inserted)
4372 *inserted = true;
4373 }
4374
4375 return set_ssa_val_to (PHI_RESULT (phi), result);
4376 }
4377
4378 /* Try to simplify RHS using equivalences and constant folding. */
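/* For example (illustrative), if SSA_VAL (a_1) is 3, the RHS of
   x_2 = a_1 + 1 constant-folds to 4 here because the folder valueizes
   operands through vn_valueize.  */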
4379
4380 static tree
4381 try_to_simplify (gassign *stmt)
4382 {
4383 enum tree_code code = gimple_assign_rhs_code (stmt);
4384 tree tem;
4385
4386 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4387 in this case; there is no point in doing extra work. */
4388 if (code == SSA_NAME)
4389 return NULL_TREE;
4390
4391 /* First try constant folding based on our current lattice. */
4392 mprts_hook = vn_lookup_simplify_result;
4393 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4394 mprts_hook = NULL;
4395 if (tem
4396 && (TREE_CODE (tem) == SSA_NAME
4397 || is_gimple_min_invariant (tem)))
4398 return tem;
4399
4400 return NULL_TREE;
4401 }
4402
4403 /* Visit and value number STMT, return true if the value number
4404 changed. */
4405
4406 static bool
4407 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4408 {
4409 bool changed = false;
4410
4411 if (dump_file && (dump_flags & TDF_DETAILS))
4412 {
4413 fprintf (dump_file, "Value numbering stmt = ");
4414 print_gimple_stmt (dump_file, stmt, 0);
4415 }
4416
4417 if (gimple_code (stmt) == GIMPLE_PHI)
4418 changed = visit_phi (stmt, NULL, backedges_varying_p);
4419 else if (gimple_has_volatile_ops (stmt))
4420 changed = defs_to_varying (stmt);
4421 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4422 {
4423 enum tree_code code = gimple_assign_rhs_code (ass);
4424 tree lhs = gimple_assign_lhs (ass);
4425 tree rhs1 = gimple_assign_rhs1 (ass);
4426 tree simplified;
4427
4428 /* Shortcut for copies. Simplifying copies is pointless,
4429 since we copy the expression and value they represent. */
4430 if (code == SSA_NAME
4431 && TREE_CODE (lhs) == SSA_NAME)
4432 {
4433 changed = visit_copy (lhs, rhs1);
4434 goto done;
4435 }
4436 simplified = try_to_simplify (ass);
4437 if (simplified)
4438 {
4439 if (dump_file && (dump_flags & TDF_DETAILS))
4440 {
4441 fprintf (dump_file, "RHS ");
4442 print_gimple_expr (dump_file, ass, 0);
4443 fprintf (dump_file, " simplified to ");
4444 print_generic_expr (dump_file, simplified);
4445 fprintf (dump_file, "\n");
4446 }
4447 }
4448 /* Setting value numbers to constants will occasionally
4449 screw up phi congruence because constants are not
4450 uniquely associated with a single ssa name that can be
4451 looked up. */
4452 if (simplified
4453 && is_gimple_min_invariant (simplified)
4454 && TREE_CODE (lhs) == SSA_NAME)
4455 {
4456 changed = set_ssa_val_to (lhs, simplified);
4457 goto done;
4458 }
4459 else if (simplified
4460 && TREE_CODE (simplified) == SSA_NAME
4461 && TREE_CODE (lhs) == SSA_NAME)
4462 {
4463 changed = visit_copy (lhs, simplified);
4464 goto done;
4465 }
4466
4467 if ((TREE_CODE (lhs) == SSA_NAME
4468 /* We can substitute SSA_NAMEs that are live over
4469 abnormal edges with their constant value. */
4470 && !(gimple_assign_copy_p (ass)
4471 && is_gimple_min_invariant (rhs1))
4472 && !(simplified
4473 && is_gimple_min_invariant (simplified))
4474 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4475 /* Stores or copies from SSA_NAMEs that are live over
4476 abnormal edges are a problem. */
4477 || (code == SSA_NAME
4478 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4479 changed = defs_to_varying (ass);
4480 else if (REFERENCE_CLASS_P (lhs)
4481 || DECL_P (lhs))
4482 changed = visit_reference_op_store (lhs, rhs1, ass);
4483 else if (TREE_CODE (lhs) == SSA_NAME)
4484 {
4485 if ((gimple_assign_copy_p (ass)
4486 && is_gimple_min_invariant (rhs1))
4487 || (simplified
4488 && is_gimple_min_invariant (simplified)))
4489 {
4490 if (simplified)
4491 changed = set_ssa_val_to (lhs, simplified);
4492 else
4493 changed = set_ssa_val_to (lhs, rhs1);
4494 }
4495 else
4496 {
4497 /* Visit the original statement. */
4498 switch (vn_get_stmt_kind (ass))
4499 {
4500 case VN_NARY:
4501 changed = visit_nary_op (lhs, ass);
4502 break;
4503 case VN_REFERENCE:
4504 changed = visit_reference_op_load (lhs, rhs1, ass);
4505 break;
4506 default:
4507 changed = defs_to_varying (ass);
4508 break;
4509 }
4510 }
4511 }
4512 else
4513 changed = defs_to_varying (ass);
4514 }
4515 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4516 {
4517 tree lhs = gimple_call_lhs (call_stmt);
4518 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4519 {
4520 /* Try constant folding based on our current lattice. */
4521 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4522 vn_valueize);
4523 if (simplified)
4524 {
4525 if (dump_file && (dump_flags & TDF_DETAILS))
4526 {
4527 fprintf (dump_file, "call ");
4528 print_gimple_expr (dump_file, call_stmt, 0);
4529 fprintf (dump_file, " simplified to ");
4530 print_generic_expr (dump_file, simplified);
4531 fprintf (dump_file, "\n");
4532 }
4533 }
4534 /* Setting value numbers to constants will occasionally
4535 screw up phi congruence because constants are not
4536 uniquely associated with a single ssa name that can be
4537 looked up. */
4538 if (simplified
4539 && is_gimple_min_invariant (simplified))
4540 {
4541 changed = set_ssa_val_to (lhs, simplified);
4542 if (gimple_vdef (call_stmt))
4543 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4544 SSA_VAL (gimple_vuse (call_stmt)));
4545 goto done;
4546 }
4547 else if (simplified
4548 && TREE_CODE (simplified) == SSA_NAME)
4549 {
4550 changed = visit_copy (lhs, simplified);
4551 if (gimple_vdef (call_stmt))
4552 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4553 SSA_VAL (gimple_vuse (call_stmt)));
4554 goto done;
4555 }
4556 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4557 {
4558 changed = defs_to_varying (call_stmt);
4559 goto done;
4560 }
4561 }
4562
4563 /* Pick up flags from a devirtualization target. */
4564 tree fn = gimple_call_fn (stmt);
4565 int extra_fnflags = 0;
4566 if (fn && TREE_CODE (fn) == SSA_NAME)
4567 {
4568 fn = SSA_VAL (fn);
4569 if (TREE_CODE (fn) == ADDR_EXPR
4570 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4571 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4572 }
4573 if (!gimple_call_internal_p (call_stmt)
4574 && (/* Calls to the same function with the same vuse
4575 and the same operands do not necessarily return the same
4576 value, unless they're pure or const. */
4577 ((gimple_call_flags (call_stmt) | extra_fnflags)
4578 & (ECF_PURE | ECF_CONST))
4579 /* If calls have a vdef, subsequent calls won't have
4580 the same incoming vuse. So, if 2 calls with vdef have the
4581 same vuse, we know they're not subsequent.
4582 We can value number 2 calls to the same function with the
4583 same vuse and the same operands which are not subsequent
4584 the same, because there is no code in the program that can
4585 compare the 2 values... */
4586 || (gimple_vdef (call_stmt)
4587 /* ... unless the call returns a pointer which does
4588 not alias with anything else, in which case the
4589 information that the values are distinct is encoded
4590 in the IL. */
4591 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4592 /* Only perform the following when being called from PRE
4593 which embeds tail merging. */
4594 && default_vn_walk_kind == VN_WALK)))
4595 changed = visit_reference_op_call (lhs, call_stmt);
4596 else
4597 changed = defs_to_varying (call_stmt);
4598 }
4599 else
4600 changed = defs_to_varying (stmt);
4601 done:
4602 return changed;
4603 }
4604
4605
4606 /* Allocate a value number table. */
4607
4608 static void
4609 allocate_vn_table (vn_tables_t table, unsigned size)
4610 {
4611 table->phis = new vn_phi_table_type (size);
4612 table->nary = new vn_nary_op_table_type (size);
4613 table->references = new vn_reference_table_type (size);
4614 }
4615
4616 /* Free a value number table. */
4617
4618 static void
4619 free_vn_table (vn_tables_t table)
4620 {
4621 /* Walk over elements and release vectors. */
4622 vn_reference_iterator_type hir;
4623 vn_reference_t vr;
4624 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4625 vr->operands.release ();
4626 delete table->phis;
4627 table->phis = NULL;
4628 delete table->nary;
4629 table->nary = NULL;
4630 delete table->references;
4631 table->references = NULL;
4632 }
4633
4634 /* Set *ID according to RESULT. */
4635
4636 static void
4637 set_value_id_for_result (tree result, unsigned int *id)
4638 {
4639 if (result && TREE_CODE (result) == SSA_NAME)
4640 *id = VN_INFO (result)->value_id;
4641 else if (result && is_gimple_min_invariant (result))
4642 *id = get_or_alloc_constant_value_id (result);
4643 else
4644 *id = get_next_value_id ();
4645 }
4646
4647 /* Set the value ids in the valid hash tables. */
4648
4649 static void
4650 set_hashtable_value_ids (void)
4651 {
4652 vn_nary_op_iterator_type hin;
4653 vn_phi_iterator_type hip;
4654 vn_reference_iterator_type hir;
4655 vn_nary_op_t vno;
4656 vn_reference_t vr;
4657 vn_phi_t vp;
4658
4659 /* Now set the value ids of the things we had put in the hash
4660 table. */
4661
4662 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4663 if (! vno->predicated_values)
4664 set_value_id_for_result (vno->u.result, &vno->value_id);
4665
4666 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4667 set_value_id_for_result (vp->result, &vp->value_id);
4668
4669 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4670 hir)
4671 set_value_id_for_result (vr->result, &vr->value_id);
4672 }
4673
4674 /* Return the maximum value id we have ever seen. */
4675
4676 unsigned int
4677 get_max_value_id (void)
4678 {
4679 return next_value_id;
4680 }
4681
4682 /* Return the next unique value id. */
4683
4684 unsigned int
4685 get_next_value_id (void)
4686 {
4687 return next_value_id++;
4688 }
4689
4690
4691 /* Compare two expressions E1 and E2 and return true if they are equal. */
4692
4693 bool
4694 expressions_equal_p (tree e1, tree e2)
4695 {
4696 /* The obvious case. */
4697 if (e1 == e2)
4698 return true;
4699
4700 /* If either one is VN_TOP consider them equal. */
4701 if (e1 == VN_TOP || e2 == VN_TOP)
4702 return true;
4703
4704 /* If only one of them is null, they cannot be equal. */
4705 if (!e1 || !e2)
4706 return false;
4707
4708 /* Now perform the actual comparison. */
4709 if (TREE_CODE (e1) == TREE_CODE (e2)
4710 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4711 return true;
4712
4713 return false;
4714 }
4715
4716
4717 /* Return true if the nary operation NARY may trap. This is a copy
4718 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4719
4720 bool
4721 vn_nary_may_trap (vn_nary_op_t nary)
4722 {
4723 tree type;
4724 tree rhs2 = NULL_TREE;
4725 bool honor_nans = false;
4726 bool honor_snans = false;
4727 bool fp_operation = false;
4728 bool honor_trapv = false;
4729 bool handled, ret;
4730 unsigned i;
4731
4732 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4733 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4734 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4735 {
4736 type = nary->type;
4737 fp_operation = FLOAT_TYPE_P (type);
4738 if (fp_operation)
4739 {
4740 honor_nans = flag_trapping_math && !flag_finite_math_only;
4741 honor_snans = flag_signaling_nans != 0;
4742 }
4743 else if (INTEGRAL_TYPE_P (type)
4744 && TYPE_OVERFLOW_TRAPS (type))
4745 honor_trapv = true;
4746 }
4747 if (nary->length >= 2)
4748 rhs2 = nary->op[1];
4749 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4750 honor_trapv,
4751 honor_nans, honor_snans, rhs2,
4752 &handled);
4753 if (handled
4754 && ret)
4755 return true;
4756
4757 for (i = 0; i < nary->length; ++i)
4758 if (tree_could_trap_p (nary->op[i]))
4759 return true;
4760
4761 return false;
4762 }
4763
4764 /* Return true if the reference operation REF may trap. */
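/* For example (illustrative), an ARRAY_REF with a variable (SSA_NAME)
   index is considered possibly trapping, while a reference whose
   address base is an ADDR_EXPR only traps if the underlying object
   could (tree_could_trap_p).  */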
4765
4766 bool
4767 vn_reference_may_trap (vn_reference_t ref)
4768 {
4769 switch (ref->operands[0].opcode)
4770 {
4771 case MODIFY_EXPR:
4772 case CALL_EXPR:
4773 /* We do not handle calls. */
4774 case ADDR_EXPR:
4775 /* And toplevel address computations never trap. */
4776 return false;
4777 default:;
4778 }
4779
4780 vn_reference_op_t op;
4781 unsigned i;
4782 FOR_EACH_VEC_ELT (ref->operands, i, op)
4783 {
4784 switch (op->opcode)
4785 {
4786 case WITH_SIZE_EXPR:
4787 case TARGET_MEM_REF:
4788 /* Always variable. */
4789 return true;
4790 case COMPONENT_REF:
4791 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
4792 return true;
4793 break;
4794 case ARRAY_RANGE_REF:
4795 case ARRAY_REF:
4796 if (TREE_CODE (op->op0) == SSA_NAME)
4797 return true;
4798 break;
4799 case MEM_REF:
4800 /* Nothing interesting in itself, the base is separate. */
4801 break;
4802 /* The following are the address bases. */
4803 case SSA_NAME:
4804 return true;
4805 case ADDR_EXPR:
4806 if (op->op0)
4807 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
4808 return false;
4809 default:;
4810 }
4811 }
4812 return false;
4813 }
4814
4815 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
4816 bitmap inserted_exprs_)
4817 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
4818 el_todo (0), eliminations (0), insertions (0),
4819 inserted_exprs (inserted_exprs_)
4820 {
4821 need_eh_cleanup = BITMAP_ALLOC (NULL);
4822 need_ab_cleanup = BITMAP_ALLOC (NULL);
4823 }
4824
4825 eliminate_dom_walker::~eliminate_dom_walker ()
4826 {
4827 BITMAP_FREE (need_eh_cleanup);
4828 BITMAP_FREE (need_ab_cleanup);
4829 }
4830
4831 /* Return a leader for OP that is available at the current point of the
4832 eliminate domwalk. */
4833
4834 tree
4835 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
4836 {
4837 tree valnum = VN_INFO (op)->valnum;
4838 if (TREE_CODE (valnum) == SSA_NAME)
4839 {
4840 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4841 return valnum;
4842 if (avail.length () > SSA_NAME_VERSION (valnum))
4843 return avail[SSA_NAME_VERSION (valnum)];
4844 }
4845 else if (is_gimple_min_invariant (valnum))
4846 return valnum;
4847 return NULL_TREE;
4848 }
4849
4850 /* At the current point of the eliminate domwalk make OP available. */
4851
4852 void
4853 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
4854 {
4855 tree valnum = VN_INFO (op)->valnum;
4856 if (TREE_CODE (valnum) == SSA_NAME)
4857 {
4858 if (avail.length () <= SSA_NAME_VERSION (valnum))
4859 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4860 tree pushop = op;
4861 if (avail[SSA_NAME_VERSION (valnum)])
4862 pushop = avail[SSA_NAME_VERSION (valnum)];
4863 avail_stack.safe_push (pushop);
4864 avail[SSA_NAME_VERSION (valnum)] = op;
4865 }
4866 }
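/* Note (explanatory): AVAIL is indexed by the SSA version of the value
   number and holds the current leader; the previous leader is pushed
   onto AVAIL_STACK so it can be restored later when the dominator walk
   unwinds.  */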
4867
4868 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4869 the leader for the expression if insertion was successful. */
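/* For example (illustrative), when VAL's recorded expression is
   (int) x_1 and a leader for x_1 is available, a new conversion
   statement is built and inserted before *GSI so later uses of the
   value can be replaced by its result.  */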
4870
4871 tree
4872 eliminate_dom_walker::eliminate_insert (basic_block bb,
4873 gimple_stmt_iterator *gsi, tree val)
4874 {
4875 /* We can insert a sequence with a single assignment only. */
4876 gimple_seq stmts = VN_INFO (val)->expr;
4877 if (!gimple_seq_singleton_p (stmts))
4878 return NULL_TREE;
4879 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4880 if (!stmt
4881 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4882 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4883 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4884 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4885 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4886 return NULL_TREE;
4887
4888 tree op = gimple_assign_rhs1 (stmt);
4889 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4890 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4891 op = TREE_OPERAND (op, 0);
4892 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
4893 if (!leader)
4894 return NULL_TREE;
4895
4896 tree res;
4897 stmts = NULL;
4898 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4899 res = gimple_build (&stmts, BIT_FIELD_REF,
4900 TREE_TYPE (val), leader,
4901 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4902 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4903 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4904 res = gimple_build (&stmts, BIT_AND_EXPR,
4905 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4906 else
4907 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4908 TREE_TYPE (val), leader);
4909 if (TREE_CODE (res) != SSA_NAME
4910 || SSA_NAME_IS_DEFAULT_DEF (res)
4911 || gimple_bb (SSA_NAME_DEF_STMT (res)))
4912 {
4913 gimple_seq_discard (stmts);
4914
4915 /* During propagation we have to treat SSA info conservatively
4916 and thus we can end up simplifying the inserted expression
4917 at elimination time to something not defined in stmts. */
4918 /* But then this is a redundancy we failed to detect, which means
4919 res now has two values. That doesn't play well with how
4920 we track availability here, so give up. */
4921 if (dump_file && (dump_flags & TDF_DETAILS))
4922 {
4923 if (TREE_CODE (res) == SSA_NAME)
4924 res = eliminate_avail (bb, res);
4925 if (res)
4926 {
4927 fprintf (dump_file, "Failed to insert expression for value ");
4928 print_generic_expr (dump_file, val);
4929 fprintf (dump_file, " which is really fully redundant to ");
4930 print_generic_expr (dump_file, res);
4931 fprintf (dump_file, "\n");
4932 }
4933 }
4934
4935 return NULL_TREE;
4936 }
4937 else
4938 {
4939 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4940 VN_INFO (res)->valnum = val;
4941 VN_INFO (res)->visited = true;
4942 }
4943
4944 insertions++;
4945 if (dump_file && (dump_flags & TDF_DETAILS))
4946 {
4947 fprintf (dump_file, "Inserted ");
4948 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
4949 }
4950
4951 return res;
4952 }
4953
4954 void
4955 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
4956 {
4957 tree sprime = NULL_TREE;
4958 gimple *stmt = gsi_stmt (*gsi);
4959 tree lhs = gimple_get_lhs (stmt);
4960 if (lhs && TREE_CODE (lhs) == SSA_NAME
4961 && !gimple_has_volatile_ops (stmt)
4962 /* See PR43491. Do not replace a global register variable when
4963 it is the RHS of an assignment. Do replace local register
4964 variables since gcc does not guarantee a local variable will
4965 be allocated in a register.
4966 ??? The fix isn't effective here. This should instead
4967 be ensured by not value-numbering them the same but treating
4968 them like volatiles? */
4969 && !(gimple_assign_single_p (stmt)
4970 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4971 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4972 && is_global_var (gimple_assign_rhs1 (stmt)))))
4973 {
4974 sprime = eliminate_avail (b, lhs);
4975 if (!sprime)
4976 {
4977 /* If there is no existing usable leader but SCCVN thinks
4978 it has an expression it wants to use as replacement,
4979 insert that. */
4980 tree val = VN_INFO (lhs)->valnum;
4981 if (val != VN_TOP
4982 && TREE_CODE (val) == SSA_NAME
4983 && VN_INFO (val)->needs_insertion
4984 && VN_INFO (val)->expr != NULL
4985 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
4986 eliminate_push_avail (b, sprime);
4987 }
4988
4989 /* If this now constitutes a copy, duplicate points-to
4990 and range info appropriately. This is especially
4991 important for inserted code. See tree-ssa-copy.c
4992 for similar code. */
4993 if (sprime
4994 && TREE_CODE (sprime) == SSA_NAME)
4995 {
4996 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4997 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4998 && SSA_NAME_PTR_INFO (lhs)
4999 && ! SSA_NAME_PTR_INFO (sprime))
5000 {
5001 duplicate_ssa_name_ptr_info (sprime,
5002 SSA_NAME_PTR_INFO (lhs));
5003 if (b != sprime_b)
5004 mark_ptr_info_alignment_unknown
5005 (SSA_NAME_PTR_INFO (sprime));
5006 }
5007 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5008 && SSA_NAME_RANGE_INFO (lhs)
5009 && ! SSA_NAME_RANGE_INFO (sprime)
5010 && b == sprime_b)
5011 duplicate_ssa_name_range_info (sprime,
5012 SSA_NAME_RANGE_TYPE (lhs),
5013 SSA_NAME_RANGE_INFO (lhs));
5014 }
5015
5016 /* Inhibit the use of an inserted PHI on a loop header when
5017 the address of the memory reference is a simple induction
5018 variable. In other cases the vectorizer won't do anything
5019 anyway (either it's loop invariant or a complicated
5020 expression). */
5021 if (sprime
5022 && TREE_CODE (sprime) == SSA_NAME
5023 && do_pre
5024 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5025 && loop_outer (b->loop_father)
5026 && has_zero_uses (sprime)
5027 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5028 && gimple_assign_load_p (stmt))
5029 {
5030 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5031 basic_block def_bb = gimple_bb (def_stmt);
5032 if (gimple_code (def_stmt) == GIMPLE_PHI
5033 && def_bb->loop_father->header == def_bb)
5034 {
5035 loop_p loop = def_bb->loop_father;
5036 ssa_op_iter iter;
5037 tree op;
5038 bool found = false;
5039 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5040 {
5041 affine_iv iv;
5042 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5043 if (def_bb
5044 && flow_bb_inside_loop_p (loop, def_bb)
5045 && simple_iv (loop, loop, op, &iv, true))
5046 {
5047 found = true;
5048 break;
5049 }
5050 }
5051 if (found)
5052 {
5053 if (dump_file && (dump_flags & TDF_DETAILS))
5054 {
5055 fprintf (dump_file, "Not replacing ");
5056 print_gimple_expr (dump_file, stmt, 0);
5057 fprintf (dump_file, " with ");
5058 print_generic_expr (dump_file, sprime);
5059 fprintf (dump_file, " which would add a loop"
5060 " carried dependence to loop %d\n",
5061 loop->num);
5062 }
5063 /* Don't keep sprime available. */
5064 sprime = NULL_TREE;
5065 }
5066 }
5067 }
5068
5069 if (sprime)
5070 {
5071 /* If we can propagate the value computed for LHS into
5072 all uses don't bother doing anything with this stmt. */
5073 if (may_propagate_copy (lhs, sprime))
5074 {
5075 /* Mark it for removal. */
5076 to_remove.safe_push (stmt);
5077
5078 /* ??? Don't count copy/constant propagations. */
5079 if (gimple_assign_single_p (stmt)
5080 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5081 || gimple_assign_rhs1 (stmt) == sprime))
5082 return;
5083
5084 if (dump_file && (dump_flags & TDF_DETAILS))
5085 {
5086 fprintf (dump_file, "Replaced ");
5087 print_gimple_expr (dump_file, stmt, 0);
5088 fprintf (dump_file, " with ");
5089 print_generic_expr (dump_file, sprime);
5090 fprintf (dump_file, " in all uses of ");
5091 print_gimple_stmt (dump_file, stmt, 0);
5092 }
5093
5094 eliminations++;
5095 return;
5096 }
5097
5098 /* If this is an assignment from our leader (which
5099 happens in the case the value-number is a constant)
5100 then there is nothing to do. */
5101 if (gimple_assign_single_p (stmt)
5102 && sprime == gimple_assign_rhs1 (stmt))
5103 return;
5104
5105 /* Else replace its RHS. */
5106 if (dump_file && (dump_flags & TDF_DETAILS))
5107 {
5108 fprintf (dump_file, "Replaced ");
5109 print_gimple_expr (dump_file, stmt, 0);
5110 fprintf (dump_file, " with ");
5111 print_generic_expr (dump_file, sprime);
5112 fprintf (dump_file, " in ");
5113 print_gimple_stmt (dump_file, stmt, 0);
5114 }
5115 eliminations++;
5116
5117 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5118 && stmt_can_make_abnormal_goto (stmt));
5119 gimple *orig_stmt = stmt;
5120 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5121 TREE_TYPE (sprime)))
5122 {
5123 /* We preserve conversions to but not from function or method
5124 types. This asymmetry makes it necessary to re-instantiate
5125 conversions here. */
5126 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5127 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5128 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5129 else
5130 gcc_unreachable ();
5131 }
5132 tree vdef = gimple_vdef (stmt);
5133 tree vuse = gimple_vuse (stmt);
5134 propagate_tree_value_into_stmt (gsi, sprime);
5135 stmt = gsi_stmt (*gsi);
5136 update_stmt (stmt);
5137 /* In case the VDEF on the original stmt was released, value-number
5138 it to the VUSE. This is to make vuse_ssa_val able to skip
5139 released virtual operands. */
5140 if (vdef != gimple_vdef (stmt))
5141 {
5142 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5143 VN_INFO (vdef)->valnum = vuse;
5144 }
5145
5146 /* If we removed EH side-effects from the statement, clean
5147 its EH information. */
5148 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5149 {
5150 bitmap_set_bit (need_eh_cleanup,
5151 gimple_bb (stmt)->index);
5152 if (dump_file && (dump_flags & TDF_DETAILS))
5153 fprintf (dump_file, " Removed EH side-effects.\n");
5154 }
5155
5156 /* Likewise for AB side-effects. */
5157 if (can_make_abnormal_goto
5158 && !stmt_can_make_abnormal_goto (stmt))
5159 {
5160 bitmap_set_bit (need_ab_cleanup,
5161 gimple_bb (stmt)->index);
5162 if (dump_file && (dump_flags & TDF_DETAILS))
5163 fprintf (dump_file, " Removed AB side-effects.\n");
5164 }
5165
5166 return;
5167 }
5168 }
5169
5170 /* If the statement is a scalar store, see if the expression
5171 has the same value number as its rhs. If so, the store is
5172 dead. */
5173 if (gimple_assign_single_p (stmt)
5174 && !gimple_has_volatile_ops (stmt)
5175 && !is_gimple_reg (gimple_assign_lhs (stmt))
5176 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5177 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5178 {
5179 tree val;
5180 tree rhs = gimple_assign_rhs1 (stmt);
5181 vn_reference_t vnresult;
5182 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5183 &vnresult, false);
5184 if (TREE_CODE (rhs) == SSA_NAME)
5185 rhs = VN_INFO (rhs)->valnum;
5186 if (val
5187 && operand_equal_p (val, rhs, 0))
5188 {
5189 /* We can only remove the later store if the former aliases
5190 at least all accesses the later one does or if the store
5191 was to readonly memory storing the same value. */
5192 alias_set_type set = get_alias_set (lhs);
5193 if (! vnresult
5194 || vnresult->set == set
5195 || alias_set_subset_of (set, vnresult->set))
5196 {
5197 if (dump_file && (dump_flags & TDF_DETAILS))
5198 {
5199 fprintf (dump_file, "Deleted redundant store ");
5200 print_gimple_stmt (dump_file, stmt, 0);
5201 }
5202
5203 /* Queue stmt for removal. */
5204 to_remove.safe_push (stmt);
5205 return;
5206 }
5207 }
5208 }
5209
5210 /* If this is a control statement on which value numbering left
5211 edges unexecuted, force the condition in a way consistent with
5212 that. */
5213 if (gcond *cond = dyn_cast <gcond *> (stmt))
5214 {
5215 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5216 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5217 {
5218 if (dump_file && (dump_flags & TDF_DETAILS))
5219 {
5220 fprintf (dump_file, "Removing unexecutable edge from ");
5221 print_gimple_stmt (dump_file, stmt, 0);
5222 }
5223 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5224 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5225 gimple_cond_make_true (cond);
5226 else
5227 gimple_cond_make_false (cond);
5228 update_stmt (cond);
5229 el_todo |= TODO_cleanup_cfg;
5230 return;
5231 }
5232 }
5233
5234 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5235 bool was_noreturn = (is_gimple_call (stmt)
5236 && gimple_call_noreturn_p (stmt));
5237 tree vdef = gimple_vdef (stmt);
5238 tree vuse = gimple_vuse (stmt);
5239
5240 /* If we didn't replace the whole stmt (or propagate the result
5241 into all uses), replace all uses on this stmt with their
5242 leaders. */
5243 bool modified = false;
5244 use_operand_p use_p;
5245 ssa_op_iter iter;
5246 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5247 {
5248 tree use = USE_FROM_PTR (use_p);
5249 /* ??? The call code above leaves stmt operands un-updated. */
5250 if (TREE_CODE (use) != SSA_NAME)
5251 continue;
5252 tree sprime;
5253 if (SSA_NAME_IS_DEFAULT_DEF (use))
5254 /* ??? For default defs BB shouldn't matter, but we have to
5255 solve the inconsistency between rpo eliminate and
5256 dom eliminate avail valueization first. */
5257 sprime = eliminate_avail (b, use);
5258 else
5259 /* Look for something available at the definition block of the argument.
5260 This avoids inconsistencies between availability there which
5261 decides if the stmt can be removed and availability at the
5262 use site. The SSA property ensures that things available
5263 at the definition are also available at uses. */
5264 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5265 if (sprime && sprime != use
5266 && may_propagate_copy (use, sprime)
5267 /* We substitute into debug stmts to avoid excessive
5268 debug temporaries created by removed stmts, but we need
5269 to avoid doing so for inserted sprimes as we never want
5270 to create debug temporaries for them. */
5271 && (!inserted_exprs
5272 || TREE_CODE (sprime) != SSA_NAME
5273 || !is_gimple_debug (stmt)
5274 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5275 {
5276 propagate_value (use_p, sprime);
5277 modified = true;
5278 }
5279 }
5280
5281 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5282 into, which is a requirement for the IPA devirt machinery. */
5283 gimple *old_stmt = stmt;
5284 if (modified)
5285 {
5286 /* If a formerly non-invariant ADDR_EXPR is turned into an
5287 invariant one it was on a separate stmt. */
5288 if (gimple_assign_single_p (stmt)
5289 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5290 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5291 gimple_stmt_iterator prev = *gsi;
5292 gsi_prev (&prev);
5293 if (fold_stmt (gsi))
5294 {
5295 /* fold_stmt may have created new stmts in between
5296 the previous stmt and the folded stmt. Mark
5297 all defs created there as varying to not confuse
5298 the SCCVN machinery as we're using that even during
5299 elimination. */
5300 if (gsi_end_p (prev))
5301 prev = gsi_start_bb (b);
5302 else
5303 gsi_next (&prev);
5304 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5305 do
5306 {
5307 tree def;
5308 ssa_op_iter dit;
5309 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5310 dit, SSA_OP_ALL_DEFS)
5311 /* As existing DEFs may move between stmts
5312 only process new ones. */
5313 if (! has_VN_INFO (def))
5314 {
5315 VN_INFO (def)->valnum = def;
5316 VN_INFO (def)->visited = true;
5317 }
5318 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5319 break;
5320 gsi_next (&prev);
5321 }
5322 while (1);
5323 }
5324 stmt = gsi_stmt (*gsi);
5325 /* In case we folded the stmt away schedule the NOP for removal. */
5326 if (gimple_nop_p (stmt))
5327 to_remove.safe_push (stmt);
5328 }
5329
5330 /* Visit indirect calls and turn them into direct calls if
5331 possible using the devirtualization machinery. Do this before
5332 checking for required EH/abnormal/noreturn cleanup as devirt
5333 may expose more of those. */
5334 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5335 {
5336 tree fn = gimple_call_fn (call_stmt);
5337 if (fn
5338 && flag_devirtualize
5339 && virtual_method_call_p (fn))
5340 {
5341 tree otr_type = obj_type_ref_class (fn);
5342 unsigned HOST_WIDE_INT otr_tok
5343 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5344 tree instance;
5345 ipa_polymorphic_call_context context (current_function_decl,
5346 fn, stmt, &instance);
5347 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5348 otr_type, stmt, NULL);
5349 bool final;
5350 vec <cgraph_node *> targets
5351 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5352 otr_tok, context, &final);
5353 if (dump_file)
5354 dump_possible_polymorphic_call_targets (dump_file,
5355 obj_type_ref_class (fn),
5356 otr_tok, context);
5357 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5358 {
5359 tree fn;
5360 if (targets.length () == 1)
5361 fn = targets[0]->decl;
5362 else
5363 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5364 if (dump_enabled_p ())
5365 {
5366 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5367 "converting indirect call to "
5368 "function %s\n",
5369 lang_hooks.decl_printable_name (fn, 2));
5370 }
5371 gimple_call_set_fndecl (call_stmt, fn);
5372 /* If changing the call to __builtin_unreachable
5373 or similar noreturn function, adjust gimple_call_fntype
5374 too. */
5375 if (gimple_call_noreturn_p (call_stmt)
5376 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5377 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5378 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5379 == void_type_node))
5380 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5381 maybe_remove_unused_call_args (cfun, call_stmt);
5382 modified = true;
5383 }
5384 }
5385 }
5386
5387 if (modified)
5388 {
5389 /* When changing a call into a noreturn call, cfg cleanup
5390 is needed to fix up the noreturn call. */
5391 if (!was_noreturn
5392 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5393 to_fixup.safe_push (stmt);
5394 /* When changing a condition or switch into one we know what
5395 edge will be executed, schedule a cfg cleanup. */
5396 if ((gimple_code (stmt) == GIMPLE_COND
5397 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5398 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5399 || (gimple_code (stmt) == GIMPLE_SWITCH
5400 && TREE_CODE (gimple_switch_index
5401 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5402 el_todo |= TODO_cleanup_cfg;
5403 /* If we removed EH side-effects from the statement, clean
5404 its EH information. */
5405 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5406 {
5407 bitmap_set_bit (need_eh_cleanup,
5408 gimple_bb (stmt)->index);
5409 if (dump_file && (dump_flags & TDF_DETAILS))
5410 fprintf (dump_file, " Removed EH side-effects.\n");
5411 }
5412 /* Likewise for AB side-effects. */
5413 if (can_make_abnormal_goto
5414 && !stmt_can_make_abnormal_goto (stmt))
5415 {
5416 bitmap_set_bit (need_ab_cleanup,
5417 gimple_bb (stmt)->index);
5418 if (dump_file && (dump_flags & TDF_DETAILS))
5419 fprintf (dump_file, " Removed AB side-effects.\n");
5420 }
5421 update_stmt (stmt);
5422 /* In case the VDEF on the original stmt was released, value-number
5423 it to the VUSE. This is to make vuse_ssa_val able to skip
5424 released virtual operands. */
5425 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5426 VN_INFO (vdef)->valnum = vuse;
5427 }
5428
5429 /* Make new values available - for fully redundant LHS we
5430 continue with the next stmt above and skip this. */
5431 def_operand_p defp;
5432 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5433 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5434 }
5435
5436 /* Perform elimination for the basic-block B during the domwalk. */
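/* In outline: push a NULL_TREE marker on the avail stack to delimit this
   block, skip blocks not marked BB_EXECUTABLE, replace or remove redundant
   PHI defs, run eliminate_stmt on each statement and finally propagate
   available leaders into PHI arguments on executable outgoing edges.  */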
5437
5438 edge
5439 eliminate_dom_walker::before_dom_children (basic_block b)
5440 {
5441 /* Mark new bb. */
5442 avail_stack.safe_push (NULL_TREE);
5443
5444 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5445 if (!(b->flags & BB_EXECUTABLE))
5446 return NULL;
5447
5448 vn_context_bb = b;
5449
5450 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5451 {
5452 gphi *phi = gsi.phi ();
5453 tree res = PHI_RESULT (phi);
5454
5455 if (virtual_operand_p (res))
5456 {
5457 gsi_next (&gsi);
5458 continue;
5459 }
5460
5461 tree sprime = eliminate_avail (b, res);
5462 if (sprime
5463 && sprime != res)
5464 {
5465 if (dump_file && (dump_flags & TDF_DETAILS))
5466 {
5467 fprintf (dump_file, "Replaced redundant PHI node defining ");
5468 print_generic_expr (dump_file, res);
5469 fprintf (dump_file, " with ");
5470 print_generic_expr (dump_file, sprime);
5471 fprintf (dump_file, "\n");
5472 }
5473
5474 /* If we inserted this PHI node ourself, it's not an elimination. */
5475 if (! inserted_exprs
5476 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5477 eliminations++;
5478
5479 /* If we will propagate into all uses don't bother to do
5480 anything. */
5481 if (may_propagate_copy (res, sprime))
5482 {
5483 /* Mark the PHI for removal. */
5484 to_remove.safe_push (phi);
5485 gsi_next (&gsi);
5486 continue;
5487 }
5488
5489 remove_phi_node (&gsi, false);
5490
5491 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5492 sprime = fold_convert (TREE_TYPE (res), sprime);
5493 gimple *stmt = gimple_build_assign (res, sprime);
5494 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5495 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5496 continue;
5497 }
5498
5499 eliminate_push_avail (b, res);
5500 gsi_next (&gsi);
5501 }
5502
5503 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5504 !gsi_end_p (gsi);
5505 gsi_next (&gsi))
5506 eliminate_stmt (b, &gsi);
5507
5508 /* Replace destination PHI arguments. */
5509 edge_iterator ei;
5510 edge e;
5511 FOR_EACH_EDGE (e, ei, b->succs)
5512 if (e->flags & EDGE_EXECUTABLE)
5513 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5514 !gsi_end_p (gsi);
5515 gsi_next (&gsi))
5516 {
5517 gphi *phi = gsi.phi ();
5518 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5519 tree arg = USE_FROM_PTR (use_p);
5520 if (TREE_CODE (arg) != SSA_NAME
5521 || virtual_operand_p (arg))
5522 continue;
5523 tree sprime = eliminate_avail (b, arg);
5524 if (sprime && may_propagate_copy (arg, sprime))
5525 propagate_value (use_p, sprime);
5526 }
5527
5528 vn_context_bb = NULL;
5529
5530 return NULL;
5531 }
5532
5533 /* Make no longer available leaders no longer available. */
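/* The avail stack is delimited per block by the NULL_TREE marker pushed in
   before_dom_children.  Each popped entry is either the leader made
   available in the block (in which case its slot is cleared) or a
   previously shadowed leader that is restored.  */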
5534
5535 void
5536 eliminate_dom_walker::after_dom_children (basic_block)
5537 {
5538 tree entry;
5539 while ((entry = avail_stack.pop ()) != NULL_TREE)
5540 {
5541 tree valnum = VN_INFO (entry)->valnum;
5542 tree old = avail[SSA_NAME_VERSION (valnum)];
5543 if (old == entry)
5544 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5545 else
5546 avail[SSA_NAME_VERSION (valnum)] = entry;
5547 }
5548 }
5549
5550 /* Remove queued stmts and perform delayed cleanups. */
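/* In outline: remove statements queued in to_remove (keeping defs with
   out-of-region uses live as copies when REGION_P), fix up calls that
   became noreturn from to_fixup, and purge dead EH and abnormal edges for
   the blocks recorded in need_eh_cleanup and need_ab_cleanup.  */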
5551
5552 unsigned
5553 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5554 {
5555 statistics_counter_event (cfun, "Eliminated", eliminations);
5556 statistics_counter_event (cfun, "Insertions", insertions);
5557
5558 /* We cannot remove stmts during BB walk, especially not release SSA
5559 names there as this confuses the VN machinery. The stmts ending
5560 up in to_remove are either stores or simple copies.
5561 Remove stmts in reverse order to make debug stmt creation possible. */
5562 while (!to_remove.is_empty ())
5563 {
5564 bool do_release_defs = true;
5565 gimple *stmt = to_remove.pop ();
5566
5567 /* When we are value-numbering a region we do not require exit PHIs to
5568 be present so we have to make sure to deal with uses outside of the
5569 region of stmts that we thought are eliminated.
5570 ??? Note we may be confused by uses in dead regions we didn't run
5571 elimination on. Rather than checking individual uses we accept
5572 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5573 contains such an example). */
5574 if (region_p)
5575 {
5576 if (gphi *phi = dyn_cast <gphi *> (stmt))
5577 {
5578 tree lhs = gimple_phi_result (phi);
5579 if (!has_zero_uses (lhs))
5580 {
5581 if (dump_file && (dump_flags & TDF_DETAILS))
5582 fprintf (dump_file, "Keeping eliminated stmt live "
5583 "as copy because of out-of-region uses\n");
5584 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5585 gimple *copy = gimple_build_assign (lhs, sprime);
5586 gimple_stmt_iterator gsi
5587 = gsi_after_labels (gimple_bb (stmt));
5588 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5589 do_release_defs = false;
5590 }
5591 }
5592 else if (tree lhs = gimple_get_lhs (stmt))
5593 if (TREE_CODE (lhs) == SSA_NAME
5594 && !has_zero_uses (lhs))
5595 {
5596 if (dump_file && (dump_flags & TDF_DETAILS))
5597 fprintf (dump_file, "Keeping eliminated stmt live "
5598 "as copy because of out-of-region uses\n");
5599 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5600 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5601 if (is_gimple_assign (stmt))
5602 {
5603 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5604 stmt = gsi_stmt (gsi);
5605 update_stmt (stmt);
5606 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
5607 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
5608 continue;
5609 }
5610 else
5611 {
5612 gimple *copy = gimple_build_assign (lhs, sprime);
5613 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5614 do_release_defs = false;
5615 }
5616 }
5617 }
5618
5619 if (dump_file && (dump_flags & TDF_DETAILS))
5620 {
5621 fprintf (dump_file, "Removing dead stmt ");
5622 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5623 }
5624
5625 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5626 if (gimple_code (stmt) == GIMPLE_PHI)
5627 remove_phi_node (&gsi, do_release_defs);
5628 else
5629 {
5630 basic_block bb = gimple_bb (stmt);
5631 unlink_stmt_vdef (stmt);
5632 if (gsi_remove (&gsi, true))
5633 bitmap_set_bit (need_eh_cleanup, bb->index);
5634 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5635 bitmap_set_bit (need_ab_cleanup, bb->index);
5636 if (do_release_defs)
5637 release_defs (stmt);
5638 }
5639
5640 /* Removing a stmt may expose a forwarder block. */
5641 el_todo |= TODO_cleanup_cfg;
5642 }
5643
5644 /* Fixup stmts that became noreturn calls. This may require splitting
5645 blocks and thus isn't possible during the dominator walk. Do this
5646 in reverse order so we don't inadvertently remove a stmt we want to
5647 fixup by visiting a dominating now noreturn call first. */
5648 while (!to_fixup.is_empty ())
5649 {
5650 gimple *stmt = to_fixup.pop ();
5651
5652 if (dump_file && (dump_flags & TDF_DETAILS))
5653 {
5654 fprintf (dump_file, "Fixing up noreturn call ");
5655 print_gimple_stmt (dump_file, stmt, 0);
5656 }
5657
5658 if (fixup_noreturn_call (stmt))
5659 el_todo |= TODO_cleanup_cfg;
5660 }
5661
5662 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
5663 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
5664
5665 if (do_eh_cleanup)
5666 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
5667
5668 if (do_ab_cleanup)
5669 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
5670
5671 if (do_eh_cleanup || do_ab_cleanup)
5672 el_todo |= TODO_cleanup_cfg;
5673
5674 return el_todo;
5675 }
5676
5677 /* Eliminate fully redundant computations. */
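/* As a minimal illustration (not taken from any testcase), given

     a_1 = b_2 + 1;
     c_3 = b_2 + 1;
     use (c_3);

   value numbering gives a_1 and c_3 the same value, elimination replaces
   the use of c_3 by the leader a_1 and the second statement becomes dead.  */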
5678
5679 unsigned
5680 eliminate_with_rpo_vn (bitmap inserted_exprs)
5681 {
5682 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
5683
5684 walker.walk (cfun->cfg->x_entry_block_ptr);
5685 return walker.eliminate_cleanup ();
5686 }
5687
5688 static unsigned
5689 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
5690 bool iterate, bool eliminate);
5691
5692 void
5693 run_rpo_vn (vn_lookup_kind kind)
5694 {
5695 default_vn_walk_kind = kind;
5696 do_rpo_vn (cfun, NULL, NULL, true, false);
5697
5698 /* ??? Prune requirement of these. */
5699 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
5700 constant_value_ids = BITMAP_ALLOC (NULL);
5701
5702 /* Initialize the value ids and prune out remaining VN_TOPs
5703 from dead code. */
5704 tree name;
5705 unsigned i;
5706 FOR_EACH_SSA_NAME (i, name, cfun)
5707 {
5708 vn_ssa_aux_t info = VN_INFO (name);
5709 if (!info->visited
5710 || info->valnum == VN_TOP)
5711 info->valnum = name;
5712 if (info->valnum == name)
5713 info->value_id = get_next_value_id ();
5714 else if (is_gimple_min_invariant (info->valnum))
5715 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5716 }
5717
5718 /* Propagate. */
5719 FOR_EACH_SSA_NAME (i, name, cfun)
5720 {
5721 vn_ssa_aux_t info = VN_INFO (name);
5722 if (TREE_CODE (info->valnum) == SSA_NAME
5723 && info->valnum != name
5724 && info->value_id != VN_INFO (info->valnum)->value_id)
5725 info->value_id = VN_INFO (info->valnum)->value_id;
5726 }
5727
5728 set_hashtable_value_ids ();
5729
5730 if (dump_file && (dump_flags & TDF_DETAILS))
5731 {
5732 fprintf (dump_file, "Value numbers:\n");
5733 FOR_EACH_SSA_NAME (i, name, cfun)
5734 {
5735 if (VN_INFO (name)->visited
5736 && SSA_VAL (name) != name)
5737 {
5738 print_generic_expr (dump_file, name);
5739 fprintf (dump_file, " = ");
5740 print_generic_expr (dump_file, SSA_VAL (name));
5741 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
5742 }
5743 }
5744 }
5745 }
5746
5747 /* Free VN associated data structures. */
5748
5749 void
5750 free_rpo_vn (void)
5751 {
5752 free_vn_table (valid_info);
5753 XDELETE (valid_info);
5754 obstack_free (&vn_tables_obstack, NULL);
5755 obstack_free (&vn_tables_insert_obstack, NULL);
5756
5757 vn_ssa_aux_iterator_type it;
5758 vn_ssa_aux_t info;
5759 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
5760 if (info->needs_insertion)
5761 release_ssa_name (info->name);
5762 obstack_free (&vn_ssa_aux_obstack, NULL);
5763 delete vn_ssa_aux_hash;
5764
5765 delete constant_to_value_id;
5766 constant_to_value_id = NULL;
5767 BITMAP_FREE (constant_value_ids);
5768 }
5769
5770 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
5771
5772 static tree
5773 vn_lookup_simplify_result (gimple_match_op *res_op)
5774 {
5775 if (!res_op->code.is_tree_code ())
5776 return NULL_TREE;
5777 tree *ops = res_op->ops;
5778 unsigned int length = res_op->num_ops;
5779 if (res_op->code == CONSTRUCTOR
5780 /* ??? We're arriving here with SCCVN's view of a decomposed CONSTRUCTOR,
5781 while GIMPLE / match-and-simplify sees the CONSTRUCTOR as a GENERIC tree. */
5782 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
5783 {
5784 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
5785 ops = XALLOCAVEC (tree, length);
5786 for (unsigned i = 0; i < length; ++i)
5787 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
5788 }
5789 vn_nary_op_t vnresult = NULL;
5790 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
5791 res_op->type, ops, &vnresult);
5792 /* If this is used from expression simplification make sure to
5793 return an available expression. */
5794 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
5795 res = rpo_avail->eliminate_avail (vn_context_bb, res);
5796 return res;
5797 }
5798
5799 rpo_elim::~rpo_elim ()
5800 {
5801 /* Release the avail vectors. */
5802 for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
5803 i != m_rpo_avail.end (); ++i)
5804 (*i).second.release ();
5805 }
5806
5807 /* Return a leader for OPs value that is valid at BB. */
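/* The m_rpo_avail vector for a value records (BB index, SSA version) pairs
   in the order leaders were pushed.  We scan it from the newest entry
   backwards and return the first leader whose recorded block dominates BB
   (via dominated_by_p_w_unex), refusing leaders that would break
   loop-closed SSA.  */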
5808
5809 tree
5810 rpo_elim::eliminate_avail (basic_block bb, tree op)
5811 {
5812 bool visited;
5813 tree valnum = SSA_VAL (op, &visited);
5814 /* If we didn't visit OP then it must be defined outside of the
5815 region we process and also dominate it. So it is available. */
5816 if (!visited)
5817 return op;
5818 if (TREE_CODE (valnum) == SSA_NAME)
5819 {
5820 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5821 return valnum;
5822 vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
5823 if (!av || av->is_empty ())
5824 return NULL_TREE;
5825 int i = av->length () - 1;
5826 if ((*av)[i].first == bb->index)
5827 /* On tramp3d 90% of the cases are here. */
5828 return ssa_name ((*av)[i].second);
5829 do
5830 {
5831 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
5832 /* ??? During elimination we have to use availability at the
5833 definition site of a use we try to replace. This
5834 is required to not run into inconsistencies because
5835 of dominated_by_p_w_unex behavior and removing a definition
5836 while not replacing all uses.
5837 ??? We could try to consistently walk dominators
5838 ignoring non-executable regions. The nearest common
5839 dominator of bb and abb is where we can stop walking. We
5840 may also be able to "pre-compute" (bits of) the next immediate
5841 (non-)dominator during the RPO walk when marking edges as
5842 executable. */
5843 if (dominated_by_p_w_unex (bb, abb))
5844 {
5845 tree leader = ssa_name ((*av)[i].second);
5846 /* Prevent eliminations that break loop-closed SSA. */
5847 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
5848 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
5849 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
5850 (leader))->loop_father,
5851 bb))
5852 return NULL_TREE;
5853 if (dump_file && (dump_flags & TDF_DETAILS))
5854 {
5855 print_generic_expr (dump_file, leader);
5856 fprintf (dump_file, " is available for ");
5857 print_generic_expr (dump_file, valnum);
5858 fprintf (dump_file, "\n");
5859 }
5860 /* On tramp3d 99% of the _remaining_ cases succeed at
5861 the first entry. */
5862 return leader;
5863 }
5864 /* ??? Can we somehow skip to the immediate dominator
5865 RPO index (bb_to_rpo)? Again, maybe not worth, on
5866 tramp3d the worst number of elements in the vector is 9. */
5867 }
5868 while (--i >= 0);
5869 }
5870 else if (valnum != VN_TOP)
5871 /* valnum is is_gimple_min_invariant. */
5872 return valnum;
5873 return NULL_TREE;
5874 }
5875
5876 /* Make LEADER a leader for its value at BB. */
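/* Leaders are appended in processing order, so the most recently pushed
   entry is the one eliminate_avail checks first via its BB-index fast
   path.  */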
5877
5878 void
5879 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
5880 {
5881 tree valnum = VN_INFO (leader)->valnum;
5882 if (valnum == VN_TOP)
5883 return;
5884 if (dump_file && (dump_flags & TDF_DETAILS))
5885 {
5886 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
5887 print_generic_expr (dump_file, leader);
5888 fprintf (dump_file, " for value ");
5889 print_generic_expr (dump_file, valnum);
5890 fprintf (dump_file, "\n");
5891 }
5892 bool existed;
5893 vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
5894 if (!existed)
5895 {
5896 new (&av) vec<std::pair<int, int> >;
5897 av = vNULL;
5898 av.reserve_exact (2);
5899 }
5900 av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
5901 }
5902
5903 /* Valueization hook for RPO VN plus required state. */
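/* Returns NAME's value number, but SSA values are only returned when an
   available leader exists at vn_context_bb; otherwise NAME itself is
   returned.  */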
5904
5905 tree
5906 rpo_vn_valueize (tree name)
5907 {
5908 if (TREE_CODE (name) == SSA_NAME)
5909 {
5910 vn_ssa_aux_t val = VN_INFO (name);
5911 if (val)
5912 {
5913 tree tem = val->valnum;
5914 if (tem != VN_TOP && tem != name)
5915 {
5916 if (TREE_CODE (tem) != SSA_NAME)
5917 return tem;
5918 /* For all values we only valueize to an available leader
5919 which means we can use SSA name info without restriction. */
5920 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
5921 if (tem)
5922 return tem;
5923 }
5924 }
5925 }
5926 return name;
5927 }
5928
5929 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
5930 inverted condition. */
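/* For example, on the edge taken when a < b is true this records a != b
   and a <= b as true and a > b and a == b as false, which lets a later
   dominated test like if (a <= b) simplify via the predicated lookup in
   process_bb.  */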
5931
5932 static void
5933 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
5934 {
5935 switch (code)
5936 {
5937 case LT_EXPR:
5938 /* a < b -> a {!,<}= b */
5939 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5940 ops, boolean_true_node, 0, pred_e);
5941 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
5942 ops, boolean_true_node, 0, pred_e);
5943 /* a < b -> ! a {>,=} b */
5944 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5945 ops, boolean_false_node, 0, pred_e);
5946 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5947 ops, boolean_false_node, 0, pred_e);
5948 break;
5949 case GT_EXPR:
5950 /* a > b -> a {!,>}= b */
5951 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5952 ops, boolean_true_node, 0, pred_e);
5953 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
5954 ops, boolean_true_node, 0, pred_e);
5955 /* a > b -> ! a {<,=} b */
5956 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5957 ops, boolean_false_node, 0, pred_e);
5958 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5959 ops, boolean_false_node, 0, pred_e);
5960 break;
5961 case EQ_EXPR:
5962 /* a == b -> ! a {<,>} b */
5963 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5964 ops, boolean_false_node, 0, pred_e);
5965 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5966 ops, boolean_false_node, 0, pred_e);
5967 break;
5968 case LE_EXPR:
5969 case GE_EXPR:
5970 case NE_EXPR:
5971 /* Nothing besides inverted condition. */
5972 break;
5973 default:;
5974 }
5975 }
5976
5977 /* Main stmt worker for RPO VN, process BB. */
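/* In outline: value-number the PHIs (unless SKIP_PHIS), value-number each
   statement, derive the taken outgoing edge for GIMPLE_COND, GIMPLE_SWITCH
   and GIMPLE_GOTO to mark edges and destination blocks executable (or
   record predicated expressions when the edge is unknown), optionally
   eliminate on the fly, and finally substitute leaders into successor PHI
   arguments.  */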
5978
5979 static unsigned
5980 process_bb (rpo_elim &avail, basic_block bb,
5981 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
5982 bool do_region, bitmap exit_bbs, bool skip_phis)
5983 {
5984 unsigned todo = 0;
5985 edge_iterator ei;
5986 edge e;
5987
5988 vn_context_bb = bb;
5989
5990 /* If we are in loop-closed SSA preserve this state. This is
5991 relevant when called on regions from outside of FRE/PRE. */
5992 bool lc_phi_nodes = false;
5993 if (!skip_phis
5994 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
5995 FOR_EACH_EDGE (e, ei, bb->preds)
5996 if (e->src->loop_father != e->dest->loop_father
5997 && flow_loop_nested_p (e->dest->loop_father,
5998 e->src->loop_father))
5999 {
6000 lc_phi_nodes = true;
6001 break;
6002 }
6003
6004 /* When we visit a loop header substitute into loop info. */
6005 if (!iterate && eliminate && bb->loop_father->header == bb)
6006 {
6007 /* Keep fields in sync with substitute_in_loop_info. */
6008 if (bb->loop_father->nb_iterations)
6009 bb->loop_father->nb_iterations
6010 = simplify_replace_tree (bb->loop_father->nb_iterations,
6011 NULL_TREE, NULL_TREE, vn_valueize);
6012 }
6013
6014 /* Value-number all defs in the basic-block. */
6015 if (!skip_phis)
6016 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6017 gsi_next (&gsi))
6018 {
6019 gphi *phi = gsi.phi ();
6020 tree res = PHI_RESULT (phi);
6021 vn_ssa_aux_t res_info = VN_INFO (res);
6022 if (!bb_visited)
6023 {
6024 gcc_assert (!res_info->visited);
6025 res_info->valnum = VN_TOP;
6026 res_info->visited = true;
6027 }
6028
6029 /* When not iterating force backedge values to varying. */
6030 visit_stmt (phi, !iterate_phis);
6031 if (virtual_operand_p (res))
6032 continue;
6033
6034 /* Eliminate. */
6035 /* The interesting case for correctness of how we handle backedges
6036 and availability is gcc.dg/tree-ssa/pr22230.c;
6037 gcc.dg/tree-ssa/ssa-sccvn-2.c covers the optimization. */
6038 tree val = res_info->valnum;
6039 if (res != val && !iterate && eliminate)
6040 {
6041 if (tree leader = avail.eliminate_avail (bb, res))
6042 {
6043 if (leader != res
6044 /* Preserve loop-closed SSA form. */
6045 && (! lc_phi_nodes
6046 || is_gimple_min_invariant (leader)))
6047 {
6048 if (dump_file && (dump_flags & TDF_DETAILS))
6049 {
6050 fprintf (dump_file, "Replaced redundant PHI node "
6051 "defining ");
6052 print_generic_expr (dump_file, res);
6053 fprintf (dump_file, " with ");
6054 print_generic_expr (dump_file, leader);
6055 fprintf (dump_file, "\n");
6056 }
6057 avail.eliminations++;
6058
6059 if (may_propagate_copy (res, leader))
6060 {
6061 /* Schedule for removal. */
6062 avail.to_remove.safe_push (phi);
6063 continue;
6064 }
6065 /* ??? Else generate a copy stmt. */
6066 }
6067 }
6068 }
6069 /* Only make defs available that are not already. But make
6070 sure loop-closed SSA PHI node defs are picked up for
6071 downstream uses. */
6072 if (lc_phi_nodes
6073 || res == val
6074 || ! avail.eliminate_avail (bb, res))
6075 avail.eliminate_push_avail (bb, res);
6076 }
6077
6078 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6079 we do this when processing the last stmt as we have to do this
6080 before elimination, which otherwise forces GIMPLE_CONDs to
6081 if (1 != 0) style when seeing non-executable edges. */
6082 if (gsi_end_p (gsi_start_bb (bb)))
6083 {
6084 FOR_EACH_EDGE (e, ei, bb->succs)
6085 {
6086 if (!(e->flags & EDGE_EXECUTABLE))
6087 {
6088 if (dump_file && (dump_flags & TDF_DETAILS))
6089 fprintf (dump_file,
6090 "marking outgoing edge %d -> %d executable\n",
6091 e->src->index, e->dest->index);
6092 e->flags |= EDGE_EXECUTABLE;
6093 e->dest->flags |= BB_EXECUTABLE;
6094 }
6095 else if (!(e->dest->flags & BB_EXECUTABLE))
6096 {
6097 if (dump_file && (dump_flags & TDF_DETAILS))
6098 fprintf (dump_file,
6099 "marking destination block %d reachable\n",
6100 e->dest->index);
6101 e->dest->flags |= BB_EXECUTABLE;
6102 }
6103 }
6104 }
6105 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6106 !gsi_end_p (gsi); gsi_next (&gsi))
6107 {
6108 ssa_op_iter i;
6109 tree op;
6110 if (!bb_visited)
6111 {
6112 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6113 {
6114 vn_ssa_aux_t op_info = VN_INFO (op);
6115 gcc_assert (!op_info->visited);
6116 op_info->valnum = VN_TOP;
6117 op_info->visited = true;
6118 }
6119
6120 /* We somehow have to deal with uses that are not defined
6121 in the processed region. Forcing unvisited uses to
6122 varying here doesn't play well with def-use following during
6123 expression simplification, so we deal with this by checking
6124 the visited flag in SSA_VAL. */
6125 }
6126
6127 visit_stmt (gsi_stmt (gsi));
6128
6129 gimple *last = gsi_stmt (gsi);
6130 e = NULL;
6131 switch (gimple_code (last))
6132 {
6133 case GIMPLE_SWITCH:
6134 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6135 (as_a <gswitch *> (last))));
6136 break;
6137 case GIMPLE_COND:
6138 {
6139 tree lhs = vn_valueize (gimple_cond_lhs (last));
6140 tree rhs = vn_valueize (gimple_cond_rhs (last));
6141 tree val = gimple_simplify (gimple_cond_code (last),
6142 boolean_type_node, lhs, rhs,
6143 NULL, vn_valueize);
6144 /* If the condition didn't simplify see if we have recorded
6145 an expression from the edges taken so far. */
6146 if (! val || TREE_CODE (val) != INTEGER_CST)
6147 {
6148 vn_nary_op_t vnresult;
6149 tree ops[2];
6150 ops[0] = lhs;
6151 ops[1] = rhs;
6152 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6153 boolean_type_node, ops,
6154 &vnresult);
6155 /* Did we get a predicated value? */
6156 if (! val && vnresult && vnresult->predicated_values)
6157 {
6158 val = vn_nary_op_get_predicated_value (vnresult, bb);
6159 if (val && dump_file && (dump_flags & TDF_DETAILS))
6160 {
6161 fprintf (dump_file, "Got predicated value ");
6162 print_generic_expr (dump_file, val, TDF_NONE);
6163 fprintf (dump_file, " for ");
6164 print_gimple_stmt (dump_file, last, TDF_SLIM);
6165 }
6166 }
6167 }
6168 if (val)
6169 e = find_taken_edge (bb, val);
6170 if (! e)
6171 {
6172 /* If we didn't manage to compute the taken edge then
6173 push predicated expressions for the condition itself
6174 and related conditions to the hashtables. This allows
6175 simplification of redundant conditions which is
6176 important as early cleanup. */
6177 edge true_e, false_e;
6178 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6179 enum tree_code code = gimple_cond_code (last);
6180 enum tree_code icode
6181 = invert_tree_comparison (code, HONOR_NANS (lhs));
6182 tree ops[2];
6183 ops[0] = lhs;
6184 ops[1] = rhs;
6185 if (do_region
6186 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6187 true_e = NULL;
6188 if (do_region
6189 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6190 false_e = NULL;
6191 if (true_e)
6192 vn_nary_op_insert_pieces_predicated
6193 (2, code, boolean_type_node, ops,
6194 boolean_true_node, 0, true_e);
6195 if (false_e)
6196 vn_nary_op_insert_pieces_predicated
6197 (2, code, boolean_type_node, ops,
6198 boolean_false_node, 0, false_e);
6199 if (icode != ERROR_MARK)
6200 {
6201 if (true_e)
6202 vn_nary_op_insert_pieces_predicated
6203 (2, icode, boolean_type_node, ops,
6204 boolean_false_node, 0, true_e);
6205 if (false_e)
6206 vn_nary_op_insert_pieces_predicated
6207 (2, icode, boolean_type_node, ops,
6208 boolean_true_node, 0, false_e);
6209 }
6210 /* Relax for non-integers, inverted condition handled
6211 above. */
6212 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6213 {
6214 if (true_e)
6215 insert_related_predicates_on_edge (code, ops, true_e);
6216 if (false_e)
6217 insert_related_predicates_on_edge (icode, ops, false_e);
6218 }
6219 }
6220 break;
6221 }
6222 case GIMPLE_GOTO:
6223 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6224 break;
6225 default:
6226 e = NULL;
6227 }
6228 if (e)
6229 {
6230 todo = TODO_cleanup_cfg;
6231 if (!(e->flags & EDGE_EXECUTABLE))
6232 {
6233 if (dump_file && (dump_flags & TDF_DETAILS))
6234 fprintf (dump_file,
6235 "marking known outgoing %sedge %d -> %d executable\n",
6236 e->flags & EDGE_DFS_BACK ? "back-" : "",
6237 e->src->index, e->dest->index);
6238 e->flags |= EDGE_EXECUTABLE;
6239 e->dest->flags |= BB_EXECUTABLE;
6240 }
6241 else if (!(e->dest->flags & BB_EXECUTABLE))
6242 {
6243 if (dump_file && (dump_flags & TDF_DETAILS))
6244 fprintf (dump_file,
6245 "marking destination block %d reachable\n",
6246 e->dest->index);
6247 e->dest->flags |= BB_EXECUTABLE;
6248 }
6249 }
6250 else if (gsi_one_before_end_p (gsi))
6251 {
6252 FOR_EACH_EDGE (e, ei, bb->succs)
6253 {
6254 if (!(e->flags & EDGE_EXECUTABLE))
6255 {
6256 if (dump_file && (dump_flags & TDF_DETAILS))
6257 fprintf (dump_file,
6258 "marking outgoing edge %d -> %d executable\n",
6259 e->src->index, e->dest->index);
6260 e->flags |= EDGE_EXECUTABLE;
6261 e->dest->flags |= BB_EXECUTABLE;
6262 }
6263 else if (!(e->dest->flags & BB_EXECUTABLE))
6264 {
6265 if (dump_file && (dump_flags & TDF_DETAILS))
6266 fprintf (dump_file,
6267 "marking destination block %d reachable\n",
6268 e->dest->index);
6269 e->dest->flags |= BB_EXECUTABLE;
6270 }
6271 }
6272 }
6273
6274 /* Eliminate. That also pushes to avail. */
6275 if (eliminate && ! iterate)
6276 avail.eliminate_stmt (bb, &gsi);
6277 else
6278 /* If not eliminating, make all not already available defs
6279 available. */
6280 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6281 if (! avail.eliminate_avail (bb, op))
6282 avail.eliminate_push_avail (bb, op);
6283 }
6284
6285 /* Eliminate in destination PHI arguments. Always substitute in dest
6286 PHIs, even for non-executable edges. This handles region
6287 exits PHIs. */
6288 if (!iterate && eliminate)
6289 FOR_EACH_EDGE (e, ei, bb->succs)
6290 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6291 !gsi_end_p (gsi); gsi_next (&gsi))
6292 {
6293 gphi *phi = gsi.phi ();
6294 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6295 tree arg = USE_FROM_PTR (use_p);
6296 if (TREE_CODE (arg) != SSA_NAME
6297 || virtual_operand_p (arg))
6298 continue;
6299 tree sprime;
6300 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6301 {
6302 sprime = SSA_VAL (arg);
6303 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6304 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6305 }
6306 else
6307 /* Look for sth available at the definition block of the argument.
6308 This avoids inconsistencies between availability there which
6309 decides if the stmt can be removed and availability at the
6310 use site. The SSA property ensures that things available
6311 at the definition are also available at uses. */
6312 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6313 arg);
6314 if (sprime
6315 && sprime != arg
6316 && may_propagate_copy (arg, sprime))
6317 propagate_value (use_p, sprime);
6318 }
6319
6320 vn_context_bb = NULL;
6321 return todo;
6322 }
6323
6324 /* Unwind state per basic-block. */
6325
6326 struct unwind_state
6327 {
6328 /* Times this block has been visited. */
6329 unsigned visited;
6330 /* Whether to handle this as iteration point or whether to treat
6331 incoming backedge PHI values as varying. */
6332 bool iterate;
6333 /* Maximum RPO index this block is reachable from. */
6334 int max_rpo;
6335 /* Unwind state. */
6336 void *ob_top;
6337 vn_reference_t ref_top;
6338 vn_phi_t phi_top;
6339 vn_nary_op_t nary_top;
6340 };
6341
6342 /* Unwind the RPO VN state for iteration. */
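/* This rolls back nary, phi and reference hashtable insertions to the
   recorded tops (restoring predication-shadowed nary entries), frees the
   VN obstack back to ob_top and prunes availability entries pushed by
   blocks at or after RPO_IDX.  */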
6343
6344 static void
6345 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6346 {
6347 gcc_assert (to->iterate);
6348 for (; last_inserted_nary != to->nary_top;
6349 last_inserted_nary = last_inserted_nary->next)
6350 {
6351 vn_nary_op_t *slot;
6352 slot = valid_info->nary->find_slot_with_hash
6353 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6354 /* Predication causes the need to restore previous state. */
6355 if ((*slot)->unwind_to)
6356 *slot = (*slot)->unwind_to;
6357 else
6358 valid_info->nary->clear_slot (slot);
6359 }
6360 for (; last_inserted_phi != to->phi_top;
6361 last_inserted_phi = last_inserted_phi->next)
6362 {
6363 vn_phi_t *slot;
6364 slot = valid_info->phis->find_slot_with_hash
6365 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6366 valid_info->phis->clear_slot (slot);
6367 }
6368 for (; last_inserted_ref != to->ref_top;
6369 last_inserted_ref = last_inserted_ref->next)
6370 {
6371 vn_reference_t *slot;
6372 slot = valid_info->references->find_slot_with_hash
6373 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6374 (*slot)->operands.release ();
6375 valid_info->references->clear_slot (slot);
6376 }
6377 obstack_free (&vn_tables_obstack, to->ob_top);
6378
6379 /* Prune [rpo_idx, ] from avail. */
6380 /* ??? This is O(number-of-values-in-region) which is
6381 O(region-size) rather than O(iteration-piece). */
6382 for (rpo_elim::rpo_avail_t::iterator i
6383 = avail.m_rpo_avail.begin ();
6384 i != avail.m_rpo_avail.end (); ++i)
6385 {
6386 while (! (*i).second.is_empty ())
6387 {
6388 if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
6389 break;
6390 (*i).second.pop ();
6391 }
6392 }
6393 }
6394
6395 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6396 If ITERATE is true then treat backedges optimistically as not
6397 executed and iterate. If ELIMINATE is true then perform
6398 elimination, otherwise leave that to the caller. */
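/* The driver computes a reverse post order of the region, initializes the
   per-block unwind and executable state, processes blocks in RPO (when
   ITERATE, unwinding and re-processing from a backedge destination whose
   PHI values changed), and finally runs elimination or its delayed
   cleanups.  */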
6399
6400 static unsigned
6401 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6402 bool iterate, bool eliminate)
6403 {
6404 unsigned todo = 0;
6405
6406 /* We currently do not support region-based iteration when
6407 elimination is requested. */
6408 gcc_assert (!entry || !iterate || !eliminate);
6409 /* When iterating we need loop info up-to-date. */
6410 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6411
6412 bool do_region = entry != NULL;
6413 if (!do_region)
6414 {
6415 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6416 exit_bbs = BITMAP_ALLOC (NULL);
6417 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6418 }
6419
6420 /* Clear EDGE_DFS_BACK on "all" entry edges; the RPO order compute will
6421 re-mark those that are contained in the region. */
6422 edge_iterator ei;
6423 edge e;
6424 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6425 e->flags &= ~EDGE_DFS_BACK;
6426
6427 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6428 int n = rev_post_order_and_mark_dfs_back_seme
6429 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
6430 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6431 for (int i = 0; i < n / 2; ++i)
6432 std::swap (rpo[i], rpo[n-i-1]);
6433
6434 if (!do_region)
6435 BITMAP_FREE (exit_bbs);
6436
6437 /* If there are any non-DFS_BACK edges into entry->dest skip
6438 processing PHI nodes for that block. This supports
6439 value-numbering loop bodies w/o the actual loop. */
6440 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6441 if (e != entry
6442 && !(e->flags & EDGE_DFS_BACK))
6443 break;
6444 bool skip_entry_phis = e != NULL;
6445 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
6446 fprintf (dump_file, "Region does not contain all edges into "
6447 "the entry block, skipping its PHIs.\n");
6448
6449 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6450 for (int i = 0; i < n; ++i)
6451 bb_to_rpo[rpo[i]] = i;
6452
6453 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6454
6455 rpo_elim avail (entry->dest);
6456 rpo_avail = &avail;
6457
6458 /* Verify we have no extra entries into the region. */
6459 if (flag_checking && do_region)
6460 {
6461 auto_bb_flag bb_in_region (fn);
6462 for (int i = 0; i < n; ++i)
6463 {
6464 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6465 bb->flags |= bb_in_region;
6466 }
6467 /* We can't merge the first two loops because we cannot rely
6468 on EDGE_DFS_BACK for edges not within the region. But if
6469 we decide to always have the bb_in_region flag we can
6470 do the checking during the RPO walk itself (but then it's
6471 also easy to handle MEME conservatively). */
6472 for (int i = 0; i < n; ++i)
6473 {
6474 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6475 edge e;
6476 edge_iterator ei;
6477 FOR_EACH_EDGE (e, ei, bb->preds)
6478 gcc_assert (e == entry
6479 || (skip_entry_phis && bb == entry->dest)
6480 || (e->src->flags & bb_in_region));
6481 }
6482 for (int i = 0; i < n; ++i)
6483 {
6484 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6485 bb->flags &= ~bb_in_region;
6486 }
6487 }
6488
6489 /* Create the VN state. For the initial size of the various hashtables
6490 use a heuristic based on region size and number of SSA names. */
6491 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6492 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6493 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6494 next_value_id = 1;
6495
6496 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6497 gcc_obstack_init (&vn_ssa_aux_obstack);
6498
6499 gcc_obstack_init (&vn_tables_obstack);
6500 gcc_obstack_init (&vn_tables_insert_obstack);
6501 valid_info = XCNEW (struct vn_tables_s);
6502 allocate_vn_table (valid_info, region_size);
6503 last_inserted_ref = NULL;
6504 last_inserted_phi = NULL;
6505 last_inserted_nary = NULL;
6506
6507 vn_valueize = rpo_vn_valueize;
6508
6509 /* Initialize the unwind state and edge/BB executable state. */
6510 bool need_max_rpo_iterate = false;
6511 for (int i = 0; i < n; ++i)
6512 {
6513 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6514 rpo_state[i].visited = 0;
6515 rpo_state[i].max_rpo = i;
6516 bb->flags &= ~BB_EXECUTABLE;
6517 bool has_backedges = false;
6518 edge e;
6519 edge_iterator ei;
6520 FOR_EACH_EDGE (e, ei, bb->preds)
6521 {
6522 if (e->flags & EDGE_DFS_BACK)
6523 has_backedges = true;
6524 e->flags &= ~EDGE_EXECUTABLE;
6525 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
6526 continue;
6527 if (bb_to_rpo[e->src->index] > i)
6528 {
6529 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6530 bb_to_rpo[e->src->index]);
6531 need_max_rpo_iterate = true;
6532 }
6533 else
6534 rpo_state[i].max_rpo
6535 = MAX (rpo_state[i].max_rpo,
6536 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6537 }
6538 rpo_state[i].iterate = iterate && has_backedges;
6539 }
6540 entry->flags |= EDGE_EXECUTABLE;
6541 entry->dest->flags |= BB_EXECUTABLE;
6542
6543 /* When there are irreducible regions the simplistic max_rpo computation
6544 above for the case of backedges doesn't work and we need to iterate
6545 until there are no more changes. */
6546 unsigned nit = 0;
6547 while (need_max_rpo_iterate)
6548 {
6549 nit++;
6550 need_max_rpo_iterate = false;
6551 for (int i = 0; i < n; ++i)
6552 {
6553 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6554 edge e;
6555 edge_iterator ei;
6556 FOR_EACH_EDGE (e, ei, bb->preds)
6557 {
6558 if (e == entry || (skip_entry_phis && bb == entry->dest))
6559 continue;
6560 int max_rpo = MAX (rpo_state[i].max_rpo,
6561 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6562 if (rpo_state[i].max_rpo != max_rpo)
6563 {
6564 rpo_state[i].max_rpo = max_rpo;
6565 need_max_rpo_iterate = true;
6566 }
6567 }
6568 }
6569 }
6570 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
6571
6572 /* As a heuristic to improve compile-time we handle only the N innermost
6573 loops and the outermost one optimistically. */
6574 if (iterate)
6575 {
6576 loop_p loop;
6577 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6578 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6579 if (loop_depth (loop) > max_depth)
6580 for (unsigned i = 2;
6581 i < loop_depth (loop) - max_depth; ++i)
6582 {
6583 basic_block header = superloop_at_depth (loop, i)->header;
6584 bool non_latch_backedge = false;
6585 edge e;
6586 edge_iterator ei;
6587 FOR_EACH_EDGE (e, ei, header->preds)
6588 if (e->flags & EDGE_DFS_BACK)
6589 {
6590 /* There can be a non-latch backedge into the header
6591 which is part of an outer irreducible region. We
6592 cannot avoid iterating this block then. */
6593 if (!dominated_by_p (CDI_DOMINATORS,
6594 e->src, e->dest))
6595 {
6596 if (dump_file && (dump_flags & TDF_DETAILS))
6597 fprintf (dump_file, "non-latch backedge %d -> %d "
6598 "forces iteration of loop %d\n",
6599 e->src->index, e->dest->index, loop->num);
6600 non_latch_backedge = true;
6601 }
6602 else
6603 e->flags |= EDGE_EXECUTABLE;
6604 }
6605 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
6606 }
6607 }
6608
6609 uint64_t nblk = 0;
6610 int idx = 0;
6611 if (iterate)
6612 /* Go and process all blocks, iterating as necessary. */
6613 do
6614 {
6615 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6616
6617 /* If the block has incoming backedges remember unwind state. This
6618 is required even for non-executable blocks since in irreducible
6619 regions we might reach them via the backedge and re-start iterating
6620 from there.
6621 Note we can individually mark blocks with incoming backedges to
6622 not iterate where we then handle PHIs conservatively. We do that
6623 heuristically to reduce compile-time for degenerate cases. */
6624 if (rpo_state[idx].iterate)
6625 {
6626 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
6627 rpo_state[idx].ref_top = last_inserted_ref;
6628 rpo_state[idx].phi_top = last_inserted_phi;
6629 rpo_state[idx].nary_top = last_inserted_nary;
6630 }
6631
6632 if (!(bb->flags & BB_EXECUTABLE))
6633 {
6634 if (dump_file && (dump_flags & TDF_DETAILS))
6635 fprintf (dump_file, "Block %d: BB%d found not executable\n",
6636 idx, bb->index);
6637 idx++;
6638 continue;
6639 }
6640
6641 if (dump_file && (dump_flags & TDF_DETAILS))
6642 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6643 nblk++;
6644 todo |= process_bb (avail, bb,
6645 rpo_state[idx].visited != 0,
6646 rpo_state[idx].iterate,
6647 iterate, eliminate, do_region, exit_bbs, false);
6648 rpo_state[idx].visited++;
6649
6650 /* Verify if changed values flow over executable outgoing backedges
6651 and those change destination PHI values (that's the thing we
6652 can easily verify). Reduce over all such edges to the farthest
6653 away PHI. */
6654 int iterate_to = -1;
6655 edge_iterator ei;
6656 edge e;
6657 FOR_EACH_EDGE (e, ei, bb->succs)
6658 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
6659 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
6660 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
6661 {
6662 int destidx = bb_to_rpo[e->dest->index];
6663 if (!rpo_state[destidx].visited)
6664 {
6665 if (dump_file && (dump_flags & TDF_DETAILS))
6666 fprintf (dump_file, "Unvisited destination %d\n",
6667 e->dest->index);
6668 if (iterate_to == -1 || destidx < iterate_to)
6669 iterate_to = destidx;
6670 continue;
6671 }
6672 if (dump_file && (dump_flags & TDF_DETAILS))
6673 fprintf (dump_file, "Looking for changed values of backedge"
6674 " %d->%d destination PHIs\n",
6675 e->src->index, e->dest->index);
6676 vn_context_bb = e->dest;
6677 gphi_iterator gsi;
6678 for (gsi = gsi_start_phis (e->dest);
6679 !gsi_end_p (gsi); gsi_next (&gsi))
6680 {
6681 bool inserted = false;
6682 /* While we'd ideally just iterate on value changes
6683 we CSE PHIs and do that even across basic-block
6684 boundaries. So even hashtable state changes can
6685 be important (which is roughly equivalent to
6686 PHI argument value changes). To not excessively
6687 iterate because of that we use GF_PLF_1 to track whether a PHI
6688 was CSEd to. */
6689 bool phival_changed;
6690 if ((phival_changed = visit_phi (gsi.phi (),
6691 &inserted, false))
6692 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
6693 {
6694 if (!phival_changed
6695 && dump_file && (dump_flags & TDF_DETAILS))
6696 fprintf (dump_file, "PHI was CSEd and hashtable "
6697 "state (changed)\n");
6698 if (iterate_to == -1 || destidx < iterate_to)
6699 iterate_to = destidx;
6700 break;
6701 }
6702 }
6703 vn_context_bb = NULL;
6704 }
6705 if (iterate_to != -1)
6706 {
6707 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
6708 idx = iterate_to;
6709 if (dump_file && (dump_flags & TDF_DETAILS))
6710 fprintf (dump_file, "Iterating to %d BB%d\n",
6711 iterate_to, rpo[iterate_to]);
6712 continue;
6713 }
6714
6715 idx++;
6716 }
6717 while (idx < n);
6718
6719 else /* !iterate */
6720 {
6721 /* Process all blocks greedily with a worklist that enforces RPO
6722 processing of reachable blocks. */
6723 auto_bitmap worklist;
6724 bitmap_set_bit (worklist, 0);
6725 while (!bitmap_empty_p (worklist))
6726 {
6727 int idx = bitmap_first_set_bit (worklist);
6728 bitmap_clear_bit (worklist, idx);
6729 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6730 gcc_assert ((bb->flags & BB_EXECUTABLE)
6731 && !rpo_state[idx].visited);
6732
6733 if (dump_file && (dump_flags & TDF_DETAILS))
6734 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6735
6736 /* When we run into predecessor edges where we cannot trust their
6737 executable state mark them executable so PHI processing will
6738 be conservative.
6739 ??? Do we need to force arguments flowing over that edge
6740 to be varying or will they even always be? */
6741 edge_iterator ei;
6742 edge e;
6743 FOR_EACH_EDGE (e, ei, bb->preds)
6744 if (!(e->flags & EDGE_EXECUTABLE)
6745 && (bb == entry->dest
6746 || (!rpo_state[bb_to_rpo[e->src->index]].visited
6747 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
6748 >= (int)idx))))
6749 {
6750 if (dump_file && (dump_flags & TDF_DETAILS))
6751 fprintf (dump_file, "Cannot trust state of predecessor "
6752 "edge %d -> %d, marking executable\n",
6753 e->src->index, e->dest->index);
6754 e->flags |= EDGE_EXECUTABLE;
6755 }
6756
6757 nblk++;
6758 todo |= process_bb (avail, bb, false, false, false, eliminate,
6759 do_region, exit_bbs,
6760 skip_entry_phis && bb == entry->dest);
6761 rpo_state[idx].visited++;
6762
6763 FOR_EACH_EDGE (e, ei, bb->succs)
6764 if ((e->flags & EDGE_EXECUTABLE)
6765 && e->dest->index != EXIT_BLOCK
6766 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
6767 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
6768 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
6769 }
6770 }
6771
6772 /* If statistics or dump file active. */
6773 int nex = 0;
6774 unsigned max_visited = 1;
6775 for (int i = 0; i < n; ++i)
6776 {
6777 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6778 if (bb->flags & BB_EXECUTABLE)
6779 nex++;
6780 statistics_histogram_event (cfun, "RPO block visited times",
6781 rpo_state[i].visited);
6782 if (rpo_state[i].visited > max_visited)
6783 max_visited = rpo_state[i].visited;
6784 }
6785 unsigned nvalues = 0, navail = 0;
6786 for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
6787 i != avail.m_rpo_avail.end (); ++i)
6788 {
6789 nvalues++;
6790 navail += (*i).second.length ();
6791 }
6792 statistics_counter_event (cfun, "RPO blocks", n);
6793 statistics_counter_event (cfun, "RPO blocks visited", nblk);
6794 statistics_counter_event (cfun, "RPO blocks executable", nex);
6795 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
6796 statistics_histogram_event (cfun, "RPO num values", nvalues);
6797 statistics_histogram_event (cfun, "RPO num avail", navail);
6798 statistics_histogram_event (cfun, "RPO num lattice",
6799 vn_ssa_aux_hash->elements ());
6800 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
6801 {
6802 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
6803 " blocks in total discovering %d executable blocks iterating "
6804 "%d.%d times, a block was visited max. %u times\n",
6805 n, nblk, nex,
6806 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
6807 max_visited);
6808 fprintf (dump_file, "RPO tracked %d values available at %d locations "
6809 "and %" PRIu64 " lattice elements\n",
6810 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
6811 }
6812
6813 if (eliminate)
6814 {
6815 /* When !iterate we already performed elimination during the RPO
6816 walk. */
6817 if (iterate)
6818 {
6819 /* Elimination for region-based VN needs to be done within the
6820 RPO walk. */
6821 gcc_assert (! do_region);
6822 /* Note we can't use avail.walk here because that gets confused
6823 by the existing availability and it will be less efficient
6824 as well. */
6825 todo |= eliminate_with_rpo_vn (NULL);
6826 }
6827 else
6828 todo |= avail.eliminate_cleanup (do_region);
6829 }
6830
6831 vn_valueize = NULL;
6832 rpo_avail = NULL;
6833
6834 XDELETEVEC (bb_to_rpo);
6835 XDELETEVEC (rpo);
6836 XDELETEVEC (rpo_state);
6837
6838 return todo;
6839 }
6840
6841 /* Region-based entry for RPO VN. Performs value-numbering and elimination
6842 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
6843 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
6844 are not considered. */
6845
6846 unsigned
6847 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
6848 {
6849 default_vn_walk_kind = VN_WALKREWRITE;
6850 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
6851 free_rpo_vn ();
6852 return todo;
6853 }
6854
6855
6856 namespace {
6857
6858 const pass_data pass_data_fre =
6859 {
6860 GIMPLE_PASS, /* type */
6861 "fre", /* name */
6862 OPTGROUP_NONE, /* optinfo_flags */
6863 TV_TREE_FRE, /* tv_id */
6864 ( PROP_cfg | PROP_ssa ), /* properties_required */
6865 0, /* properties_provided */
6866 0, /* properties_destroyed */
6867 0, /* todo_flags_start */
6868 0, /* todo_flags_finish */
6869 };
6870
6871 class pass_fre : public gimple_opt_pass
6872 {
6873 public:
6874 pass_fre (gcc::context *ctxt)
6875 : gimple_opt_pass (pass_data_fre, ctxt)
6876 {}
6877
6878 /* opt_pass methods: */
6879 opt_pass * clone () { return new pass_fre (m_ctxt); }
6880 virtual bool gate (function *) { return flag_tree_fre != 0; }
6881 virtual unsigned int execute (function *);
6882
6883 }; // class pass_fre
6884
6885 unsigned int
6886 pass_fre::execute (function *fun)
6887 {
6888 unsigned todo = 0;
6889
6890 /* At -O[1g] use the cheap non-iterating mode. */
6891 calculate_dominance_info (CDI_DOMINATORS);
6892 if (optimize > 1)
6893 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6894
6895 default_vn_walk_kind = VN_WALKREWRITE;
6896 todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
6897 free_rpo_vn ();
6898
6899 if (optimize > 1)
6900 loop_optimizer_finalize ();
6901
6902 return todo;
6903 }
6904
6905 } // anon namespace
6906
6907 gimple_opt_pass *
6908 make_pass_fre (gcc::context *ctxt)
6909 {
6910 return new pass_fre (ctxt);
6911 }
6912
6913 #undef BB_EXECUTABLE