]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-sccvn.c
2019-08-20 Richard Biener <rguenther@suse.de>
[thirdparty/gcc.git] / gcc / tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-loop-niter.h"
73 #include "builtins.h"
74 #include "tree-ssa-sccvn.h"
75
76 /* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight line code, it is equivalent to a regular hash based value
80 numbering that is performed in reverse postorder.
81
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
85
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
90
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
95
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
99
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
104
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
108
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
111
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
117
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
123
124 1. We can iterate only the changing portions of the SCC's, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies.
131 */
132
133 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134 #define BB_EXECUTABLE BB_VISITED
135
136 static vn_lookup_kind default_vn_walk_kind;
137
138 /* vn_nary_op hashtable helpers. */
139
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 {
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145 };
146
147 /* Return the computed hashcode for nary operation P1. */
148
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 {
152 return vno1->hashcode;
153 }
154
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
157
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 {
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162 }
163
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166
167
168 /* vn_phi hashtable helpers. */
169
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 {
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177 };
178
179 /* Return the computed hashcode for phi operation P1. */
180
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 {
184 return vp1->hashcode;
185 }
186
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 {
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193 }
194
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197
198
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
201
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
204 {
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
217 }
218
219 /* Free a reference operation structure VP. */
220
221 static inline void
222 free_reference (vn_reference_s *vr)
223 {
224 vr->operands.release ();
225 }
226
227
228 /* vn_reference hashtable helpers. */
229
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 {
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
234 };
235
236 /* Return the hashcode for a given reference operation P1. */
237
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 {
241 return vr1->hashcode;
242 }
243
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 {
247 return v == c || vn_reference_eq (v, c);
248 }
249
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252
253
254 /* The set of VN hashtables. */
255
256 typedef struct vn_tables_s
257 {
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
262
263
264 /* vn_constant hashtable helpers. */
265
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
267 {
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
270 };
271
272 /* Hash table hash function for vn_constant_t. */
273
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
276 {
277 return vc1->hashcode;
278 }
279
280 /* Hash table equality function for vn_constant_t. */
281
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
284 {
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
287
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
289 }
290
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
293
294
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
299
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
303
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
307
308
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
312
313
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
316
317 tree VN_TOP;
318
319 /* Unique counter for our value ids. */
320
321 static unsigned int next_value_id;
322
323
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
327
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
329 {
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
338 };
339
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
342 {
343 return SSA_NAME_VERSION (entry->name);
344 }
345
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
348 {
349 return name == entry->name;
350 }
351
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
355
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
365 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
366 (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
367
368 /* Return whether there is value numbering information for a given SSA name. */
369
370 bool
371 has_VN_INFO (tree name)
372 {
373 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
374 }
375
376 vn_ssa_aux_t
377 VN_INFO (tree name)
378 {
379 vn_ssa_aux_t *res
380 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
381 INSERT);
382 if (*res != NULL)
383 return *res;
384
385 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
386 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
387 newinfo->name = name;
388 newinfo->valnum = VN_TOP;
389 /* We are using the visited flag to handle uses with defs not within the
390 region being value-numbered. */
391 newinfo->visited = false;
392
393 /* Given we create the VN_INFOs on-demand now we have to do initialization
394 different than VN_TOP here. */
395 if (SSA_NAME_IS_DEFAULT_DEF (name))
396 switch (TREE_CODE (SSA_NAME_VAR (name)))
397 {
398 case VAR_DECL:
399 /* All undefined vars are VARYING. */
400 newinfo->valnum = name;
401 newinfo->visited = true;
402 break;
403
404 case PARM_DECL:
405 /* Parameters are VARYING but we can record a condition
406 if we know it is a non-NULL pointer. */
407 newinfo->visited = true;
408 newinfo->valnum = name;
409 if (POINTER_TYPE_P (TREE_TYPE (name))
410 && nonnull_arg_p (SSA_NAME_VAR (name)))
411 {
412 tree ops[2];
413 ops[0] = name;
414 ops[1] = build_int_cst (TREE_TYPE (name), 0);
415 vn_nary_op_t nary;
416 /* Allocate from non-unwinding stack. */
417 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
418 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
419 boolean_type_node, ops);
420 nary->predicated_values = 0;
421 nary->u.result = boolean_true_node;
422 vn_nary_op_insert_into (nary, valid_info->nary, true);
423 gcc_assert (nary->unwind_to == NULL);
424 /* Also do not link it into the undo chain. */
425 last_inserted_nary = nary->next;
426 nary->next = (vn_nary_op_t)(void *)-1;
427 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
428 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
429 boolean_type_node, ops);
430 nary->predicated_values = 0;
431 nary->u.result = boolean_false_node;
432 vn_nary_op_insert_into (nary, valid_info->nary, true);
433 gcc_assert (nary->unwind_to == NULL);
434 last_inserted_nary = nary->next;
435 nary->next = (vn_nary_op_t)(void *)-1;
436 if (dump_file && (dump_flags & TDF_DETAILS))
437 {
438 fprintf (dump_file, "Recording ");
439 print_generic_expr (dump_file, name, TDF_SLIM);
440 fprintf (dump_file, " != 0\n");
441 }
442 }
443 break;
444
445 case RESULT_DECL:
446 /* If the result is passed by invisible reference the default
447 def is initialized, otherwise it's uninitialized. Still
448 undefined is varying. */
449 newinfo->visited = true;
450 newinfo->valnum = name;
451 break;
452
453 default:
454 gcc_unreachable ();
455 }
456 return newinfo;
457 }
458
459 /* Return the SSA value of X. */
460
461 inline tree
462 SSA_VAL (tree x, bool *visited = NULL)
463 {
464 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
465 if (visited)
466 *visited = tem && tem->visited;
467 return tem && tem->visited ? tem->valnum : x;
468 }
469
470 /* Return the SSA value of the VUSE x, supporting released VDEFs
471 during elimination which will value-number the VDEF to the
472 associated VUSE (but not substitute in the whole lattice). */
473
474 static inline tree
475 vuse_ssa_val (tree x)
476 {
477 if (!x)
478 return NULL_TREE;
479
480 do
481 {
482 x = SSA_VAL (x);
483 gcc_assert (x != VN_TOP);
484 }
485 while (SSA_NAME_IN_FREE_LIST (x));
486
487 return x;
488 }
489
490 /* Similar to the above but used as callback for walk_non_aliases_vuses
491 and thus should stop at unvisited VUSE to not walk across region
492 boundaries. */
493
494 static tree
495 vuse_valueize (tree vuse)
496 {
497 do
498 {
499 bool visited;
500 vuse = SSA_VAL (vuse, &visited);
501 if (!visited)
502 return NULL_TREE;
503 gcc_assert (vuse != VN_TOP);
504 }
505 while (SSA_NAME_IN_FREE_LIST (vuse));
506 return vuse;
507 }
508
509
510 /* Return the vn_kind the expression computed by the stmt should be
511 associated with. */
512
513 enum vn_kind
514 vn_get_stmt_kind (gimple *stmt)
515 {
516 switch (gimple_code (stmt))
517 {
518 case GIMPLE_CALL:
519 return VN_REFERENCE;
520 case GIMPLE_PHI:
521 return VN_PHI;
522 case GIMPLE_ASSIGN:
523 {
524 enum tree_code code = gimple_assign_rhs_code (stmt);
525 tree rhs1 = gimple_assign_rhs1 (stmt);
526 switch (get_gimple_rhs_class (code))
527 {
528 case GIMPLE_UNARY_RHS:
529 case GIMPLE_BINARY_RHS:
530 case GIMPLE_TERNARY_RHS:
531 return VN_NARY;
532 case GIMPLE_SINGLE_RHS:
533 switch (TREE_CODE_CLASS (code))
534 {
535 case tcc_reference:
536 /* VOP-less references can go through unary case. */
537 if ((code == REALPART_EXPR
538 || code == IMAGPART_EXPR
539 || code == VIEW_CONVERT_EXPR
540 || code == BIT_FIELD_REF)
541 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
542 return VN_NARY;
543
544 /* Fallthrough. */
545 case tcc_declaration:
546 return VN_REFERENCE;
547
548 case tcc_constant:
549 return VN_CONSTANT;
550
551 default:
552 if (code == ADDR_EXPR)
553 return (is_gimple_min_invariant (rhs1)
554 ? VN_CONSTANT : VN_REFERENCE);
555 else if (code == CONSTRUCTOR)
556 return VN_NARY;
557 return VN_NONE;
558 }
559 default:
560 return VN_NONE;
561 }
562 }
563 default:
564 return VN_NONE;
565 }
566 }
567
568 /* Lookup a value id for CONSTANT and return it. If it does not
569 exist returns 0. */
570
571 unsigned int
572 get_constant_value_id (tree constant)
573 {
574 vn_constant_s **slot;
575 struct vn_constant_s vc;
576
577 vc.hashcode = vn_hash_constant_with_type (constant);
578 vc.constant = constant;
579 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
580 if (slot)
581 return (*slot)->value_id;
582 return 0;
583 }
584
585 /* Lookup a value id for CONSTANT, and if it does not exist, create a
586 new one and return it. If it does exist, return it. */
587
588 unsigned int
589 get_or_alloc_constant_value_id (tree constant)
590 {
591 vn_constant_s **slot;
592 struct vn_constant_s vc;
593 vn_constant_t vcp;
594
595 /* If the hashtable isn't initialized we're not running from PRE and thus
596 do not need value-ids. */
597 if (!constant_to_value_id)
598 return 0;
599
600 vc.hashcode = vn_hash_constant_with_type (constant);
601 vc.constant = constant;
602 slot = constant_to_value_id->find_slot (&vc, INSERT);
603 if (*slot)
604 return (*slot)->value_id;
605
606 vcp = XNEW (struct vn_constant_s);
607 vcp->hashcode = vc.hashcode;
608 vcp->constant = constant;
609 vcp->value_id = get_next_value_id ();
610 *slot = vcp;
611 bitmap_set_bit (constant_value_ids, vcp->value_id);
612 return vcp->value_id;
613 }
614
615 /* Return true if V is a value id for a constant. */
616
617 bool
618 value_id_constant_p (unsigned int v)
619 {
620 return bitmap_bit_p (constant_value_ids, v);
621 }
622
623 /* Compute the hash for a reference operand VRO1. */
624
625 static void
626 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
627 {
628 hstate.add_int (vro1->opcode);
629 if (vro1->op0)
630 inchash::add_expr (vro1->op0, hstate);
631 if (vro1->op1)
632 inchash::add_expr (vro1->op1, hstate);
633 if (vro1->op2)
634 inchash::add_expr (vro1->op2, hstate);
635 }
636
637 /* Compute a hash for the reference operation VR1 and return it. */
638
639 static hashval_t
640 vn_reference_compute_hash (const vn_reference_t vr1)
641 {
642 inchash::hash hstate;
643 hashval_t result;
644 int i;
645 vn_reference_op_t vro;
646 poly_int64 off = -1;
647 bool deref = false;
648
649 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
650 {
651 if (vro->opcode == MEM_REF)
652 deref = true;
653 else if (vro->opcode != ADDR_EXPR)
654 deref = false;
655 if (maybe_ne (vro->off, -1))
656 {
657 if (known_eq (off, -1))
658 off = 0;
659 off += vro->off;
660 }
661 else
662 {
663 if (maybe_ne (off, -1)
664 && maybe_ne (off, 0))
665 hstate.add_poly_int (off);
666 off = -1;
667 if (deref
668 && vro->opcode == ADDR_EXPR)
669 {
670 if (vro->op0)
671 {
672 tree op = TREE_OPERAND (vro->op0, 0);
673 hstate.add_int (TREE_CODE (op));
674 inchash::add_expr (op, hstate);
675 }
676 }
677 else
678 vn_reference_op_compute_hash (vro, hstate);
679 }
680 }
681 result = hstate.end ();
682 /* ??? We would ICE later if we hash instead of adding that in. */
683 if (vr1->vuse)
684 result += SSA_NAME_VERSION (vr1->vuse);
685
686 return result;
687 }
688
689 /* Return true if reference operations VR1 and VR2 are equivalent. This
690 means they have the same set of operands and vuses. */
691
692 bool
693 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
694 {
695 unsigned i, j;
696
697 /* Early out if this is not a hash collision. */
698 if (vr1->hashcode != vr2->hashcode)
699 return false;
700
701 /* The VOP needs to be the same. */
702 if (vr1->vuse != vr2->vuse)
703 return false;
704
705 /* If the operands are the same we are done. */
706 if (vr1->operands == vr2->operands)
707 return true;
708
709 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
710 return false;
711
712 if (INTEGRAL_TYPE_P (vr1->type)
713 && INTEGRAL_TYPE_P (vr2->type))
714 {
715 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
716 return false;
717 }
718 else if (INTEGRAL_TYPE_P (vr1->type)
719 && (TYPE_PRECISION (vr1->type)
720 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
721 return false;
722 else if (INTEGRAL_TYPE_P (vr2->type)
723 && (TYPE_PRECISION (vr2->type)
724 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
725 return false;
726
727 i = 0;
728 j = 0;
729 do
730 {
731 poly_int64 off1 = 0, off2 = 0;
732 vn_reference_op_t vro1, vro2;
733 vn_reference_op_s tem1, tem2;
734 bool deref1 = false, deref2 = false;
735 for (; vr1->operands.iterate (i, &vro1); i++)
736 {
737 if (vro1->opcode == MEM_REF)
738 deref1 = true;
739 /* Do not look through a storage order barrier. */
740 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
741 return false;
742 if (known_eq (vro1->off, -1))
743 break;
744 off1 += vro1->off;
745 }
746 for (; vr2->operands.iterate (j, &vro2); j++)
747 {
748 if (vro2->opcode == MEM_REF)
749 deref2 = true;
750 /* Do not look through a storage order barrier. */
751 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
752 return false;
753 if (known_eq (vro2->off, -1))
754 break;
755 off2 += vro2->off;
756 }
757 if (maybe_ne (off1, off2))
758 return false;
759 if (deref1 && vro1->opcode == ADDR_EXPR)
760 {
761 memset (&tem1, 0, sizeof (tem1));
762 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
763 tem1.type = TREE_TYPE (tem1.op0);
764 tem1.opcode = TREE_CODE (tem1.op0);
765 vro1 = &tem1;
766 deref1 = false;
767 }
768 if (deref2 && vro2->opcode == ADDR_EXPR)
769 {
770 memset (&tem2, 0, sizeof (tem2));
771 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
772 tem2.type = TREE_TYPE (tem2.op0);
773 tem2.opcode = TREE_CODE (tem2.op0);
774 vro2 = &tem2;
775 deref2 = false;
776 }
777 if (deref1 != deref2)
778 return false;
779 if (!vn_reference_op_eq (vro1, vro2))
780 return false;
781 ++j;
782 ++i;
783 }
784 while (vr1->operands.length () != i
785 || vr2->operands.length () != j);
786
787 return true;
788 }
789
790 /* Copy the operations present in load/store REF into RESULT, a vector of
791 vn_reference_op_s's. */
792
793 static void
794 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
795 {
796 /* For non-calls, store the information that makes up the address. */
797 tree orig = ref;
798 while (ref)
799 {
800 vn_reference_op_s temp;
801
802 memset (&temp, 0, sizeof (temp));
803 temp.type = TREE_TYPE (ref);
804 temp.opcode = TREE_CODE (ref);
805 temp.off = -1;
806
807 switch (temp.opcode)
808 {
809 case MODIFY_EXPR:
810 temp.op0 = TREE_OPERAND (ref, 1);
811 break;
812 case WITH_SIZE_EXPR:
813 temp.op0 = TREE_OPERAND (ref, 1);
814 temp.off = 0;
815 break;
816 case MEM_REF:
817 /* The base address gets its own vn_reference_op_s structure. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 if (!mem_ref_offset (ref).to_shwi (&temp.off))
820 temp.off = -1;
821 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
822 temp.base = MR_DEPENDENCE_BASE (ref);
823 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
824 break;
825 case TARGET_MEM_REF:
826 /* The base address gets its own vn_reference_op_s structure. */
827 temp.op0 = TMR_INDEX (ref);
828 temp.op1 = TMR_STEP (ref);
829 temp.op2 = TMR_OFFSET (ref);
830 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
831 temp.base = MR_DEPENDENCE_BASE (ref);
832 result->safe_push (temp);
833 memset (&temp, 0, sizeof (temp));
834 temp.type = NULL_TREE;
835 temp.opcode = ERROR_MARK;
836 temp.op0 = TMR_INDEX2 (ref);
837 temp.off = -1;
838 break;
839 case BIT_FIELD_REF:
840 /* Record bits, position and storage order. */
841 temp.op0 = TREE_OPERAND (ref, 1);
842 temp.op1 = TREE_OPERAND (ref, 2);
843 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
844 temp.off = -1;
845 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
846 break;
847 case COMPONENT_REF:
848 /* The field decl is enough to unambiguously specify the field,
849 a matching type is not necessary and a mismatching type
850 is always a spurious difference. */
851 temp.type = NULL_TREE;
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
854 {
855 tree this_offset = component_ref_field_offset (ref);
856 if (this_offset
857 && poly_int_tree_p (this_offset))
858 {
859 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
860 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
861 {
862 poly_offset_int off
863 = (wi::to_poly_offset (this_offset)
864 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
865 /* Probibit value-numbering zero offset components
866 of addresses the same before the pass folding
867 __builtin_object_size had a chance to run
868 (checking cfun->after_inlining does the
869 trick here). */
870 if (TREE_CODE (orig) != ADDR_EXPR
871 || maybe_ne (off, 0)
872 || cfun->after_inlining)
873 off.to_shwi (&temp.off);
874 }
875 }
876 }
877 break;
878 case ARRAY_RANGE_REF:
879 case ARRAY_REF:
880 {
881 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
882 /* Record index as operand. */
883 temp.op0 = TREE_OPERAND (ref, 1);
884 /* Always record lower bounds and element size. */
885 temp.op1 = array_ref_low_bound (ref);
886 /* But record element size in units of the type alignment. */
887 temp.op2 = TREE_OPERAND (ref, 3);
888 temp.align = eltype->type_common.align;
889 if (! temp.op2)
890 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
891 size_int (TYPE_ALIGN_UNIT (eltype)));
892 if (poly_int_tree_p (temp.op0)
893 && poly_int_tree_p (temp.op1)
894 && TREE_CODE (temp.op2) == INTEGER_CST)
895 {
896 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
897 - wi::to_poly_offset (temp.op1))
898 * wi::to_offset (temp.op2)
899 * vn_ref_op_align_unit (&temp));
900 off.to_shwi (&temp.off);
901 }
902 }
903 break;
904 case VAR_DECL:
905 if (DECL_HARD_REGISTER (ref))
906 {
907 temp.op0 = ref;
908 break;
909 }
910 /* Fallthru. */
911 case PARM_DECL:
912 case CONST_DECL:
913 case RESULT_DECL:
914 /* Canonicalize decls to MEM[&decl] which is what we end up with
915 when valueizing MEM[ptr] with ptr = &decl. */
916 temp.opcode = MEM_REF;
917 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
918 temp.off = 0;
919 result->safe_push (temp);
920 temp.opcode = ADDR_EXPR;
921 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
922 temp.type = TREE_TYPE (temp.op0);
923 temp.off = -1;
924 break;
925 case STRING_CST:
926 case INTEGER_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
929 case REAL_CST:
930 case FIXED_CST:
931 case CONSTRUCTOR:
932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
937 {
938 temp.op0 = ref;
939 break;
940 }
941 break;
942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
944 ref in the chain of references (IE they require an
945 operand), so we don't have to put anything
946 for op* as it will be handled by the iteration */
947 case REALPART_EXPR:
948 temp.off = 0;
949 break;
950 case VIEW_CONVERT_EXPR:
951 temp.off = 0;
952 temp.reverse = storage_order_barrier_p (ref);
953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
957 break;
958 default:
959 gcc_unreachable ();
960 }
961 result->safe_push (temp);
962
963 if (REFERENCE_CLASS_P (ref)
964 || TREE_CODE (ref) == MODIFY_EXPR
965 || TREE_CODE (ref) == WITH_SIZE_EXPR
966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
971 }
972 }
973
974 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET and the reference type TYPE.
976 Return true if something useful was produced. */
977
978 bool
979 ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, tree type,
981 vec<vn_reference_op_s> ops)
982 {
983 vn_reference_op_t op;
984 unsigned i;
985 tree base = NULL_TREE;
986 tree *op0_p = &base;
987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
990 tree size_tree = NULL_TREE;
991 alias_set_type base_alias_set = -1;
992
993 /* First get the final access size from just the outermost expression. */
994 op = &ops[0];
995 if (op->opcode == COMPONENT_REF)
996 size_tree = DECL_SIZE (op->op0);
997 else if (op->opcode == BIT_FIELD_REF)
998 size_tree = op->op0;
999 else
1000 {
1001 machine_mode mode = TYPE_MODE (type);
1002 if (mode == BLKmode)
1003 size_tree = TYPE_SIZE (type);
1004 else
1005 size = GET_MODE_BITSIZE (mode);
1006 }
1007 if (size_tree != NULL_TREE
1008 && poly_int_tree_p (size_tree))
1009 size = wi::to_poly_offset (size_tree);
1010
1011 /* Initially, maxsize is the same as the accessed element size.
1012 In the following it will only grow (or become -1). */
1013 max_size = size;
1014
1015 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1016 and find the ultimate containing object. */
1017 FOR_EACH_VEC_ELT (ops, i, op)
1018 {
1019 switch (op->opcode)
1020 {
1021 /* These may be in the reference ops, but we cannot do anything
1022 sensible with them here. */
1023 case ADDR_EXPR:
1024 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1025 if (base != NULL_TREE
1026 && TREE_CODE (base) == MEM_REF
1027 && op->op0
1028 && DECL_P (TREE_OPERAND (op->op0, 0)))
1029 {
1030 vn_reference_op_t pop = &ops[i-1];
1031 base = TREE_OPERAND (op->op0, 0);
1032 if (known_eq (pop->off, -1))
1033 {
1034 max_size = -1;
1035 offset = 0;
1036 }
1037 else
1038 offset += pop->off * BITS_PER_UNIT;
1039 op0_p = NULL;
1040 break;
1041 }
1042 /* Fallthru. */
1043 case CALL_EXPR:
1044 return false;
1045
1046 /* Record the base objects. */
1047 case MEM_REF:
1048 base_alias_set = get_deref_alias_set (op->op0);
1049 *op0_p = build2 (MEM_REF, op->type,
1050 NULL_TREE, op->op0);
1051 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1052 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1053 op0_p = &TREE_OPERAND (*op0_p, 0);
1054 break;
1055
1056 case VAR_DECL:
1057 case PARM_DECL:
1058 case RESULT_DECL:
1059 case SSA_NAME:
1060 *op0_p = op->op0;
1061 op0_p = NULL;
1062 break;
1063
1064 /* And now the usual component-reference style ops. */
1065 case BIT_FIELD_REF:
1066 offset += wi::to_poly_offset (op->op1);
1067 break;
1068
1069 case COMPONENT_REF:
1070 {
1071 tree field = op->op0;
1072 /* We do not have a complete COMPONENT_REF tree here so we
1073 cannot use component_ref_field_offset. Do the interesting
1074 parts manually. */
1075 tree this_offset = DECL_FIELD_OFFSET (field);
1076
1077 if (op->op1 || !poly_int_tree_p (this_offset))
1078 max_size = -1;
1079 else
1080 {
1081 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1082 << LOG2_BITS_PER_UNIT);
1083 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1084 offset += woffset;
1085 }
1086 break;
1087 }
1088
1089 case ARRAY_RANGE_REF:
1090 case ARRAY_REF:
1091 /* We recorded the lower bound and the element size. */
1092 if (!poly_int_tree_p (op->op0)
1093 || !poly_int_tree_p (op->op1)
1094 || TREE_CODE (op->op2) != INTEGER_CST)
1095 max_size = -1;
1096 else
1097 {
1098 poly_offset_int woffset
1099 = wi::sext (wi::to_poly_offset (op->op0)
1100 - wi::to_poly_offset (op->op1),
1101 TYPE_PRECISION (TREE_TYPE (op->op0)));
1102 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1103 woffset <<= LOG2_BITS_PER_UNIT;
1104 offset += woffset;
1105 }
1106 break;
1107
1108 case REALPART_EXPR:
1109 break;
1110
1111 case IMAGPART_EXPR:
1112 offset += size;
1113 break;
1114
1115 case VIEW_CONVERT_EXPR:
1116 break;
1117
1118 case STRING_CST:
1119 case INTEGER_CST:
1120 case COMPLEX_CST:
1121 case VECTOR_CST:
1122 case REAL_CST:
1123 case CONSTRUCTOR:
1124 case CONST_DECL:
1125 return false;
1126
1127 default:
1128 return false;
1129 }
1130 }
1131
1132 if (base == NULL_TREE)
1133 return false;
1134
1135 ref->ref = NULL_TREE;
1136 ref->base = base;
1137 ref->ref_alias_set = set;
1138 if (base_alias_set != -1)
1139 ref->base_alias_set = base_alias_set;
1140 else
1141 ref->base_alias_set = get_alias_set (base);
1142 /* We discount volatiles from value-numbering elsewhere. */
1143 ref->volatile_p = false;
1144
1145 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1146 {
1147 ref->offset = 0;
1148 ref->size = -1;
1149 ref->max_size = -1;
1150 return true;
1151 }
1152
1153 if (!offset.to_shwi (&ref->offset))
1154 {
1155 ref->offset = 0;
1156 ref->max_size = -1;
1157 return true;
1158 }
1159
1160 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1161 ref->max_size = -1;
1162
1163 return true;
1164 }
1165
1166 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1167 vn_reference_op_s's. */
1168
1169 static void
1170 copy_reference_ops_from_call (gcall *call,
1171 vec<vn_reference_op_s> *result)
1172 {
1173 vn_reference_op_s temp;
1174 unsigned i;
1175 tree lhs = gimple_call_lhs (call);
1176 int lr;
1177
1178 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1179 different. By adding the lhs here in the vector, we ensure that the
1180 hashcode is different, guaranteeing a different value number. */
1181 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1182 {
1183 memset (&temp, 0, sizeof (temp));
1184 temp.opcode = MODIFY_EXPR;
1185 temp.type = TREE_TYPE (lhs);
1186 temp.op0 = lhs;
1187 temp.off = -1;
1188 result->safe_push (temp);
1189 }
1190
1191 /* Copy the type, opcode, function, static chain and EH region, if any. */
1192 memset (&temp, 0, sizeof (temp));
1193 temp.type = gimple_call_fntype (call);
1194 temp.opcode = CALL_EXPR;
1195 temp.op0 = gimple_call_fn (call);
1196 temp.op1 = gimple_call_chain (call);
1197 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1198 temp.op2 = size_int (lr);
1199 temp.off = -1;
1200 result->safe_push (temp);
1201
1202 /* Copy the call arguments. As they can be references as well,
1203 just chain them together. */
1204 for (i = 0; i < gimple_call_num_args (call); ++i)
1205 {
1206 tree callarg = gimple_call_arg (call, i);
1207 copy_reference_ops_from_ref (callarg, result);
1208 }
1209 }
1210
1211 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
1213 static bool
1214 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1215 unsigned int *i_p)
1216 {
1217 unsigned int i = *i_p;
1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
1220 tree addr_base;
1221 poly_int64 addr_offset = 0;
1222
1223 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1224 from .foo.bar to the preceding MEM_REF offset and replace the
1225 address with &OBJ. */
1226 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1227 &addr_offset);
1228 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1229 if (addr_base != TREE_OPERAND (op->op0, 0))
1230 {
1231 poly_offset_int off
1232 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1233 SIGNED)
1234 + addr_offset);
1235 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1236 op->op0 = build_fold_addr_expr (addr_base);
1237 if (tree_fits_shwi_p (mem_op->op0))
1238 mem_op->off = tree_to_shwi (mem_op->op0);
1239 else
1240 mem_op->off = -1;
1241 return true;
1242 }
1243 return false;
1244 }
1245
1246 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1247 *I_P to point to the last element of the replacement. */
1248 static bool
1249 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1250 unsigned int *i_p)
1251 {
1252 bool changed = false;
1253 vn_reference_op_t op;
1254
1255 do
1256 {
1257 unsigned int i = *i_p;
1258 op = &(*ops)[i];
1259 vn_reference_op_t mem_op = &(*ops)[i - 1];
1260 gimple *def_stmt;
1261 enum tree_code code;
1262 poly_offset_int off;
1263
1264 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1265 if (!is_gimple_assign (def_stmt))
1266 return changed;
1267
1268 code = gimple_assign_rhs_code (def_stmt);
1269 if (code != ADDR_EXPR
1270 && code != POINTER_PLUS_EXPR)
1271 return changed;
1272
1273 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1274
1275 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1276 from .foo.bar to the preceding MEM_REF offset and replace the
1277 address with &OBJ. */
1278 if (code == ADDR_EXPR)
1279 {
1280 tree addr, addr_base;
1281 poly_int64 addr_offset;
1282
1283 addr = gimple_assign_rhs1 (def_stmt);
1284 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1285 &addr_offset);
1286 /* If that didn't work because the address isn't invariant propagate
1287 the reference tree from the address operation in case the current
1288 dereference isn't offsetted. */
1289 if (!addr_base
1290 && *i_p == ops->length () - 1
1291 && known_eq (off, 0)
1292 /* This makes us disable this transform for PRE where the
1293 reference ops might be also used for code insertion which
1294 is invalid. */
1295 && default_vn_walk_kind == VN_WALKREWRITE)
1296 {
1297 auto_vec<vn_reference_op_s, 32> tem;
1298 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1299 /* Make sure to preserve TBAA info. The only objects not
1300 wrapped in MEM_REFs that can have their address taken are
1301 STRING_CSTs. */
1302 if (tem.length () >= 2
1303 && tem[tem.length () - 2].opcode == MEM_REF)
1304 {
1305 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1306 new_mem_op->op0
1307 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1308 wi::to_poly_wide (new_mem_op->op0));
1309 }
1310 else
1311 gcc_assert (tem.last ().opcode == STRING_CST);
1312 ops->pop ();
1313 ops->pop ();
1314 ops->safe_splice (tem);
1315 --*i_p;
1316 return true;
1317 }
1318 if (!addr_base
1319 || TREE_CODE (addr_base) != MEM_REF
1320 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1321 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1322 0))))
1323 return changed;
1324
1325 off += addr_offset;
1326 off += mem_ref_offset (addr_base);
1327 op->op0 = TREE_OPERAND (addr_base, 0);
1328 }
1329 else
1330 {
1331 tree ptr, ptroff;
1332 ptr = gimple_assign_rhs1 (def_stmt);
1333 ptroff = gimple_assign_rhs2 (def_stmt);
1334 if (TREE_CODE (ptr) != SSA_NAME
1335 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1336 /* Make sure to not endlessly recurse.
1337 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1338 happen when we value-number a PHI to its backedge value. */
1339 || SSA_VAL (ptr) == op->op0
1340 || !poly_int_tree_p (ptroff))
1341 return changed;
1342
1343 off += wi::to_poly_offset (ptroff);
1344 op->op0 = ptr;
1345 }
1346
1347 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1348 if (tree_fits_shwi_p (mem_op->op0))
1349 mem_op->off = tree_to_shwi (mem_op->op0);
1350 else
1351 mem_op->off = -1;
1352 /* ??? Can end up with endless recursion here!?
1353 gcc.c-torture/execute/strcmp-1.c */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 op->op0 = SSA_VAL (op->op0);
1356 if (TREE_CODE (op->op0) != SSA_NAME)
1357 op->opcode = TREE_CODE (op->op0);
1358
1359 changed = true;
1360 }
1361 /* Tail-recurse. */
1362 while (TREE_CODE (op->op0) == SSA_NAME);
1363
1364 /* Fold a remaining *&. */
1365 if (TREE_CODE (op->op0) == ADDR_EXPR)
1366 vn_reference_fold_indirect (ops, i_p);
1367
1368 return changed;
1369 }
1370
1371 /* Optimize the reference REF to a constant if possible or return
1372 NULL_TREE if not. */
1373
1374 tree
1375 fully_constant_vn_reference_p (vn_reference_t ref)
1376 {
1377 vec<vn_reference_op_s> operands = ref->operands;
1378 vn_reference_op_t op;
1379
1380 /* Try to simplify the translated expression if it is
1381 a call to a builtin function with at most two arguments. */
1382 op = &operands[0];
1383 if (op->opcode == CALL_EXPR
1384 && TREE_CODE (op->op0) == ADDR_EXPR
1385 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1386 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1387 && operands.length () >= 2
1388 && operands.length () <= 3)
1389 {
1390 vn_reference_op_t arg0, arg1 = NULL;
1391 bool anyconst = false;
1392 arg0 = &operands[1];
1393 if (operands.length () > 2)
1394 arg1 = &operands[2];
1395 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1396 || (arg0->opcode == ADDR_EXPR
1397 && is_gimple_min_invariant (arg0->op0)))
1398 anyconst = true;
1399 if (arg1
1400 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1401 || (arg1->opcode == ADDR_EXPR
1402 && is_gimple_min_invariant (arg1->op0))))
1403 anyconst = true;
1404 if (anyconst)
1405 {
1406 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1407 arg1 ? 2 : 1,
1408 arg0->op0,
1409 arg1 ? arg1->op0 : NULL);
1410 if (folded
1411 && TREE_CODE (folded) == NOP_EXPR)
1412 folded = TREE_OPERAND (folded, 0);
1413 if (folded
1414 && is_gimple_min_invariant (folded))
1415 return folded;
1416 }
1417 }
1418
1419 /* Simplify reads from constants or constant initializers. */
1420 else if (BITS_PER_UNIT == 8
1421 && COMPLETE_TYPE_P (ref->type)
1422 && is_gimple_reg_type (ref->type))
1423 {
1424 poly_int64 off = 0;
1425 HOST_WIDE_INT size;
1426 if (INTEGRAL_TYPE_P (ref->type))
1427 size = TYPE_PRECISION (ref->type);
1428 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1429 size = tree_to_shwi (TYPE_SIZE (ref->type));
1430 else
1431 return NULL_TREE;
1432 if (size % BITS_PER_UNIT != 0
1433 || size > MAX_BITSIZE_MODE_ANY_MODE)
1434 return NULL_TREE;
1435 size /= BITS_PER_UNIT;
1436 unsigned i;
1437 for (i = 0; i < operands.length (); ++i)
1438 {
1439 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1440 {
1441 ++i;
1442 break;
1443 }
1444 if (known_eq (operands[i].off, -1))
1445 return NULL_TREE;
1446 off += operands[i].off;
1447 if (operands[i].opcode == MEM_REF)
1448 {
1449 ++i;
1450 break;
1451 }
1452 }
1453 vn_reference_op_t base = &operands[--i];
1454 tree ctor = error_mark_node;
1455 tree decl = NULL_TREE;
1456 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1457 ctor = base->op0;
1458 else if (base->opcode == MEM_REF
1459 && base[1].opcode == ADDR_EXPR
1460 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1461 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1462 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1463 {
1464 decl = TREE_OPERAND (base[1].op0, 0);
1465 if (TREE_CODE (decl) == STRING_CST)
1466 ctor = decl;
1467 else
1468 ctor = ctor_for_folding (decl);
1469 }
1470 if (ctor == NULL_TREE)
1471 return build_zero_cst (ref->type);
1472 else if (ctor != error_mark_node)
1473 {
1474 HOST_WIDE_INT const_off;
1475 if (decl)
1476 {
1477 tree res = fold_ctor_reference (ref->type, ctor,
1478 off * BITS_PER_UNIT,
1479 size * BITS_PER_UNIT, decl);
1480 if (res)
1481 {
1482 STRIP_USELESS_TYPE_CONVERSION (res);
1483 if (is_gimple_min_invariant (res))
1484 return res;
1485 }
1486 }
1487 else if (off.is_constant (&const_off))
1488 {
1489 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1490 int len = native_encode_expr (ctor, buf, size, const_off);
1491 if (len > 0)
1492 return native_interpret_expr (ref->type, buf, len);
1493 }
1494 }
1495 }
1496
1497 return NULL_TREE;
1498 }
1499
1500 /* Return true if OPS contain a storage order barrier. */
1501
1502 static bool
1503 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1504 {
1505 vn_reference_op_t op;
1506 unsigned i;
1507
1508 FOR_EACH_VEC_ELT (ops, i, op)
1509 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1510 return true;
1511
1512 return false;
1513 }
1514
1515 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1516 structures into their value numbers. This is done in-place, and
1517 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1518 whether any operands were valueized. */
1519
1520 static vec<vn_reference_op_s>
1521 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1522 bool with_avail = false)
1523 {
1524 vn_reference_op_t vro;
1525 unsigned int i;
1526
1527 *valueized_anything = false;
1528
1529 FOR_EACH_VEC_ELT (orig, i, vro)
1530 {
1531 if (vro->opcode == SSA_NAME
1532 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1533 {
1534 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1535 if (tem != vro->op0)
1536 {
1537 *valueized_anything = true;
1538 vro->op0 = tem;
1539 }
1540 /* If it transforms from an SSA_NAME to a constant, update
1541 the opcode. */
1542 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1543 vro->opcode = TREE_CODE (vro->op0);
1544 }
1545 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1546 {
1547 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1548 if (tem != vro->op1)
1549 {
1550 *valueized_anything = true;
1551 vro->op1 = tem;
1552 }
1553 }
1554 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1555 {
1556 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1557 if (tem != vro->op2)
1558 {
1559 *valueized_anything = true;
1560 vro->op2 = tem;
1561 }
1562 }
1563 /* If it transforms from an SSA_NAME to an address, fold with
1564 a preceding indirect reference. */
1565 if (i > 0
1566 && vro->op0
1567 && TREE_CODE (vro->op0) == ADDR_EXPR
1568 && orig[i - 1].opcode == MEM_REF)
1569 {
1570 if (vn_reference_fold_indirect (&orig, &i))
1571 *valueized_anything = true;
1572 }
1573 else if (i > 0
1574 && vro->opcode == SSA_NAME
1575 && orig[i - 1].opcode == MEM_REF)
1576 {
1577 if (vn_reference_maybe_forwprop_address (&orig, &i))
1578 *valueized_anything = true;
1579 }
1580 /* If it transforms a non-constant ARRAY_REF into a constant
1581 one, adjust the constant offset. */
1582 else if (vro->opcode == ARRAY_REF
1583 && known_eq (vro->off, -1)
1584 && poly_int_tree_p (vro->op0)
1585 && poly_int_tree_p (vro->op1)
1586 && TREE_CODE (vro->op2) == INTEGER_CST)
1587 {
1588 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1589 - wi::to_poly_offset (vro->op1))
1590 * wi::to_offset (vro->op2)
1591 * vn_ref_op_align_unit (vro));
1592 off.to_shwi (&vro->off);
1593 }
1594 }
1595
1596 return orig;
1597 }
1598
1599 static vec<vn_reference_op_s>
1600 valueize_refs (vec<vn_reference_op_s> orig)
1601 {
1602 bool tem;
1603 return valueize_refs_1 (orig, &tem);
1604 }
1605
1606 static vec<vn_reference_op_s> shared_lookup_references;
1607
1608 /* Create a vector of vn_reference_op_s structures from REF, a
1609 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1610 this function. *VALUEIZED_ANYTHING will specify whether any
1611 operands were valueized. */
1612
1613 static vec<vn_reference_op_s>
1614 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1615 {
1616 if (!ref)
1617 return vNULL;
1618 shared_lookup_references.truncate (0);
1619 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1620 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1621 valueized_anything);
1622 return shared_lookup_references;
1623 }
1624
1625 /* Create a vector of vn_reference_op_s structures from CALL, a
1626 call statement. The vector is shared among all callers of
1627 this function. */
1628
1629 static vec<vn_reference_op_s>
1630 valueize_shared_reference_ops_from_call (gcall *call)
1631 {
1632 if (!call)
1633 return vNULL;
1634 shared_lookup_references.truncate (0);
1635 copy_reference_ops_from_call (call, &shared_lookup_references);
1636 shared_lookup_references = valueize_refs (shared_lookup_references);
1637 return shared_lookup_references;
1638 }
1639
1640 /* Lookup a SCCVN reference operation VR in the current hash table.
1641 Returns the resulting value number if it exists in the hash table,
1642 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1643 vn_reference_t stored in the hashtable if something is found. */
1644
1645 static tree
1646 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1647 {
1648 vn_reference_s **slot;
1649 hashval_t hash;
1650
1651 hash = vr->hashcode;
1652 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1653 if (slot)
1654 {
1655 if (vnresult)
1656 *vnresult = (vn_reference_t)*slot;
1657 return ((vn_reference_t)*slot)->result;
1658 }
1659
1660 return NULL_TREE;
1661 }
1662
1663
1664 /* Partial definition tracking support. */
1665
1666 struct pd_range
1667 {
1668 HOST_WIDE_INT offset;
1669 HOST_WIDE_INT size;
1670 };
1671
1672 struct pd_data
1673 {
1674 tree rhs;
1675 HOST_WIDE_INT offset;
1676 HOST_WIDE_INT size;
1677 };
1678
1679 /* Context for alias walking. */
1680
1681 struct vn_walk_cb_data
1682 {
1683 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1684 vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
1685 : vr (vr_), last_vuse_ptr (last_vuse_ptr_),
1686 vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_), known_ranges (NULL)
1687 {
1688 ao_ref_init (&orig_ref, orig_ref_);
1689 }
1690 ~vn_walk_cb_data ();
1691 void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
1692
1693 vn_reference_t vr;
1694 ao_ref orig_ref;
1695 tree *last_vuse_ptr;
1696 vn_lookup_kind vn_walk_kind;
1697 bool tbaa_p;
1698
1699 /* The VDEFs of partial defs we come along. */
1700 auto_vec<pd_data, 2> partial_defs;
1701 /* The first defs range to avoid splay tree setup in most cases. */
1702 pd_range first_range;
1703 tree first_vuse;
1704 splay_tree known_ranges;
1705 obstack ranges_obstack;
1706 };
1707
1708 vn_walk_cb_data::~vn_walk_cb_data ()
1709 {
1710 if (known_ranges)
1711 {
1712 splay_tree_delete (known_ranges);
1713 obstack_free (&ranges_obstack, NULL);
1714 }
1715 }
1716
1717 /* pd_range splay-tree helpers. */
1718
1719 static int
1720 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1721 {
1722 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1723 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1724 if (offset1 < offset2)
1725 return -1;
1726 else if (offset1 > offset2)
1727 return 1;
1728 return 0;
1729 }
1730
1731 static void *
1732 pd_tree_alloc (int size, void *data_)
1733 {
1734 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1735 return obstack_alloc (&data->ranges_obstack, size);
1736 }
1737
1738 static void
1739 pd_tree_dealloc (void *, void *)
1740 {
1741 }
1742
1743 /* Push PD to the vector of partial definitions returning a
1744 value when we are ready to combine things with VUSE and MAXSIZEI,
1745 NULL when we want to continue looking for partial defs or -1
1746 on failure. */
1747
1748 void *
1749 vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
1750 HOST_WIDE_INT maxsizei)
1751 {
1752 if (partial_defs.is_empty ())
1753 {
1754 partial_defs.safe_push (pd);
1755 first_range.offset = pd.offset;
1756 first_range.size = pd.size;
1757 first_vuse = vuse;
1758 last_vuse_ptr = NULL;
1759 /* Continue looking for partial defs. */
1760 return NULL;
1761 }
1762
1763 if (!known_ranges)
1764 {
1765 /* ??? Optimize the case where the 2nd partial def completes things. */
1766 gcc_obstack_init (&ranges_obstack);
1767 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1768 pd_tree_alloc,
1769 pd_tree_dealloc, this);
1770 splay_tree_insert (known_ranges,
1771 (splay_tree_key)&first_range.offset,
1772 (splay_tree_value)&first_range);
1773 }
1774
1775 pd_range newr = { pd.offset, pd.size };
1776 splay_tree_node n;
1777 pd_range *r;
1778 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1779 HOST_WIDE_INT loffset = newr.offset + 1;
1780 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1781 && ((r = (pd_range *)n->value), true)
1782 && ranges_known_overlap_p (r->offset, r->size + 1,
1783 newr.offset, newr.size))
1784 {
1785 /* Ignore partial defs already covered. */
1786 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1787 return NULL;
1788 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1789 }
1790 else
1791 {
1792 /* newr.offset wasn't covered yet, insert the range. */
1793 r = XOBNEW (&ranges_obstack, pd_range);
1794 *r = newr;
1795 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1796 (splay_tree_value)r);
1797 }
1798 /* Merge r which now contains newr and is a member of the splay tree with
1799 adjacent overlapping ranges. */
1800 pd_range *rafter;
1801 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1802 && ((rafter = (pd_range *)n->value), true)
1803 && ranges_known_overlap_p (r->offset, r->size + 1,
1804 rafter->offset, rafter->size))
1805 {
1806 r->size = MAX (r->offset + r->size,
1807 rafter->offset + rafter->size) - r->offset;
1808 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1809 }
1810 partial_defs.safe_push (pd);
1811
1812 /* Now we have merged newr into the range tree. When we have covered
1813 [offseti, sizei] then the tree will contain exactly one node which has
1814 the desired properties and it will be 'r'. */
1815 if (!known_subrange_p (0, maxsizei / BITS_PER_UNIT, r->offset, r->size))
1816 /* Continue looking for partial defs. */
1817 return NULL;
1818
1819 /* Now simply native encode all partial defs in reverse order. */
1820 unsigned ndefs = partial_defs.length ();
1821 /* We support up to 512-bit values (for V8DFmode). */
1822 unsigned char buffer[64];
1823 int len;
1824
1825 while (!partial_defs.is_empty ())
1826 {
1827 pd_data pd = partial_defs.pop ();
1828 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1829 /* Empty CONSTRUCTOR. */
1830 memset (buffer + MAX (0, pd.offset),
1831 0, MIN ((HOST_WIDE_INT)sizeof (buffer) - MAX (0, pd.offset),
1832 pd.size + MIN (0, pd.offset)));
1833 else
1834 {
1835 unsigned pad = 0;
1836 if (BYTES_BIG_ENDIAN
1837 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (pd.rhs))))
1838 {
1839 /* On big-endian the padding is at the 'front' so just skip
1840 the initial bytes. */
1841 fixed_size_mode mode
1842 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (pd.rhs)));
1843 pad = GET_MODE_SIZE (mode) - pd.size;
1844 }
1845 len = native_encode_expr (pd.rhs, buffer + MAX (0, pd.offset),
1846 sizeof (buffer) - MAX (0, pd.offset),
1847 MAX (0, -pd.offset) + pad);
1848 if (len <= 0 || len < (pd.size - MAX (0, -pd.offset)))
1849 {
1850 if (dump_file && (dump_flags & TDF_DETAILS))
1851 fprintf (dump_file, "Failed to encode %u "
1852 "partial definitions\n", ndefs);
1853 return (void *)-1;
1854 }
1855 }
1856 }
1857
1858 tree type = vr->type;
1859 /* Make sure to interpret in a type that has a range covering the whole
1860 access size. */
1861 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
1862 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
1863 tree val = native_interpret_expr (type, buffer, maxsizei / BITS_PER_UNIT);
1864 /* If we chop off bits because the types precision doesn't match the memory
1865 access size this is ok when optimizing reads but not when called from
1866 the DSE code during elimination. */
1867 if (val && type != vr->type)
1868 {
1869 if (! int_fits_type_p (val, vr->type))
1870 val = NULL_TREE;
1871 else
1872 val = fold_convert (vr->type, val);
1873 }
1874
1875 if (val)
1876 {
1877 if (dump_file && (dump_flags & TDF_DETAILS))
1878 fprintf (dump_file,
1879 "Successfully combined %u partial definitions\n", ndefs);
1880 return vn_reference_lookup_or_insert_for_pieces
1881 (first_vuse, vr->set, vr->type, vr->operands, val);
1882 }
1883 else
1884 {
1885 if (dump_file && (dump_flags & TDF_DETAILS))
1886 fprintf (dump_file,
1887 "Failed to interpret %u encoded partial definitions\n", ndefs);
1888 return (void *)-1;
1889 }
1890 }
1891
1892 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1893 with the current VUSE and performs the expression lookup. */
1894
1895 static void *
1896 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
1897 {
1898 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1899 vn_reference_t vr = data->vr;
1900 vn_reference_s **slot;
1901 hashval_t hash;
1902
1903 /* If we have partial definitions recorded we have to go through
1904 vn_reference_lookup_3. */
1905 if (!data->partial_defs.is_empty ())
1906 return NULL;
1907
1908 if (data->last_vuse_ptr)
1909 *data->last_vuse_ptr = vuse;
1910
1911 /* Fixup vuse and hash. */
1912 if (vr->vuse)
1913 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1914 vr->vuse = vuse_ssa_val (vuse);
1915 if (vr->vuse)
1916 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1917
1918 hash = vr->hashcode;
1919 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1920 if (slot)
1921 return *slot;
1922
1923 return NULL;
1924 }
1925
1926 /* Lookup an existing or insert a new vn_reference entry into the
1927 value table for the VUSE, SET, TYPE, OPERANDS reference which
1928 has the value VALUE which is either a constant or an SSA name. */
1929
1930 static vn_reference_t
1931 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1932 alias_set_type set,
1933 tree type,
1934 vec<vn_reference_op_s,
1935 va_heap> operands,
1936 tree value)
1937 {
1938 vn_reference_s vr1;
1939 vn_reference_t result;
1940 unsigned value_id;
1941 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1942 vr1.operands = operands;
1943 vr1.type = type;
1944 vr1.set = set;
1945 vr1.hashcode = vn_reference_compute_hash (&vr1);
1946 if (vn_reference_lookup_1 (&vr1, &result))
1947 return result;
1948 if (TREE_CODE (value) == SSA_NAME)
1949 value_id = VN_INFO (value)->value_id;
1950 else
1951 value_id = get_or_alloc_constant_value_id (value);
1952 return vn_reference_insert_pieces (vuse, set, type,
1953 operands.copy (), value, value_id);
1954 }
1955
1956 /* Return a value-number for RCODE OPS... either by looking up an existing
1957 value-number for the simplified result or by inserting the operation if
1958 INSERT is true. */
1959
1960 static tree
1961 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1962 {
1963 tree result = NULL_TREE;
1964 /* We will be creating a value number for
1965 RCODE (OPS...).
1966 So first simplify and lookup this expression to see if it
1967 is already available. */
1968 /* For simplification valueize. */
1969 unsigned i;
1970 for (i = 0; i < res_op->num_ops; ++i)
1971 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
1972 {
1973 tree tem = vn_valueize (res_op->ops[i]);
1974 if (!tem)
1975 break;
1976 res_op->ops[i] = tem;
1977 }
1978 /* If valueization of an operand fails (it is not available), skip
1979 simplification. */
1980 bool res = false;
1981 if (i == res_op->num_ops)
1982 {
1983 mprts_hook = vn_lookup_simplify_result;
1984 res = res_op->resimplify (NULL, vn_valueize);
1985 mprts_hook = NULL;
1986 }
1987 gimple *new_stmt = NULL;
1988 if (res
1989 && gimple_simplified_result_is_gimple_val (res_op))
1990 {
1991 /* The expression is already available. */
1992 result = res_op->ops[0];
1993 /* Valueize it, simplification returns sth in AVAIL only. */
1994 if (TREE_CODE (result) == SSA_NAME)
1995 result = SSA_VAL (result);
1996 }
1997 else
1998 {
1999 tree val = vn_lookup_simplify_result (res_op);
2000 if (!val && insert)
2001 {
2002 gimple_seq stmts = NULL;
2003 result = maybe_push_res_to_seq (res_op, &stmts);
2004 if (result)
2005 {
2006 gcc_assert (gimple_seq_singleton_p (stmts));
2007 new_stmt = gimple_seq_first_stmt (stmts);
2008 }
2009 }
2010 else
2011 /* The expression is already available. */
2012 result = val;
2013 }
2014 if (new_stmt)
2015 {
2016 /* The expression is not yet available, value-number lhs to
2017 the new SSA_NAME we created. */
2018 /* Initialize value-number information properly. */
2019 vn_ssa_aux_t result_info = VN_INFO (result);
2020 result_info->valnum = result;
2021 result_info->value_id = get_next_value_id ();
2022 result_info->visited = 1;
2023 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2024 new_stmt);
2025 result_info->needs_insertion = true;
2026 /* ??? PRE phi-translation inserts NARYs without corresponding
2027 SSA name result. Re-use those but set their result according
2028 to the stmt we just built. */
2029 vn_nary_op_t nary = NULL;
2030 vn_nary_op_lookup_stmt (new_stmt, &nary);
2031 if (nary)
2032 {
2033 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2034 nary->u.result = gimple_assign_lhs (new_stmt);
2035 }
2036 /* As all "inserted" statements are singleton SCCs, insert
2037 to the valid table. This is strictly needed to
2038 avoid re-generating new value SSA_NAMEs for the same
2039 expression during SCC iteration over and over (the
2040 optimistic table gets cleared after each iteration).
2041 We do not need to insert into the optimistic table, as
2042 lookups there will fall back to the valid table. */
2043 else
2044 {
2045 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2046 vn_nary_op_t vno1
2047 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2048 vno1->value_id = result_info->value_id;
2049 vno1->length = length;
2050 vno1->predicated_values = 0;
2051 vno1->u.result = result;
2052 init_vn_nary_op_from_stmt (vno1, new_stmt);
2053 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2054 /* Also do not link it into the undo chain. */
2055 last_inserted_nary = vno1->next;
2056 vno1->next = (vn_nary_op_t)(void *)-1;
2057 }
2058 if (dump_file && (dump_flags & TDF_DETAILS))
2059 {
2060 fprintf (dump_file, "Inserting name ");
2061 print_generic_expr (dump_file, result);
2062 fprintf (dump_file, " for expression ");
2063 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2064 fprintf (dump_file, "\n");
2065 }
2066 }
2067 return result;
2068 }
2069
2070 /* Return a value-number for RCODE OPS... either by looking up an existing
2071 value-number for the simplified result or by inserting the operation. */
2072
2073 static tree
2074 vn_nary_build_or_lookup (gimple_match_op *res_op)
2075 {
2076 return vn_nary_build_or_lookup_1 (res_op, true);
2077 }
2078
2079 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2080 its value if present. */
2081
2082 tree
2083 vn_nary_simplify (vn_nary_op_t nary)
2084 {
2085 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2086 return NULL_TREE;
2087 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2088 nary->type, nary->length);
2089 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2090 return vn_nary_build_or_lookup_1 (&op, false);
2091 }
2092
2093 /* Elimination engine. */
2094
2095 class eliminate_dom_walker : public dom_walker
2096 {
2097 public:
2098 eliminate_dom_walker (cdi_direction, bitmap);
2099 ~eliminate_dom_walker ();
2100
2101 virtual edge before_dom_children (basic_block);
2102 virtual void after_dom_children (basic_block);
2103
2104 virtual tree eliminate_avail (basic_block, tree op);
2105 virtual void eliminate_push_avail (basic_block, tree op);
2106 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2107
2108 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2109
2110 unsigned eliminate_cleanup (bool region_p = false);
2111
2112 bool do_pre;
2113 unsigned int el_todo;
2114 unsigned int eliminations;
2115 unsigned int insertions;
2116
2117 /* SSA names that had their defs inserted by PRE if do_pre. */
2118 bitmap inserted_exprs;
2119
2120 /* Blocks with statements that have had their EH properties changed. */
2121 bitmap need_eh_cleanup;
2122
2123 /* Blocks with statements that have had their AB properties changed. */
2124 bitmap need_ab_cleanup;
2125
2126 /* Local state for the eliminate domwalk. */
2127 auto_vec<gimple *> to_remove;
2128 auto_vec<gimple *> to_fixup;
2129 auto_vec<tree> avail;
2130 auto_vec<tree> avail_stack;
2131 };
2132
2133 /* Adaptor to the elimination engine using RPO availability. */
2134
2135 class rpo_elim : public eliminate_dom_walker
2136 {
2137 public:
2138 rpo_elim(basic_block entry_)
2139 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2140 m_avail_freelist (NULL) {}
2141
2142 virtual tree eliminate_avail (basic_block, tree op);
2143
2144 virtual void eliminate_push_avail (basic_block, tree);
2145
2146 basic_block entry;
2147 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2148 obstack. */
2149 vn_avail *m_avail_freelist;
2150 };
2151
2152 /* Global RPO state for access from hooks. */
2153 static rpo_elim *rpo_avail;
2154 basic_block vn_context_bb;
2155
2156 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2157 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2158 Otherwise return false. */
2159
2160 static bool
2161 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2162 tree base2, poly_int64 *offset2)
2163 {
2164 poly_int64 soff;
2165 if (TREE_CODE (base1) == MEM_REF
2166 && TREE_CODE (base2) == MEM_REF)
2167 {
2168 if (mem_ref_offset (base1).to_shwi (&soff))
2169 {
2170 base1 = TREE_OPERAND (base1, 0);
2171 *offset1 += soff * BITS_PER_UNIT;
2172 }
2173 if (mem_ref_offset (base2).to_shwi (&soff))
2174 {
2175 base2 = TREE_OPERAND (base2, 0);
2176 *offset2 += soff * BITS_PER_UNIT;
2177 }
2178 return operand_equal_p (base1, base2, 0);
2179 }
2180 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2181 }
2182
2183 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2184 from the statement defining VUSE and if not successful tries to
2185 translate *REFP and VR_ through an aggregate copy at the definition
2186 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2187 of *REF and *VR. If only disambiguation was performed then
2188 *DISAMBIGUATE_ONLY is set to true. */
2189
2190 static void *
2191 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2192 bool *disambiguate_only)
2193 {
2194 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2195 vn_reference_t vr = data->vr;
2196 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2197 tree base = ao_ref_base (ref);
2198 HOST_WIDE_INT offseti, maxsizei;
2199 static vec<vn_reference_op_s> lhs_ops;
2200 ao_ref lhs_ref;
2201 bool lhs_ref_ok = false;
2202 poly_int64 copy_size;
2203
2204 /* First try to disambiguate after value-replacing in the definitions LHS. */
2205 if (is_gimple_assign (def_stmt))
2206 {
2207 tree lhs = gimple_assign_lhs (def_stmt);
2208 bool valueized_anything = false;
2209 /* Avoid re-allocation overhead. */
2210 lhs_ops.truncate (0);
2211 basic_block saved_rpo_bb = vn_context_bb;
2212 vn_context_bb = gimple_bb (def_stmt);
2213 copy_reference_ops_from_ref (lhs, &lhs_ops);
2214 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2215 vn_context_bb = saved_rpo_bb;
2216 if (valueized_anything)
2217 {
2218 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
2219 get_alias_set (lhs),
2220 TREE_TYPE (lhs), lhs_ops);
2221 if (lhs_ref_ok
2222 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2223 {
2224 *disambiguate_only = true;
2225 return NULL;
2226 }
2227 }
2228 else
2229 {
2230 ao_ref_init (&lhs_ref, lhs);
2231 lhs_ref_ok = true;
2232 }
2233
2234 /* Besides valueizing the LHS we can also use access-path based
2235 disambiguation on the original non-valueized ref. */
2236 if (!ref->ref
2237 && lhs_ref_ok
2238 && data->orig_ref.ref)
2239 {
2240 /* We want to use the non-valueized LHS for this, but avoid redundant
2241 work. */
2242 ao_ref *lref = &lhs_ref;
2243 ao_ref lref_alt;
2244 if (valueized_anything)
2245 {
2246 ao_ref_init (&lref_alt, lhs);
2247 lref = &lref_alt;
2248 }
2249 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2250 {
2251 *disambiguate_only = true;
2252 return NULL;
2253 }
2254 }
2255
2256 /* If we reach a clobbering statement try to skip it and see if
2257 we find a VN result with exactly the same value as the
2258 possible clobber. In this case we can ignore the clobber
2259 and return the found value. */
2260 if (is_gimple_reg_type (TREE_TYPE (lhs))
2261 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2262 && ref->ref)
2263 {
2264 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2265 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2266 data->last_vuse_ptr = NULL;
2267 tree saved_vuse = vr->vuse;
2268 hashval_t saved_hashcode = vr->hashcode;
2269 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2270 /* Need to restore vr->vuse and vr->hashcode. */
2271 vr->vuse = saved_vuse;
2272 vr->hashcode = saved_hashcode;
2273 data->last_vuse_ptr = saved_last_vuse_ptr;
2274 if (res && res != (void *)-1)
2275 {
2276 vn_reference_t vnresult = (vn_reference_t) res;
2277 tree rhs = gimple_assign_rhs1 (def_stmt);
2278 if (TREE_CODE (rhs) == SSA_NAME)
2279 rhs = SSA_VAL (rhs);
2280 if (vnresult->result
2281 && operand_equal_p (vnresult->result, rhs, 0)
2282 /* We have to honor our promise about union type punning
2283 and also support arbitrary overlaps with
2284 -fno-strict-aliasing. So simply resort to alignment to
2285 rule out overlaps. Do this check last because it is
2286 quite expensive compared to the hash-lookup above. */
2287 && multiple_p (get_object_alignment (ref->ref), ref->size)
2288 && multiple_p (get_object_alignment (lhs), ref->size))
2289 return res;
2290 }
2291 }
2292 }
2293 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2294 && gimple_call_num_args (def_stmt) <= 4)
2295 {
2296 /* For builtin calls valueize its arguments and call the
2297 alias oracle again. Valueization may improve points-to
2298 info of pointers and constify size and position arguments.
2299 Originally this was motivated by PR61034 which has
2300 conditional calls to free falsely clobbering ref because
2301 of imprecise points-to info of the argument. */
2302 tree oldargs[4];
2303 bool valueized_anything = false;
2304 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2305 {
2306 oldargs[i] = gimple_call_arg (def_stmt, i);
2307 tree val = vn_valueize (oldargs[i]);
2308 if (val != oldargs[i])
2309 {
2310 gimple_call_set_arg (def_stmt, i, val);
2311 valueized_anything = true;
2312 }
2313 }
2314 if (valueized_anything)
2315 {
2316 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2317 ref);
2318 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2319 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2320 if (!res)
2321 {
2322 *disambiguate_only = true;
2323 return NULL;
2324 }
2325 }
2326 }
2327
2328 /* If we are looking for redundant stores do not create new hashtable
2329 entries from aliasing defs with made up alias-sets. */
2330 if (*disambiguate_only || !data->tbaa_p)
2331 return (void *)-1;
2332
2333 /* If we cannot constrain the size of the reference we cannot
2334 test if anything kills it. */
2335 if (!ref->max_size_known_p ())
2336 return (void *)-1;
2337
2338 poly_int64 offset = ref->offset;
2339 poly_int64 maxsize = ref->max_size;
2340
2341 /* We can't deduce anything useful from clobbers. */
2342 if (gimple_clobber_p (def_stmt))
2343 return (void *)-1;
2344
2345 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2346 from that definition.
2347 1) Memset. */
2348 if (is_gimple_reg_type (vr->type)
2349 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2350 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2351 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2352 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2353 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2354 && offset.is_constant (&offseti)
2355 && offseti % BITS_PER_UNIT == 0))
2356 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2357 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2358 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2359 {
2360 tree base2;
2361 poly_int64 offset2, size2, maxsize2;
2362 bool reverse;
2363 tree ref2 = gimple_call_arg (def_stmt, 0);
2364 if (TREE_CODE (ref2) == SSA_NAME)
2365 {
2366 ref2 = SSA_VAL (ref2);
2367 if (TREE_CODE (ref2) == SSA_NAME
2368 && (TREE_CODE (base) != MEM_REF
2369 || TREE_OPERAND (base, 0) != ref2))
2370 {
2371 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2372 if (gimple_assign_single_p (def_stmt)
2373 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2374 ref2 = gimple_assign_rhs1 (def_stmt);
2375 }
2376 }
2377 if (TREE_CODE (ref2) == ADDR_EXPR)
2378 {
2379 ref2 = TREE_OPERAND (ref2, 0);
2380 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2381 &reverse);
2382 if (!known_size_p (maxsize2)
2383 || !known_eq (maxsize2, size2)
2384 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2385 return (void *)-1;
2386 }
2387 else if (TREE_CODE (ref2) == SSA_NAME)
2388 {
2389 poly_int64 soff;
2390 if (TREE_CODE (base) != MEM_REF
2391 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2392 return (void *)-1;
2393 offset += soff;
2394 offset2 = 0;
2395 if (TREE_OPERAND (base, 0) != ref2)
2396 {
2397 gimple *def = SSA_NAME_DEF_STMT (ref2);
2398 if (is_gimple_assign (def)
2399 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2400 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2401 && poly_int_tree_p (gimple_assign_rhs2 (def))
2402 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2403 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2404 {
2405 ref2 = gimple_assign_rhs1 (def);
2406 if (TREE_CODE (ref2) == SSA_NAME)
2407 ref2 = SSA_VAL (ref2);
2408 }
2409 else
2410 return (void *)-1;
2411 }
2412 }
2413 else
2414 return (void *)-1;
2415 tree len = gimple_call_arg (def_stmt, 2);
2416 HOST_WIDE_INT leni, offset2i, offseti;
2417 if (data->partial_defs.is_empty ()
2418 && known_subrange_p (offset, maxsize, offset2,
2419 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2420 {
2421 tree val;
2422 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2423 val = build_zero_cst (vr->type);
2424 else if (INTEGRAL_TYPE_P (vr->type)
2425 && known_eq (ref->size, 8))
2426 {
2427 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2428 vr->type, gimple_call_arg (def_stmt, 1));
2429 val = vn_nary_build_or_lookup (&res_op);
2430 if (!val
2431 || (TREE_CODE (val) == SSA_NAME
2432 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2433 return (void *)-1;
2434 }
2435 else
2436 {
2437 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2438 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2439 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2440 len);
2441 val = native_interpret_expr (vr->type, buf, len);
2442 if (!val)
2443 return (void *)-1;
2444 }
2445 return vn_reference_lookup_or_insert_for_pieces
2446 (vuse, vr->set, vr->type, vr->operands, val);
2447 }
2448 /* For now handle clearing memory with partial defs. */
2449 else if (known_eq (ref->size, maxsize)
2450 && integer_zerop (gimple_call_arg (def_stmt, 1))
2451 && tree_to_poly_int64 (len).is_constant (&leni)
2452 && offset.is_constant (&offseti)
2453 && offset2.is_constant (&offset2i)
2454 && maxsize.is_constant (&maxsizei))
2455 {
2456 pd_data pd;
2457 pd.rhs = build_constructor (NULL_TREE, NULL);
2458 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2459 pd.size = leni;
2460 return data->push_partial_def (pd, vuse, maxsizei);
2461 }
2462 }
2463
2464 /* 2) Assignment from an empty CONSTRUCTOR. */
2465 else if (is_gimple_reg_type (vr->type)
2466 && gimple_assign_single_p (def_stmt)
2467 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2468 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2469 {
2470 tree lhs = gimple_assign_lhs (def_stmt);
2471 tree base2;
2472 poly_int64 offset2, size2, maxsize2;
2473 HOST_WIDE_INT offset2i, size2i;
2474 bool reverse;
2475 if (lhs_ref_ok)
2476 {
2477 base2 = ao_ref_base (&lhs_ref);
2478 offset2 = lhs_ref.offset;
2479 size2 = lhs_ref.size;
2480 maxsize2 = lhs_ref.max_size;
2481 reverse = reverse_storage_order_for_component_p (lhs);
2482 }
2483 else
2484 base2 = get_ref_base_and_extent (lhs,
2485 &offset2, &size2, &maxsize2, &reverse);
2486 if (known_size_p (maxsize2)
2487 && known_eq (maxsize2, size2)
2488 && adjust_offsets_for_equal_base_address (base, &offset,
2489 base2, &offset2))
2490 {
2491 if (data->partial_defs.is_empty ()
2492 && known_subrange_p (offset, maxsize, offset2, size2))
2493 {
2494 tree val = build_zero_cst (vr->type);
2495 return vn_reference_lookup_or_insert_for_pieces
2496 (vuse, vr->set, vr->type, vr->operands, val);
2497 }
2498 else if (known_eq (ref->size, maxsize)
2499 && maxsize.is_constant (&maxsizei)
2500 && maxsizei % BITS_PER_UNIT == 0
2501 && offset.is_constant (&offseti)
2502 && offseti % BITS_PER_UNIT == 0
2503 && offset2.is_constant (&offset2i)
2504 && offset2i % BITS_PER_UNIT == 0
2505 && size2.is_constant (&size2i)
2506 && size2i % BITS_PER_UNIT == 0)
2507 {
2508 pd_data pd;
2509 pd.rhs = gimple_assign_rhs1 (def_stmt);
2510 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2511 pd.size = size2i / BITS_PER_UNIT;
2512 return data->push_partial_def (pd, vuse, maxsizei);
2513 }
2514 }
2515 }
2516
2517 /* 3) Assignment from a constant. We can use folds native encode/interpret
2518 routines to extract the assigned bits. */
2519 else if (known_eq (ref->size, maxsize)
2520 && is_gimple_reg_type (vr->type)
2521 && !contains_storage_order_barrier_p (vr->operands)
2522 && gimple_assign_single_p (def_stmt)
2523 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2524 /* native_encode and native_decode operate on arrays of bytes
2525 and so fundamentally need a compile-time size and offset. */
2526 && maxsize.is_constant (&maxsizei)
2527 && maxsizei % BITS_PER_UNIT == 0
2528 && offset.is_constant (&offseti)
2529 && offseti % BITS_PER_UNIT == 0
2530 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2531 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2532 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2533 {
2534 tree lhs = gimple_assign_lhs (def_stmt);
2535 tree base2;
2536 poly_int64 offset2, size2, maxsize2;
2537 HOST_WIDE_INT offset2i, size2i;
2538 bool reverse;
2539 if (lhs_ref_ok)
2540 {
2541 base2 = ao_ref_base (&lhs_ref);
2542 offset2 = lhs_ref.offset;
2543 size2 = lhs_ref.size;
2544 maxsize2 = lhs_ref.max_size;
2545 reverse = reverse_storage_order_for_component_p (lhs);
2546 }
2547 else
2548 base2 = get_ref_base_and_extent (lhs,
2549 &offset2, &size2, &maxsize2, &reverse);
2550 if (base2
2551 && !reverse
2552 && known_eq (maxsize2, size2)
2553 && multiple_p (size2, BITS_PER_UNIT)
2554 && multiple_p (offset2, BITS_PER_UNIT)
2555 && adjust_offsets_for_equal_base_address (base, &offset,
2556 base2, &offset2)
2557 && offset.is_constant (&offseti)
2558 && offset2.is_constant (&offset2i)
2559 && size2.is_constant (&size2i))
2560 {
2561 if (data->partial_defs.is_empty ()
2562 && known_subrange_p (offseti, maxsizei, offset2, size2))
2563 {
2564 /* We support up to 512-bit values (for V8DFmode). */
2565 unsigned char buffer[64];
2566 int len;
2567
2568 tree rhs = gimple_assign_rhs1 (def_stmt);
2569 if (TREE_CODE (rhs) == SSA_NAME)
2570 rhs = SSA_VAL (rhs);
2571 unsigned pad = 0;
2572 if (BYTES_BIG_ENDIAN
2573 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2574 {
2575 /* On big-endian the padding is at the 'front' so
2576 just skip the initial bytes. */
2577 fixed_size_mode mode
2578 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2579 pad = GET_MODE_SIZE (mode) - size2i / BITS_PER_UNIT;
2580 }
2581 len = native_encode_expr (rhs,
2582 buffer, sizeof (buffer),
2583 ((offseti - offset2i) / BITS_PER_UNIT
2584 + pad));
2585 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2586 {
2587 tree type = vr->type;
2588 /* Make sure to interpret in a type that has a range
2589 covering the whole access size. */
2590 if (INTEGRAL_TYPE_P (vr->type)
2591 && maxsizei != TYPE_PRECISION (vr->type))
2592 type = build_nonstandard_integer_type (maxsizei,
2593 TYPE_UNSIGNED (type));
2594 tree val = native_interpret_expr (type, buffer,
2595 maxsizei / BITS_PER_UNIT);
2596 /* If we chop off bits because the types precision doesn't
2597 match the memory access size this is ok when optimizing
2598 reads but not when called from the DSE code during
2599 elimination. */
2600 if (val
2601 && type != vr->type)
2602 {
2603 if (! int_fits_type_p (val, vr->type))
2604 val = NULL_TREE;
2605 else
2606 val = fold_convert (vr->type, val);
2607 }
2608
2609 if (val)
2610 return vn_reference_lookup_or_insert_for_pieces
2611 (vuse, vr->set, vr->type, vr->operands, val);
2612 }
2613 }
2614 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
2615 {
2616 pd_data pd;
2617 tree rhs = gimple_assign_rhs1 (def_stmt);
2618 if (TREE_CODE (rhs) == SSA_NAME)
2619 rhs = SSA_VAL (rhs);
2620 pd.rhs = rhs;
2621 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2622 pd.size = size2i / BITS_PER_UNIT;
2623 return data->push_partial_def (pd, vuse, maxsizei);
2624 }
2625 }
2626 }
2627
2628 /* 4) Assignment from an SSA name which definition we may be able
2629 to access pieces from. */
2630 else if (known_eq (ref->size, maxsize)
2631 && is_gimple_reg_type (vr->type)
2632 && !contains_storage_order_barrier_p (vr->operands)
2633 && gimple_assign_single_p (def_stmt)
2634 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2635 /* A subset of partial defs from non-constants can be handled
2636 by for example inserting a CONSTRUCTOR, a COMPLEX_EXPR or
2637 even a (series of) BIT_INSERT_EXPR hoping for simplifications
2638 downstream, not so much for actually doing the insertion. */
2639 && data->partial_defs.is_empty ())
2640 {
2641 tree lhs = gimple_assign_lhs (def_stmt);
2642 tree base2;
2643 poly_int64 offset2, size2, maxsize2;
2644 bool reverse;
2645 if (lhs_ref_ok)
2646 {
2647 base2 = ao_ref_base (&lhs_ref);
2648 offset2 = lhs_ref.offset;
2649 size2 = lhs_ref.size;
2650 maxsize2 = lhs_ref.max_size;
2651 reverse = reverse_storage_order_for_component_p (lhs);
2652 }
2653 else
2654 base2 = get_ref_base_and_extent (lhs,
2655 &offset2, &size2, &maxsize2, &reverse);
2656 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2657 if (!reverse
2658 && known_size_p (maxsize2)
2659 && known_eq (maxsize2, size2)
2660 && adjust_offsets_for_equal_base_address (base, &offset,
2661 base2, &offset2)
2662 && known_subrange_p (offset, maxsize, offset2, size2)
2663 /* ??? We can't handle bitfield precision extracts without
2664 either using an alternate type for the BIT_FIELD_REF and
2665 then doing a conversion or possibly adjusting the offset
2666 according to endianness. */
2667 && (! INTEGRAL_TYPE_P (vr->type)
2668 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2669 && multiple_p (ref->size, BITS_PER_UNIT)
2670 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2671 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2672 {
2673 gimple_match_op op (gimple_match_cond::UNCOND,
2674 BIT_FIELD_REF, vr->type,
2675 vn_valueize (def_rhs),
2676 bitsize_int (ref->size),
2677 bitsize_int (offset - offset2));
2678 tree val = vn_nary_build_or_lookup (&op);
2679 if (val
2680 && (TREE_CODE (val) != SSA_NAME
2681 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2682 {
2683 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2684 (vuse, vr->set, vr->type, vr->operands, val);
2685 return res;
2686 }
2687 }
2688 }
2689
2690 /* 5) For aggregate copies translate the reference through them if
2691 the copy kills ref. */
2692 else if (data->vn_walk_kind == VN_WALKREWRITE
2693 && gimple_assign_single_p (def_stmt)
2694 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2695 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2696 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2697 {
2698 tree base2;
2699 int i, j, k;
2700 auto_vec<vn_reference_op_s> rhs;
2701 vn_reference_op_t vro;
2702 ao_ref r;
2703
2704 if (!lhs_ref_ok)
2705 return (void *)-1;
2706
2707 /* See if the assignment kills REF. */
2708 base2 = ao_ref_base (&lhs_ref);
2709 if (!lhs_ref.max_size_known_p ()
2710 || (base != base2
2711 && (TREE_CODE (base) != MEM_REF
2712 || TREE_CODE (base2) != MEM_REF
2713 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2714 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2715 TREE_OPERAND (base2, 1))))
2716 || !stmt_kills_ref_p (def_stmt, ref))
2717 return (void *)-1;
2718
2719 /* Find the common base of ref and the lhs. lhs_ops already
2720 contains valueized operands for the lhs. */
2721 i = vr->operands.length () - 1;
2722 j = lhs_ops.length () - 1;
2723 while (j >= 0 && i >= 0
2724 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2725 {
2726 i--;
2727 j--;
2728 }
2729
2730 /* ??? The innermost op should always be a MEM_REF and we already
2731 checked that the assignment to the lhs kills vr. Thus for
2732 aggregate copies using char[] types the vn_reference_op_eq
2733 may fail when comparing types for compatibility. But we really
2734 don't care here - further lookups with the rewritten operands
2735 will simply fail if we messed up types too badly. */
2736 poly_int64 extra_off = 0;
2737 if (j == 0 && i >= 0
2738 && lhs_ops[0].opcode == MEM_REF
2739 && maybe_ne (lhs_ops[0].off, -1))
2740 {
2741 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2742 i--, j--;
2743 else if (vr->operands[i].opcode == MEM_REF
2744 && maybe_ne (vr->operands[i].off, -1))
2745 {
2746 extra_off = vr->operands[i].off - lhs_ops[0].off;
2747 i--, j--;
2748 }
2749 }
2750
2751 /* i now points to the first additional op.
2752 ??? LHS may not be completely contained in VR, one or more
2753 VIEW_CONVERT_EXPRs could be in its way. We could at least
2754 try handling outermost VIEW_CONVERT_EXPRs. */
2755 if (j != -1)
2756 return (void *)-1;
2757
2758 /* Punt if the additional ops contain a storage order barrier. */
2759 for (k = i; k >= 0; k--)
2760 {
2761 vro = &vr->operands[k];
2762 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2763 return (void *)-1;
2764 }
2765
2766 /* Now re-write REF to be based on the rhs of the assignment. */
2767 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2768
2769 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2770 if (maybe_ne (extra_off, 0))
2771 {
2772 if (rhs.length () < 2)
2773 return (void *)-1;
2774 int ix = rhs.length () - 2;
2775 if (rhs[ix].opcode != MEM_REF
2776 || known_eq (rhs[ix].off, -1))
2777 return (void *)-1;
2778 rhs[ix].off += extra_off;
2779 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2780 build_int_cst (TREE_TYPE (rhs[ix].op0),
2781 extra_off));
2782 }
2783
2784 /* We need to pre-pend vr->operands[0..i] to rhs. */
2785 vec<vn_reference_op_s> old = vr->operands;
2786 if (i + 1 + rhs.length () > vr->operands.length ())
2787 vr->operands.safe_grow (i + 1 + rhs.length ());
2788 else
2789 vr->operands.truncate (i + 1 + rhs.length ());
2790 FOR_EACH_VEC_ELT (rhs, j, vro)
2791 vr->operands[i + 1 + j] = *vro;
2792 vr->operands = valueize_refs (vr->operands);
2793 if (old == shared_lookup_references)
2794 shared_lookup_references = vr->operands;
2795 vr->hashcode = vn_reference_compute_hash (vr);
2796
2797 /* Try folding the new reference to a constant. */
2798 tree val = fully_constant_vn_reference_p (vr);
2799 if (val)
2800 {
2801 if (data->partial_defs.is_empty ())
2802 return vn_reference_lookup_or_insert_for_pieces
2803 (vuse, vr->set, vr->type, vr->operands, val);
2804 /* This is the only interesting case for partial-def handling
2805 coming from targets that like to gimplify init-ctors as
2806 aggregate copies from constant data like aarch64 for
2807 PR83518. */
2808 if (maxsize.is_constant (&maxsizei)
2809 && known_eq (ref->size, maxsize))
2810 {
2811 pd_data pd;
2812 pd.rhs = val;
2813 pd.offset = 0;
2814 pd.size = maxsizei / BITS_PER_UNIT;
2815 return data->push_partial_def (pd, vuse, maxsizei);
2816 }
2817 }
2818
2819 /* Continuing with partial defs isn't easily possible here, we
2820 have to find a full def from further lookups from here. Probably
2821 not worth the special-casing everywhere. */
2822 if (!data->partial_defs.is_empty ())
2823 return (void *)-1;
2824
2825 /* Adjust *ref from the new operands. */
2826 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2827 return (void *)-1;
2828 /* This can happen with bitfields. */
2829 if (maybe_ne (ref->size, r.size))
2830 return (void *)-1;
2831 *ref = r;
2832
2833 /* Do not update last seen VUSE after translating. */
2834 data->last_vuse_ptr = NULL;
2835 /* Invalidate the original access path since it now contains
2836 the wrong base. */
2837 data->orig_ref.ref = NULL_TREE;
2838
2839 /* Keep looking for the adjusted *REF / VR pair. */
2840 return NULL;
2841 }
2842
2843 /* 6) For memcpy copies translate the reference through them if
2844 the copy kills ref. */
2845 else if (data->vn_walk_kind == VN_WALKREWRITE
2846 && is_gimple_reg_type (vr->type)
2847 /* ??? Handle BCOPY as well. */
2848 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2849 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2850 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2851 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2852 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2853 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2854 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2855 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
2856 /* Handling this is more complicated, give up for now. */
2857 && data->partial_defs.is_empty ())
2858 {
2859 tree lhs, rhs;
2860 ao_ref r;
2861 poly_int64 rhs_offset, lhs_offset;
2862 vn_reference_op_s op;
2863 poly_uint64 mem_offset;
2864 poly_int64 at, byte_maxsize;
2865
2866 /* Only handle non-variable, addressable refs. */
2867 if (maybe_ne (ref->size, maxsize)
2868 || !multiple_p (offset, BITS_PER_UNIT, &at)
2869 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2870 return (void *)-1;
2871
2872 /* Extract a pointer base and an offset for the destination. */
2873 lhs = gimple_call_arg (def_stmt, 0);
2874 lhs_offset = 0;
2875 if (TREE_CODE (lhs) == SSA_NAME)
2876 {
2877 lhs = vn_valueize (lhs);
2878 if (TREE_CODE (lhs) == SSA_NAME)
2879 {
2880 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2881 if (gimple_assign_single_p (def_stmt)
2882 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2883 lhs = gimple_assign_rhs1 (def_stmt);
2884 }
2885 }
2886 if (TREE_CODE (lhs) == ADDR_EXPR)
2887 {
2888 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2889 &lhs_offset);
2890 if (!tem)
2891 return (void *)-1;
2892 if (TREE_CODE (tem) == MEM_REF
2893 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2894 {
2895 lhs = TREE_OPERAND (tem, 0);
2896 if (TREE_CODE (lhs) == SSA_NAME)
2897 lhs = vn_valueize (lhs);
2898 lhs_offset += mem_offset;
2899 }
2900 else if (DECL_P (tem))
2901 lhs = build_fold_addr_expr (tem);
2902 else
2903 return (void *)-1;
2904 }
2905 if (TREE_CODE (lhs) != SSA_NAME
2906 && TREE_CODE (lhs) != ADDR_EXPR)
2907 return (void *)-1;
2908
2909 /* Extract a pointer base and an offset for the source. */
2910 rhs = gimple_call_arg (def_stmt, 1);
2911 rhs_offset = 0;
2912 if (TREE_CODE (rhs) == SSA_NAME)
2913 rhs = vn_valueize (rhs);
2914 if (TREE_CODE (rhs) == ADDR_EXPR)
2915 {
2916 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2917 &rhs_offset);
2918 if (!tem)
2919 return (void *)-1;
2920 if (TREE_CODE (tem) == MEM_REF
2921 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2922 {
2923 rhs = TREE_OPERAND (tem, 0);
2924 rhs_offset += mem_offset;
2925 }
2926 else if (DECL_P (tem)
2927 || TREE_CODE (tem) == STRING_CST)
2928 rhs = build_fold_addr_expr (tem);
2929 else
2930 return (void *)-1;
2931 }
2932 if (TREE_CODE (rhs) != SSA_NAME
2933 && TREE_CODE (rhs) != ADDR_EXPR)
2934 return (void *)-1;
2935
2936 /* The bases of the destination and the references have to agree. */
2937 if (TREE_CODE (base) == MEM_REF)
2938 {
2939 if (TREE_OPERAND (base, 0) != lhs
2940 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2941 return (void *) -1;
2942 at += mem_offset;
2943 }
2944 else if (!DECL_P (base)
2945 || TREE_CODE (lhs) != ADDR_EXPR
2946 || TREE_OPERAND (lhs, 0) != base)
2947 return (void *)-1;
2948
2949 /* If the access is completely outside of the memcpy destination
2950 area there is no aliasing. */
2951 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2952 return NULL;
2953 /* And the access has to be contained within the memcpy destination. */
2954 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2955 return (void *)-1;
2956
2957 /* Make room for 2 operands in the new reference. */
2958 if (vr->operands.length () < 2)
2959 {
2960 vec<vn_reference_op_s> old = vr->operands;
2961 vr->operands.safe_grow_cleared (2);
2962 if (old == shared_lookup_references)
2963 shared_lookup_references = vr->operands;
2964 }
2965 else
2966 vr->operands.truncate (2);
2967
2968 /* The looked-through reference is a simple MEM_REF. */
2969 memset (&op, 0, sizeof (op));
2970 op.type = vr->type;
2971 op.opcode = MEM_REF;
2972 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2973 op.off = at - lhs_offset + rhs_offset;
2974 vr->operands[0] = op;
2975 op.type = TREE_TYPE (rhs);
2976 op.opcode = TREE_CODE (rhs);
2977 op.op0 = rhs;
2978 op.off = -1;
2979 vr->operands[1] = op;
2980 vr->hashcode = vn_reference_compute_hash (vr);
2981
2982 /* Try folding the new reference to a constant. */
2983 tree val = fully_constant_vn_reference_p (vr);
2984 if (val)
2985 return vn_reference_lookup_or_insert_for_pieces
2986 (vuse, vr->set, vr->type, vr->operands, val);
2987
2988 /* Adjust *ref from the new operands. */
2989 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2990 return (void *)-1;
2991 /* This can happen with bitfields. */
2992 if (maybe_ne (ref->size, r.size))
2993 return (void *)-1;
2994 *ref = r;
2995
2996 /* Do not update last seen VUSE after translating. */
2997 data->last_vuse_ptr = NULL;
2998 /* Invalidate the original access path since it now contains
2999 the wrong base. */
3000 data->orig_ref.ref = NULL_TREE;
3001
3002 /* Keep looking for the adjusted *REF / VR pair. */
3003 return NULL;
3004 }
3005
3006 /* Bail out and stop walking. */
3007 return (void *)-1;
3008 }
3009
3010 /* Return a reference op vector from OP that can be used for
3011 vn_reference_lookup_pieces. The caller is responsible for releasing
3012 the vector. */
3013
3014 vec<vn_reference_op_s>
3015 vn_reference_operands_for_lookup (tree op)
3016 {
3017 bool valueized;
3018 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3019 }
3020
3021 /* Lookup a reference operation by it's parts, in the current hash table.
3022 Returns the resulting value number if it exists in the hash table,
3023 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3024 vn_reference_t stored in the hashtable if something is found. */
3025
3026 tree
3027 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
3028 vec<vn_reference_op_s> operands,
3029 vn_reference_t *vnresult, vn_lookup_kind kind)
3030 {
3031 struct vn_reference_s vr1;
3032 vn_reference_t tmp;
3033 tree cst;
3034
3035 if (!vnresult)
3036 vnresult = &tmp;
3037 *vnresult = NULL;
3038
3039 vr1.vuse = vuse_ssa_val (vuse);
3040 shared_lookup_references.truncate (0);
3041 shared_lookup_references.safe_grow (operands.length ());
3042 memcpy (shared_lookup_references.address (),
3043 operands.address (),
3044 sizeof (vn_reference_op_s)
3045 * operands.length ());
3046 vr1.operands = operands = shared_lookup_references
3047 = valueize_refs (shared_lookup_references);
3048 vr1.type = type;
3049 vr1.set = set;
3050 vr1.hashcode = vn_reference_compute_hash (&vr1);
3051 if ((cst = fully_constant_vn_reference_p (&vr1)))
3052 return cst;
3053
3054 vn_reference_lookup_1 (&vr1, vnresult);
3055 if (!*vnresult
3056 && kind != VN_NOWALK
3057 && vr1.vuse)
3058 {
3059 ao_ref r;
3060 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3061 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
3062 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
3063 *vnresult =
3064 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
3065 vn_reference_lookup_2,
3066 vn_reference_lookup_3,
3067 vuse_valueize, limit, &data);
3068 gcc_checking_assert (vr1.operands == shared_lookup_references);
3069 }
3070
3071 if (*vnresult)
3072 return (*vnresult)->result;
3073
3074 return NULL_TREE;
3075 }
3076
3077 /* Lookup OP in the current hash table, and return the resulting value
3078 number if it exists in the hash table. Return NULL_TREE if it does
3079 not exist in the hash table or if the result field of the structure
3080 was NULL.. VNRESULT will be filled in with the vn_reference_t
3081 stored in the hashtable if one exists. When TBAA_P is false assume
3082 we are looking up a store and treat it as having alias-set zero.
3083 *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded. */
3084
3085 tree
3086 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3087 vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
3088 {
3089 vec<vn_reference_op_s> operands;
3090 struct vn_reference_s vr1;
3091 tree cst;
3092 bool valuezied_anything;
3093
3094 if (vnresult)
3095 *vnresult = NULL;
3096
3097 vr1.vuse = vuse_ssa_val (vuse);
3098 vr1.operands = operands
3099 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
3100 vr1.type = TREE_TYPE (op);
3101 vr1.set = get_alias_set (op);
3102 vr1.hashcode = vn_reference_compute_hash (&vr1);
3103 if ((cst = fully_constant_vn_reference_p (&vr1)))
3104 return cst;
3105
3106 if (kind != VN_NOWALK
3107 && vr1.vuse)
3108 {
3109 vn_reference_t wvnresult;
3110 ao_ref r;
3111 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3112 /* Make sure to use a valueized reference if we valueized anything.
3113 Otherwise preserve the full reference for advanced TBAA. */
3114 if (!valuezied_anything
3115 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
3116 vr1.operands))
3117 ao_ref_init (&r, op);
3118 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3119 last_vuse_ptr, kind, tbaa_p);
3120 wvnresult =
3121 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
3122 vn_reference_lookup_2,
3123 vn_reference_lookup_3,
3124 vuse_valueize, limit, &data);
3125 gcc_checking_assert (vr1.operands == shared_lookup_references);
3126 if (wvnresult)
3127 {
3128 if (vnresult)
3129 *vnresult = wvnresult;
3130 return wvnresult->result;
3131 }
3132
3133 return NULL_TREE;
3134 }
3135
3136 return vn_reference_lookup_1 (&vr1, vnresult);
3137 }
3138
3139 /* Lookup CALL in the current hash table and return the entry in
3140 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3141
3142 void
3143 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3144 vn_reference_t vr)
3145 {
3146 if (vnresult)
3147 *vnresult = NULL;
3148
3149 tree vuse = gimple_vuse (call);
3150
3151 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3152 vr->operands = valueize_shared_reference_ops_from_call (call);
3153 vr->type = gimple_expr_type (call);
3154 vr->set = 0;
3155 vr->hashcode = vn_reference_compute_hash (vr);
3156 vn_reference_lookup_1 (vr, vnresult);
3157 }
3158
3159 /* Insert OP into the current hash table with a value number of RESULT. */
3160
3161 static void
3162 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3163 {
3164 vn_reference_s **slot;
3165 vn_reference_t vr1;
3166 bool tem;
3167
3168 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3169 if (TREE_CODE (result) == SSA_NAME)
3170 vr1->value_id = VN_INFO (result)->value_id;
3171 else
3172 vr1->value_id = get_or_alloc_constant_value_id (result);
3173 vr1->vuse = vuse_ssa_val (vuse);
3174 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3175 vr1->type = TREE_TYPE (op);
3176 vr1->set = get_alias_set (op);
3177 vr1->hashcode = vn_reference_compute_hash (vr1);
3178 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3179 vr1->result_vdef = vdef;
3180
3181 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3182 INSERT);
3183
3184 /* Because IL walking on reference lookup can end up visiting
3185 a def that is only to be visited later in iteration order
3186 when we are about to make an irreducible region reducible
3187 the def can be effectively processed and its ref being inserted
3188 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
3189 but save a lookup if we deal with already inserted refs here. */
3190 if (*slot)
3191 {
3192 /* We cannot assert that we have the same value either because
3193 when disentangling an irreducible region we may end up visiting
3194 a use before the corresponding def. That's a missed optimization
3195 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3196 if (dump_file && (dump_flags & TDF_DETAILS)
3197 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3198 {
3199 fprintf (dump_file, "Keeping old value ");
3200 print_generic_expr (dump_file, (*slot)->result);
3201 fprintf (dump_file, " because of collision\n");
3202 }
3203 free_reference (vr1);
3204 obstack_free (&vn_tables_obstack, vr1);
3205 return;
3206 }
3207
3208 *slot = vr1;
3209 vr1->next = last_inserted_ref;
3210 last_inserted_ref = vr1;
3211 }
3212
3213 /* Insert a reference by it's pieces into the current hash table with
3214 a value number of RESULT. Return the resulting reference
3215 structure we created. */
3216
3217 vn_reference_t
3218 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
3219 vec<vn_reference_op_s> operands,
3220 tree result, unsigned int value_id)
3221
3222 {
3223 vn_reference_s **slot;
3224 vn_reference_t vr1;
3225
3226 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3227 vr1->value_id = value_id;
3228 vr1->vuse = vuse_ssa_val (vuse);
3229 vr1->operands = valueize_refs (operands);
3230 vr1->type = type;
3231 vr1->set = set;
3232 vr1->hashcode = vn_reference_compute_hash (vr1);
3233 if (result && TREE_CODE (result) == SSA_NAME)
3234 result = SSA_VAL (result);
3235 vr1->result = result;
3236
3237 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3238 INSERT);
3239
3240 /* At this point we should have all the things inserted that we have
3241 seen before, and we should never try inserting something that
3242 already exists. */
3243 gcc_assert (!*slot);
3244
3245 *slot = vr1;
3246 vr1->next = last_inserted_ref;
3247 last_inserted_ref = vr1;
3248 return vr1;
3249 }
3250
3251 /* Compute and return the hash value for nary operation VBO1. */
3252
3253 static hashval_t
3254 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3255 {
3256 inchash::hash hstate;
3257 unsigned i;
3258
3259 for (i = 0; i < vno1->length; ++i)
3260 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3261 vno1->op[i] = SSA_VAL (vno1->op[i]);
3262
3263 if (((vno1->length == 2
3264 && commutative_tree_code (vno1->opcode))
3265 || (vno1->length == 3
3266 && commutative_ternary_tree_code (vno1->opcode)))
3267 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3268 std::swap (vno1->op[0], vno1->op[1]);
3269 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3270 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3271 {
3272 std::swap (vno1->op[0], vno1->op[1]);
3273 vno1->opcode = swap_tree_comparison (vno1->opcode);
3274 }
3275
3276 hstate.add_int (vno1->opcode);
3277 for (i = 0; i < vno1->length; ++i)
3278 inchash::add_expr (vno1->op[i], hstate);
3279
3280 return hstate.end ();
3281 }
3282
3283 /* Compare nary operations VNO1 and VNO2 and return true if they are
3284 equivalent. */
3285
3286 bool
3287 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3288 {
3289 unsigned i;
3290
3291 if (vno1->hashcode != vno2->hashcode)
3292 return false;
3293
3294 if (vno1->length != vno2->length)
3295 return false;
3296
3297 if (vno1->opcode != vno2->opcode
3298 || !types_compatible_p (vno1->type, vno2->type))
3299 return false;
3300
3301 for (i = 0; i < vno1->length; ++i)
3302 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3303 return false;
3304
3305 /* BIT_INSERT_EXPR has an implict operand as the type precision
3306 of op1. Need to check to make sure they are the same. */
3307 if (vno1->opcode == BIT_INSERT_EXPR
3308 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3309 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3310 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3311 return false;
3312
3313 return true;
3314 }
3315
3316 /* Initialize VNO from the pieces provided. */
3317
3318 static void
3319 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3320 enum tree_code code, tree type, tree *ops)
3321 {
3322 vno->opcode = code;
3323 vno->length = length;
3324 vno->type = type;
3325 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3326 }
3327
3328 /* Initialize VNO from OP. */
3329
3330 static void
3331 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
3332 {
3333 unsigned i;
3334
3335 vno->opcode = TREE_CODE (op);
3336 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
3337 vno->type = TREE_TYPE (op);
3338 for (i = 0; i < vno->length; ++i)
3339 vno->op[i] = TREE_OPERAND (op, i);
3340 }
3341
3342 /* Return the number of operands for a vn_nary ops structure from STMT. */
3343
3344 static unsigned int
3345 vn_nary_length_from_stmt (gimple *stmt)
3346 {
3347 switch (gimple_assign_rhs_code (stmt))
3348 {
3349 case REALPART_EXPR:
3350 case IMAGPART_EXPR:
3351 case VIEW_CONVERT_EXPR:
3352 return 1;
3353
3354 case BIT_FIELD_REF:
3355 return 3;
3356
3357 case CONSTRUCTOR:
3358 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3359
3360 default:
3361 return gimple_num_ops (stmt) - 1;
3362 }
3363 }
3364
3365 /* Initialize VNO from STMT. */
3366
3367 static void
3368 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3369 {
3370 unsigned i;
3371
3372 vno->opcode = gimple_assign_rhs_code (stmt);
3373 vno->type = gimple_expr_type (stmt);
3374 switch (vno->opcode)
3375 {
3376 case REALPART_EXPR:
3377 case IMAGPART_EXPR:
3378 case VIEW_CONVERT_EXPR:
3379 vno->length = 1;
3380 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3381 break;
3382
3383 case BIT_FIELD_REF:
3384 vno->length = 3;
3385 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3386 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3387 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3388 break;
3389
3390 case CONSTRUCTOR:
3391 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3392 for (i = 0; i < vno->length; ++i)
3393 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3394 break;
3395
3396 default:
3397 gcc_checking_assert (!gimple_assign_single_p (stmt));
3398 vno->length = gimple_num_ops (stmt) - 1;
3399 for (i = 0; i < vno->length; ++i)
3400 vno->op[i] = gimple_op (stmt, i + 1);
3401 }
3402 }
3403
3404 /* Compute the hashcode for VNO and look for it in the hash table;
3405 return the resulting value number if it exists in the hash table.
3406 Return NULL_TREE if it does not exist in the hash table or if the
3407 result field of the operation is NULL. VNRESULT will contain the
3408 vn_nary_op_t from the hashtable if it exists. */
3409
3410 static tree
3411 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3412 {
3413 vn_nary_op_s **slot;
3414
3415 if (vnresult)
3416 *vnresult = NULL;
3417
3418 vno->hashcode = vn_nary_op_compute_hash (vno);
3419 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3420 if (!slot)
3421 return NULL_TREE;
3422 if (vnresult)
3423 *vnresult = *slot;
3424 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3425 }
3426
3427 /* Lookup a n-ary operation by its pieces and return the resulting value
3428 number if it exists in the hash table. Return NULL_TREE if it does
3429 not exist in the hash table or if the result field of the operation
3430 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3431 if it exists. */
3432
3433 tree
3434 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3435 tree type, tree *ops, vn_nary_op_t *vnresult)
3436 {
3437 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3438 sizeof_vn_nary_op (length));
3439 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3440 return vn_nary_op_lookup_1 (vno1, vnresult);
3441 }
3442
3443 /* Lookup OP in the current hash table, and return the resulting value
3444 number if it exists in the hash table. Return NULL_TREE if it does
3445 not exist in the hash table or if the result field of the operation
3446 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3447 if it exists. */
3448
3449 tree
3450 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
3451 {
3452 vn_nary_op_t vno1
3453 = XALLOCAVAR (struct vn_nary_op_s,
3454 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3455 init_vn_nary_op_from_op (vno1, op);
3456 return vn_nary_op_lookup_1 (vno1, vnresult);
3457 }
3458
3459 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3460 value number if it exists in the hash table. Return NULL_TREE if
3461 it does not exist in the hash table. VNRESULT will contain the
3462 vn_nary_op_t from the hashtable if it exists. */
3463
3464 tree
3465 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3466 {
3467 vn_nary_op_t vno1
3468 = XALLOCAVAR (struct vn_nary_op_s,
3469 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3470 init_vn_nary_op_from_stmt (vno1, stmt);
3471 return vn_nary_op_lookup_1 (vno1, vnresult);
3472 }
3473
3474 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3475
3476 static vn_nary_op_t
3477 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3478 {
3479 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3480 }
3481
3482 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3483 obstack. */
3484
3485 static vn_nary_op_t
3486 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3487 {
3488 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3489
3490 vno1->value_id = value_id;
3491 vno1->length = length;
3492 vno1->predicated_values = 0;
3493 vno1->u.result = result;
3494
3495 return vno1;
3496 }
3497
3498 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3499 VNO->HASHCODE first. */
3500
3501 static vn_nary_op_t
3502 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3503 bool compute_hash)
3504 {
3505 vn_nary_op_s **slot;
3506
3507 if (compute_hash)
3508 {
3509 vno->hashcode = vn_nary_op_compute_hash (vno);
3510 gcc_assert (! vno->predicated_values
3511 || (! vno->u.values->next
3512 && vno->u.values->n == 1));
3513 }
3514
3515 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3516 vno->unwind_to = *slot;
3517 if (*slot)
3518 {
3519 /* Prefer non-predicated values.
3520 ??? Only if those are constant, otherwise, with constant predicated
3521 value, turn them into predicated values with entry-block validity
3522 (??? but we always find the first valid result currently). */
3523 if ((*slot)->predicated_values
3524 && ! vno->predicated_values)
3525 {
3526 /* ??? We cannot remove *slot from the unwind stack list.
3527 For the moment we deal with this by skipping not found
3528 entries but this isn't ideal ... */
3529 *slot = vno;
3530 /* ??? Maintain a stack of states we can unwind in
3531 vn_nary_op_s? But how far do we unwind? In reality
3532 we need to push change records somewhere... Or not
3533 unwind vn_nary_op_s and linking them but instead
3534 unwind the results "list", linking that, which also
3535 doesn't move on hashtable resize. */
3536 /* We can also have a ->unwind_to recording *slot there.
3537 That way we can make u.values a fixed size array with
3538 recording the number of entries but of course we then
3539 have always N copies for each unwind_to-state. Or we
3540 make sure to only ever append and each unwinding will
3541 pop off one entry (but how to deal with predicated
3542 replaced with non-predicated here?) */
3543 vno->next = last_inserted_nary;
3544 last_inserted_nary = vno;
3545 return vno;
3546 }
3547 else if (vno->predicated_values
3548 && ! (*slot)->predicated_values)
3549 return *slot;
3550 else if (vno->predicated_values
3551 && (*slot)->predicated_values)
3552 {
3553 /* ??? Factor this all into a insert_single_predicated_value
3554 routine. */
3555 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3556 basic_block vno_bb
3557 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3558 vn_pval *nval = vno->u.values;
3559 vn_pval **next = &vno->u.values;
3560 bool found = false;
3561 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3562 {
3563 if (expressions_equal_p (val->result, vno->u.values->result))
3564 {
3565 found = true;
3566 for (unsigned i = 0; i < val->n; ++i)
3567 {
3568 basic_block val_bb
3569 = BASIC_BLOCK_FOR_FN (cfun,
3570 val->valid_dominated_by_p[i]);
3571 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3572 /* Value registered with more generic predicate. */
3573 return *slot;
3574 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3575 /* Shouldn't happen, we insert in RPO order. */
3576 gcc_unreachable ();
3577 }
3578 /* Append value. */
3579 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3580 sizeof (vn_pval)
3581 + val->n * sizeof (int));
3582 (*next)->next = NULL;
3583 (*next)->result = val->result;
3584 (*next)->n = val->n + 1;
3585 memcpy ((*next)->valid_dominated_by_p,
3586 val->valid_dominated_by_p,
3587 val->n * sizeof (int));
3588 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3589 next = &(*next)->next;
3590 if (dump_file && (dump_flags & TDF_DETAILS))
3591 fprintf (dump_file, "Appending predicate to value.\n");
3592 continue;
3593 }
3594 /* Copy other predicated values. */
3595 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3596 sizeof (vn_pval)
3597 + (val->n-1) * sizeof (int));
3598 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3599 (*next)->next = NULL;
3600 next = &(*next)->next;
3601 }
3602 if (!found)
3603 *next = nval;
3604
3605 *slot = vno;
3606 vno->next = last_inserted_nary;
3607 last_inserted_nary = vno;
3608 return vno;
3609 }
3610
3611 /* While we do not want to insert things twice it's awkward to
3612 avoid it in the case where visit_nary_op pattern-matches stuff
3613 and ends up simplifying the replacement to itself. We then
3614 get two inserts, one from visit_nary_op and one from
3615 vn_nary_build_or_lookup.
3616 So allow inserts with the same value number. */
3617 if ((*slot)->u.result == vno->u.result)
3618 return *slot;
3619 }
3620
3621 /* ??? There's also optimistic vs. previous commited state merging
3622 that is problematic for the case of unwinding. */
3623
3624 /* ??? We should return NULL if we do not use 'vno' and have the
3625 caller release it. */
3626 gcc_assert (!*slot);
3627
3628 *slot = vno;
3629 vno->next = last_inserted_nary;
3630 last_inserted_nary = vno;
3631 return vno;
3632 }
3633
3634 /* Insert a n-ary operation into the current hash table using it's
3635 pieces. Return the vn_nary_op_t structure we created and put in
3636 the hashtable. */
3637
3638 vn_nary_op_t
3639 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3640 tree type, tree *ops,
3641 tree result, unsigned int value_id)
3642 {
3643 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3644 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3645 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3646 }
3647
3648 static vn_nary_op_t
3649 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3650 tree type, tree *ops,
3651 tree result, unsigned int value_id,
3652 edge pred_e)
3653 {
3654 /* ??? Currently tracking BBs. */
3655 if (! single_pred_p (pred_e->dest))
3656 {
3657 /* Never record for backedges. */
3658 if (pred_e->flags & EDGE_DFS_BACK)
3659 return NULL;
3660 edge_iterator ei;
3661 edge e;
3662 int cnt = 0;
3663 /* Ignore backedges. */
3664 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3665 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3666 cnt++;
3667 if (cnt != 1)
3668 return NULL;
3669 }
3670 if (dump_file && (dump_flags & TDF_DETAILS)
3671 /* ??? Fix dumping, but currently we only get comparisons. */
3672 && TREE_CODE_CLASS (code) == tcc_comparison)
3673 {
3674 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3675 pred_e->dest->index);
3676 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3677 fprintf (dump_file, " %s ", get_tree_code_name (code));
3678 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3679 fprintf (dump_file, " == %s\n",
3680 integer_zerop (result) ? "false" : "true");
3681 }
3682 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3683 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3684 vno1->predicated_values = 1;
3685 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3686 sizeof (vn_pval));
3687 vno1->u.values->next = NULL;
3688 vno1->u.values->result = result;
3689 vno1->u.values->n = 1;
3690 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3691 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3692 }
3693
3694 static bool
3695 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3696
3697 static tree
3698 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3699 {
3700 if (! vno->predicated_values)
3701 return vno->u.result;
3702 for (vn_pval *val = vno->u.values; val; val = val->next)
3703 for (unsigned i = 0; i < val->n; ++i)
3704 if (dominated_by_p_w_unex (bb,
3705 BASIC_BLOCK_FOR_FN
3706 (cfun, val->valid_dominated_by_p[i])))
3707 return val->result;
3708 return NULL_TREE;
3709 }
3710
3711 /* Insert OP into the current hash table with a value number of
3712 RESULT. Return the vn_nary_op_t structure we created and put in
3713 the hashtable. */
3714
3715 vn_nary_op_t
3716 vn_nary_op_insert (tree op, tree result)
3717 {
3718 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3719 vn_nary_op_t vno1;
3720
3721 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3722 init_vn_nary_op_from_op (vno1, op);
3723 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3724 }
3725
3726 /* Insert the rhs of STMT into the current hash table with a value number of
3727 RESULT. */
3728
3729 static vn_nary_op_t
3730 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3731 {
3732 vn_nary_op_t vno1
3733 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3734 result, VN_INFO (result)->value_id);
3735 init_vn_nary_op_from_stmt (vno1, stmt);
3736 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3737 }
3738
3739 /* Compute a hashcode for PHI operation VP1 and return it. */
3740
3741 static inline hashval_t
3742 vn_phi_compute_hash (vn_phi_t vp1)
3743 {
3744 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3745 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3746 tree phi1op;
3747 tree type;
3748 edge e;
3749 edge_iterator ei;
3750
3751 /* If all PHI arguments are constants we need to distinguish
3752 the PHI node via its type. */
3753 type = vp1->type;
3754 hstate.merge_hash (vn_hash_type (type));
3755
3756 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3757 {
3758 /* Don't hash backedge values they need to be handled as VN_TOP
3759 for optimistic value-numbering. */
3760 if (e->flags & EDGE_DFS_BACK)
3761 continue;
3762
3763 phi1op = vp1->phiargs[e->dest_idx];
3764 if (phi1op == VN_TOP)
3765 continue;
3766 inchash::add_expr (phi1op, hstate);
3767 }
3768
3769 return hstate.end ();
3770 }
3771
3772
3773 /* Return true if COND1 and COND2 represent the same condition, set
3774 *INVERTED_P if one needs to be inverted to make it the same as
3775 the other. */
3776
3777 static bool
3778 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3779 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3780 {
3781 enum tree_code code1 = gimple_cond_code (cond1);
3782 enum tree_code code2 = gimple_cond_code (cond2);
3783
3784 *inverted_p = false;
3785 if (code1 == code2)
3786 ;
3787 else if (code1 == swap_tree_comparison (code2))
3788 std::swap (lhs2, rhs2);
3789 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3790 *inverted_p = true;
3791 else if (code1 == invert_tree_comparison
3792 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3793 {
3794 std::swap (lhs2, rhs2);
3795 *inverted_p = true;
3796 }
3797 else
3798 return false;
3799
3800 return ((expressions_equal_p (lhs1, lhs2)
3801 && expressions_equal_p (rhs1, rhs2))
3802 || (commutative_tree_code (code1)
3803 && expressions_equal_p (lhs1, rhs2)
3804 && expressions_equal_p (rhs1, lhs2)));
3805 }
3806
3807 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3808
3809 static int
3810 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3811 {
3812 if (vp1->hashcode != vp2->hashcode)
3813 return false;
3814
3815 if (vp1->block != vp2->block)
3816 {
3817 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3818 return false;
3819
3820 switch (EDGE_COUNT (vp1->block->preds))
3821 {
3822 case 1:
3823 /* Single-arg PHIs are just copies. */
3824 break;
3825
3826 case 2:
3827 {
3828 /* Rule out backedges into the PHI. */
3829 if (vp1->block->loop_father->header == vp1->block
3830 || vp2->block->loop_father->header == vp2->block)
3831 return false;
3832
3833 /* If the PHI nodes do not have compatible types
3834 they are not the same. */
3835 if (!types_compatible_p (vp1->type, vp2->type))
3836 return false;
3837
3838 basic_block idom1
3839 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3840 basic_block idom2
3841 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3842 /* If the immediate dominator end in switch stmts multiple
3843 values may end up in the same PHI arg via intermediate
3844 CFG merges. */
3845 if (EDGE_COUNT (idom1->succs) != 2
3846 || EDGE_COUNT (idom2->succs) != 2)
3847 return false;
3848
3849 /* Verify the controlling stmt is the same. */
3850 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3851 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3852 if (! last1 || ! last2)
3853 return false;
3854 bool inverted_p;
3855 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3856 last2, vp2->cclhs, vp2->ccrhs,
3857 &inverted_p))
3858 return false;
3859
3860 /* Get at true/false controlled edges into the PHI. */
3861 edge te1, te2, fe1, fe2;
3862 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3863 &te1, &fe1)
3864 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3865 &te2, &fe2))
3866 return false;
3867
3868 /* Swap edges if the second condition is the inverted of the
3869 first. */
3870 if (inverted_p)
3871 std::swap (te2, fe2);
3872
3873 /* ??? Handle VN_TOP specially. */
3874 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3875 vp2->phiargs[te2->dest_idx])
3876 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3877 vp2->phiargs[fe2->dest_idx]))
3878 return false;
3879
3880 return true;
3881 }
3882
3883 default:
3884 return false;
3885 }
3886 }
3887
3888 /* If the PHI nodes do not have compatible types
3889 they are not the same. */
3890 if (!types_compatible_p (vp1->type, vp2->type))
3891 return false;
3892
3893 /* Any phi in the same block will have it's arguments in the
3894 same edge order, because of how we store phi nodes. */
3895 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3896 {
3897 tree phi1op = vp1->phiargs[i];
3898 tree phi2op = vp2->phiargs[i];
3899 if (phi1op == VN_TOP || phi2op == VN_TOP)
3900 continue;
3901 if (!expressions_equal_p (phi1op, phi2op))
3902 return false;
3903 }
3904
3905 return true;
3906 }
3907
3908 /* Lookup PHI in the current hash table, and return the resulting
3909 value number if it exists in the hash table. Return NULL_TREE if
3910 it does not exist in the hash table. */
3911
3912 static tree
3913 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3914 {
3915 vn_phi_s **slot;
3916 struct vn_phi_s *vp1;
3917 edge e;
3918 edge_iterator ei;
3919
3920 vp1 = XALLOCAVAR (struct vn_phi_s,
3921 sizeof (struct vn_phi_s)
3922 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3923
3924 /* Canonicalize the SSA_NAME's to their value number. */
3925 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3926 {
3927 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3928 if (TREE_CODE (def) == SSA_NAME
3929 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3930 def = SSA_VAL (def);
3931 vp1->phiargs[e->dest_idx] = def;
3932 }
3933 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3934 vp1->block = gimple_bb (phi);
3935 /* Extract values of the controlling condition. */
3936 vp1->cclhs = NULL_TREE;
3937 vp1->ccrhs = NULL_TREE;
3938 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3939 if (EDGE_COUNT (idom1->succs) == 2)
3940 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3941 {
3942 /* ??? We want to use SSA_VAL here. But possibly not
3943 allow VN_TOP. */
3944 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3945 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3946 }
3947 vp1->hashcode = vn_phi_compute_hash (vp1);
3948 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3949 if (!slot)
3950 return NULL_TREE;
3951 return (*slot)->result;
3952 }
3953
3954 /* Insert PHI into the current hash table with a value number of
3955 RESULT. */
3956
3957 static vn_phi_t
3958 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3959 {
3960 vn_phi_s **slot;
3961 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3962 sizeof (vn_phi_s)
3963 + ((gimple_phi_num_args (phi) - 1)
3964 * sizeof (tree)));
3965 edge e;
3966 edge_iterator ei;
3967
3968 /* Canonicalize the SSA_NAME's to their value number. */
3969 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3970 {
3971 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3972 if (TREE_CODE (def) == SSA_NAME
3973 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3974 def = SSA_VAL (def);
3975 vp1->phiargs[e->dest_idx] = def;
3976 }
3977 vp1->value_id = VN_INFO (result)->value_id;
3978 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3979 vp1->block = gimple_bb (phi);
3980 /* Extract values of the controlling condition. */
3981 vp1->cclhs = NULL_TREE;
3982 vp1->ccrhs = NULL_TREE;
3983 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3984 if (EDGE_COUNT (idom1->succs) == 2)
3985 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3986 {
3987 /* ??? We want to use SSA_VAL here. But possibly not
3988 allow VN_TOP. */
3989 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3990 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3991 }
3992 vp1->result = result;
3993 vp1->hashcode = vn_phi_compute_hash (vp1);
3994
3995 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3996 gcc_assert (!*slot);
3997
3998 *slot = vp1;
3999 vp1->next = last_inserted_phi;
4000 last_inserted_phi = vp1;
4001 return vp1;
4002 }
4003
4004
4005 /* Return true if BB1 is dominated by BB2 taking into account edges
4006 that are not executable. */
4007
4008 static bool
4009 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
4010 {
4011 edge_iterator ei;
4012 edge e;
4013
4014 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4015 return true;
4016
4017 /* Before iterating we'd like to know if there exists a
4018 (executable) path from bb2 to bb1 at all, if not we can
4019 directly return false. For now simply iterate once. */
4020
4021 /* Iterate to the single executable bb1 predecessor. */
4022 if (EDGE_COUNT (bb1->preds) > 1)
4023 {
4024 edge prede = NULL;
4025 FOR_EACH_EDGE (e, ei, bb1->preds)
4026 if (e->flags & EDGE_EXECUTABLE)
4027 {
4028 if (prede)
4029 {
4030 prede = NULL;
4031 break;
4032 }
4033 prede = e;
4034 }
4035 if (prede)
4036 {
4037 bb1 = prede->src;
4038
4039 /* Re-do the dominance check with changed bb1. */
4040 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4041 return true;
4042 }
4043 }
4044
4045 /* Iterate to the single executable bb2 successor. */
4046 edge succe = NULL;
4047 FOR_EACH_EDGE (e, ei, bb2->succs)
4048 if (e->flags & EDGE_EXECUTABLE)
4049 {
4050 if (succe)
4051 {
4052 succe = NULL;
4053 break;
4054 }
4055 succe = e;
4056 }
4057 if (succe)
4058 {
4059 /* Verify the reached block is only reached through succe.
4060 If there is only one edge we can spare us the dominator
4061 check and iterate directly. */
4062 if (EDGE_COUNT (succe->dest->preds) > 1)
4063 {
4064 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4065 if (e != succe
4066 && (e->flags & EDGE_EXECUTABLE))
4067 {
4068 succe = NULL;
4069 break;
4070 }
4071 }
4072 if (succe)
4073 {
4074 bb2 = succe->dest;
4075
4076 /* Re-do the dominance check with changed bb2. */
4077 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4078 return true;
4079 }
4080 }
4081
4082 /* We could now iterate updating bb1 / bb2. */
4083 return false;
4084 }
4085
4086 /* Set the value number of FROM to TO, return true if it has changed
4087 as a result. */
4088
4089 static inline bool
4090 set_ssa_val_to (tree from, tree to)
4091 {
4092 vn_ssa_aux_t from_info = VN_INFO (from);
4093 tree currval = from_info->valnum; // SSA_VAL (from)
4094 poly_int64 toff, coff;
4095
4096 /* The only thing we allow as value numbers are ssa_names
4097 and invariants. So assert that here. We don't allow VN_TOP
4098 as visiting a stmt should produce a value-number other than
4099 that.
4100 ??? Still VN_TOP can happen for unreachable code, so force
4101 it to varying in that case. Not all code is prepared to
4102 get VN_TOP on valueization. */
4103 if (to == VN_TOP)
4104 {
4105 /* ??? When iterating and visiting PHI <undef, backedge-value>
4106 for the first time we rightfully get VN_TOP and we need to
4107 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4108 With SCCVN we were simply lucky we iterated the other PHI
4109 cycles first and thus visited the backedge-value DEF. */
4110 if (currval == VN_TOP)
4111 goto set_and_exit;
4112 if (dump_file && (dump_flags & TDF_DETAILS))
4113 fprintf (dump_file, "Forcing value number to varying on "
4114 "receiving VN_TOP\n");
4115 to = from;
4116 }
4117
4118 gcc_checking_assert (to != NULL_TREE
4119 && ((TREE_CODE (to) == SSA_NAME
4120 && (to == from || SSA_VAL (to) == to))
4121 || is_gimple_min_invariant (to)));
4122
4123 if (from != to)
4124 {
4125 if (currval == from)
4126 {
4127 if (dump_file && (dump_flags & TDF_DETAILS))
4128 {
4129 fprintf (dump_file, "Not changing value number of ");
4130 print_generic_expr (dump_file, from);
4131 fprintf (dump_file, " from VARYING to ");
4132 print_generic_expr (dump_file, to);
4133 fprintf (dump_file, "\n");
4134 }
4135 return false;
4136 }
4137 bool curr_invariant = is_gimple_min_invariant (currval);
4138 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
4139 && ssa_undefined_value_p (currval, false));
4140 if (currval != VN_TOP
4141 && !curr_invariant
4142 && !curr_undefined
4143 && is_gimple_min_invariant (to))
4144 {
4145 if (dump_file && (dump_flags & TDF_DETAILS))
4146 {
4147 fprintf (dump_file, "Forcing VARYING instead of changing "
4148 "value number of ");
4149 print_generic_expr (dump_file, from);
4150 fprintf (dump_file, " from ");
4151 print_generic_expr (dump_file, currval);
4152 fprintf (dump_file, " (non-constant) to ");
4153 print_generic_expr (dump_file, to);
4154 fprintf (dump_file, " (constant)\n");
4155 }
4156 to = from;
4157 }
4158 else if (currval != VN_TOP
4159 && !curr_undefined
4160 && TREE_CODE (to) == SSA_NAME
4161 && ssa_undefined_value_p (to, false))
4162 {
4163 if (dump_file && (dump_flags & TDF_DETAILS))
4164 {
4165 fprintf (dump_file, "Forcing VARYING instead of changing "
4166 "value number of ");
4167 print_generic_expr (dump_file, from);
4168 fprintf (dump_file, " from ");
4169 print_generic_expr (dump_file, currval);
4170 fprintf (dump_file, " (non-undefined) to ");
4171 print_generic_expr (dump_file, to);
4172 fprintf (dump_file, " (undefined)\n");
4173 }
4174 to = from;
4175 }
4176 else if (TREE_CODE (to) == SSA_NAME
4177 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4178 to = from;
4179 }
4180
4181 set_and_exit:
4182 if (dump_file && (dump_flags & TDF_DETAILS))
4183 {
4184 fprintf (dump_file, "Setting value number of ");
4185 print_generic_expr (dump_file, from);
4186 fprintf (dump_file, " to ");
4187 print_generic_expr (dump_file, to);
4188 }
4189
4190 if (currval != to
4191 && !operand_equal_p (currval, to, 0)
4192 /* Different undefined SSA names are not actually different. See
4193 PR82320 for a testcase were we'd otherwise not terminate iteration. */
4194 && !(TREE_CODE (currval) == SSA_NAME
4195 && TREE_CODE (to) == SSA_NAME
4196 && ssa_undefined_value_p (currval, false)
4197 && ssa_undefined_value_p (to, false))
4198 /* ??? For addresses involving volatile objects or types operand_equal_p
4199 does not reliably detect ADDR_EXPRs as equal. We know we are only
4200 getting invariant gimple addresses here, so can use
4201 get_addr_base_and_unit_offset to do this comparison. */
4202 && !(TREE_CODE (currval) == ADDR_EXPR
4203 && TREE_CODE (to) == ADDR_EXPR
4204 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4205 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4206 && known_eq (coff, toff)))
4207 {
4208 if (dump_file && (dump_flags & TDF_DETAILS))
4209 fprintf (dump_file, " (changed)\n");
4210 from_info->valnum = to;
4211 return true;
4212 }
4213 if (dump_file && (dump_flags & TDF_DETAILS))
4214 fprintf (dump_file, "\n");
4215 return false;
4216 }
4217
4218 /* Set all definitions in STMT to value number to themselves.
4219 Return true if a value number changed. */
4220
4221 static bool
4222 defs_to_varying (gimple *stmt)
4223 {
4224 bool changed = false;
4225 ssa_op_iter iter;
4226 def_operand_p defp;
4227
4228 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4229 {
4230 tree def = DEF_FROM_PTR (defp);
4231 changed |= set_ssa_val_to (def, def);
4232 }
4233 return changed;
4234 }
4235
4236 /* Visit a copy between LHS and RHS, return true if the value number
4237 changed. */
4238
4239 static bool
4240 visit_copy (tree lhs, tree rhs)
4241 {
4242 /* Valueize. */
4243 rhs = SSA_VAL (rhs);
4244
4245 return set_ssa_val_to (lhs, rhs);
4246 }
4247
4248 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
4249 is the same. */
4250
4251 static tree
4252 valueized_wider_op (tree wide_type, tree op)
4253 {
4254 if (TREE_CODE (op) == SSA_NAME)
4255 op = vn_valueize (op);
4256
4257 /* Either we have the op widened available. */
4258 tree ops[3] = {};
4259 ops[0] = op;
4260 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4261 wide_type, ops, NULL);
4262 if (tem)
4263 return tem;
4264
4265 /* Or the op is truncated from some existing value. */
4266 if (TREE_CODE (op) == SSA_NAME)
4267 {
4268 gimple *def = SSA_NAME_DEF_STMT (op);
4269 if (is_gimple_assign (def)
4270 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4271 {
4272 tem = gimple_assign_rhs1 (def);
4273 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4274 {
4275 if (TREE_CODE (tem) == SSA_NAME)
4276 tem = vn_valueize (tem);
4277 return tem;
4278 }
4279 }
4280 }
4281
4282 /* For constants simply extend it. */
4283 if (TREE_CODE (op) == INTEGER_CST)
4284 return wide_int_to_tree (wide_type, wi::to_wide (op));
4285
4286 return NULL_TREE;
4287 }
4288
4289 /* Visit a nary operator RHS, value number it, and return true if the
4290 value number of LHS has changed as a result. */
4291
4292 static bool
4293 visit_nary_op (tree lhs, gassign *stmt)
4294 {
4295 vn_nary_op_t vnresult;
4296 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4297 if (! result && vnresult)
4298 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4299 if (result)
4300 return set_ssa_val_to (lhs, result);
4301
4302 /* Do some special pattern matching for redundancies of operations
4303 in different types. */
4304 enum tree_code code = gimple_assign_rhs_code (stmt);
4305 tree type = TREE_TYPE (lhs);
4306 tree rhs1 = gimple_assign_rhs1 (stmt);
4307 switch (code)
4308 {
4309 CASE_CONVERT:
4310 /* Match arithmetic done in a different type where we can easily
4311 substitute the result from some earlier sign-changed or widened
4312 operation. */
4313 if (INTEGRAL_TYPE_P (type)
4314 && TREE_CODE (rhs1) == SSA_NAME
4315 /* We only handle sign-changes, zero-extension -> & mask or
4316 sign-extension if we know the inner operation doesn't
4317 overflow. */
4318 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4319 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4320 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4321 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4322 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4323 {
4324 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4325 if (def
4326 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4327 || gimple_assign_rhs_code (def) == MINUS_EXPR
4328 || gimple_assign_rhs_code (def) == MULT_EXPR))
4329 {
4330 tree ops[3] = {};
4331 /* Either we have the op widened available. */
4332 ops[0] = valueized_wider_op (type,
4333 gimple_assign_rhs1 (def));
4334 if (ops[0])
4335 ops[1] = valueized_wider_op (type,
4336 gimple_assign_rhs2 (def));
4337 if (ops[0] && ops[1])
4338 {
4339 ops[0] = vn_nary_op_lookup_pieces
4340 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4341 /* We have wider operation available. */
4342 if (ops[0]
4343 /* If the leader is a wrapping operation we can
4344 insert it for code hoisting w/o introducing
4345 undefined overflow. If it is not it has to
4346 be available. See PR86554. */
4347 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4348 || (rpo_avail && vn_context_bb
4349 && rpo_avail->eliminate_avail (vn_context_bb,
4350 ops[0]))))
4351 {
4352 unsigned lhs_prec = TYPE_PRECISION (type);
4353 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4354 if (lhs_prec == rhs_prec
4355 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4356 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4357 {
4358 gimple_match_op match_op (gimple_match_cond::UNCOND,
4359 NOP_EXPR, type, ops[0]);
4360 result = vn_nary_build_or_lookup (&match_op);
4361 if (result)
4362 {
4363 bool changed = set_ssa_val_to (lhs, result);
4364 vn_nary_op_insert_stmt (stmt, result);
4365 return changed;
4366 }
4367 }
4368 else
4369 {
4370 tree mask = wide_int_to_tree
4371 (type, wi::mask (rhs_prec, false, lhs_prec));
4372 gimple_match_op match_op (gimple_match_cond::UNCOND,
4373 BIT_AND_EXPR,
4374 TREE_TYPE (lhs),
4375 ops[0], mask);
4376 result = vn_nary_build_or_lookup (&match_op);
4377 if (result)
4378 {
4379 bool changed = set_ssa_val_to (lhs, result);
4380 vn_nary_op_insert_stmt (stmt, result);
4381 return changed;
4382 }
4383 }
4384 }
4385 }
4386 }
4387 }
4388 default:;
4389 }
4390
4391 bool changed = set_ssa_val_to (lhs, lhs);
4392 vn_nary_op_insert_stmt (stmt, lhs);
4393 return changed;
4394 }
4395
4396 /* Visit a call STMT storing into LHS. Return true if the value number
4397 of the LHS has changed as a result. */
4398
4399 static bool
4400 visit_reference_op_call (tree lhs, gcall *stmt)
4401 {
4402 bool changed = false;
4403 struct vn_reference_s vr1;
4404 vn_reference_t vnresult = NULL;
4405 tree vdef = gimple_vdef (stmt);
4406
4407 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4408 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4409 lhs = NULL_TREE;
4410
4411 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4412 if (vnresult)
4413 {
4414 if (vnresult->result_vdef && vdef)
4415 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4416 else if (vdef)
4417 /* If the call was discovered to be pure or const reflect
4418 that as far as possible. */
4419 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4420
4421 if (!vnresult->result && lhs)
4422 vnresult->result = lhs;
4423
4424 if (vnresult->result && lhs)
4425 changed |= set_ssa_val_to (lhs, vnresult->result);
4426 }
4427 else
4428 {
4429 vn_reference_t vr2;
4430 vn_reference_s **slot;
4431 tree vdef_val = vdef;
4432 if (vdef)
4433 {
4434 /* If we value numbered an indirect functions function to
4435 one not clobbering memory value number its VDEF to its
4436 VUSE. */
4437 tree fn = gimple_call_fn (stmt);
4438 if (fn && TREE_CODE (fn) == SSA_NAME)
4439 {
4440 fn = SSA_VAL (fn);
4441 if (TREE_CODE (fn) == ADDR_EXPR
4442 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4443 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4444 & (ECF_CONST | ECF_PURE)))
4445 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4446 }
4447 changed |= set_ssa_val_to (vdef, vdef_val);
4448 }
4449 if (lhs)
4450 changed |= set_ssa_val_to (lhs, lhs);
4451 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4452 vr2->vuse = vr1.vuse;
4453 /* As we are not walking the virtual operand chain we know the
4454 shared_lookup_references are still original so we can re-use
4455 them here. */
4456 vr2->operands = vr1.operands.copy ();
4457 vr2->type = vr1.type;
4458 vr2->set = vr1.set;
4459 vr2->hashcode = vr1.hashcode;
4460 vr2->result = lhs;
4461 vr2->result_vdef = vdef_val;
4462 vr2->value_id = 0;
4463 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4464 INSERT);
4465 gcc_assert (!*slot);
4466 *slot = vr2;
4467 vr2->next = last_inserted_ref;
4468 last_inserted_ref = vr2;
4469 }
4470
4471 return changed;
4472 }
4473
4474 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4475 and return true if the value number of the LHS has changed as a result. */
4476
4477 static bool
4478 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4479 {
4480 bool changed = false;
4481 tree last_vuse;
4482 tree result;
4483
4484 last_vuse = gimple_vuse (stmt);
4485 result = vn_reference_lookup (op, gimple_vuse (stmt),
4486 default_vn_walk_kind, NULL, true, &last_vuse);
4487
4488 /* We handle type-punning through unions by value-numbering based
4489 on offset and size of the access. Be prepared to handle a
4490 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
4491 if (result
4492 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4493 {
4494 /* We will be setting the value number of lhs to the value number
4495 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4496 So first simplify and lookup this expression to see if it
4497 is already available. */
4498 gimple_match_op res_op (gimple_match_cond::UNCOND,
4499 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4500 result = vn_nary_build_or_lookup (&res_op);
4501 /* When building the conversion fails avoid inserting the reference
4502 again. */
4503 if (!result)
4504 return set_ssa_val_to (lhs, lhs);
4505 }
4506
4507 if (result)
4508 changed = set_ssa_val_to (lhs, result);
4509 else
4510 {
4511 changed = set_ssa_val_to (lhs, lhs);
4512 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4513 }
4514
4515 return changed;
4516 }
4517
4518
4519 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4520 and return true if the value number of the LHS has changed as a result. */
4521
4522 static bool
4523 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4524 {
4525 bool changed = false;
4526 vn_reference_t vnresult = NULL;
4527 tree assign;
4528 bool resultsame = false;
4529 tree vuse = gimple_vuse (stmt);
4530 tree vdef = gimple_vdef (stmt);
4531
4532 if (TREE_CODE (op) == SSA_NAME)
4533 op = SSA_VAL (op);
4534
4535 /* First we want to lookup using the *vuses* from the store and see
4536 if there the last store to this location with the same address
4537 had the same value.
4538
4539 The vuses represent the memory state before the store. If the
4540 memory state, address, and value of the store is the same as the
4541 last store to this location, then this store will produce the
4542 same memory state as that store.
4543
4544 In this case the vdef versions for this store are value numbered to those
4545 vuse versions, since they represent the same memory state after
4546 this store.
4547
4548 Otherwise, the vdefs for the store are used when inserting into
4549 the table, since the store generates a new memory state. */
4550
4551 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4552 if (vnresult
4553 && vnresult->result)
4554 {
4555 tree result = vnresult->result;
4556 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4557 || result == SSA_VAL (result));
4558 resultsame = expressions_equal_p (result, op);
4559 if (resultsame)
4560 {
4561 /* If the TBAA state isn't compatible for downstream reads
4562 we cannot value-number the VDEFs the same. */
4563 alias_set_type set = get_alias_set (lhs);
4564 if (vnresult->set != set
4565 && ! alias_set_subset_of (set, vnresult->set))
4566 resultsame = false;
4567 }
4568 }
4569
4570 if (!resultsame)
4571 {
4572 /* Only perform the following when being called from PRE
4573 which embeds tail merging. */
4574 if (default_vn_walk_kind == VN_WALK)
4575 {
4576 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4577 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4578 if (vnresult)
4579 {
4580 VN_INFO (vdef)->visited = true;
4581 return set_ssa_val_to (vdef, vnresult->result_vdef);
4582 }
4583 }
4584
4585 if (dump_file && (dump_flags & TDF_DETAILS))
4586 {
4587 fprintf (dump_file, "No store match\n");
4588 fprintf (dump_file, "Value numbering store ");
4589 print_generic_expr (dump_file, lhs);
4590 fprintf (dump_file, " to ");
4591 print_generic_expr (dump_file, op);
4592 fprintf (dump_file, "\n");
4593 }
4594 /* Have to set value numbers before insert, since insert is
4595 going to valueize the references in-place. */
4596 if (vdef)
4597 changed |= set_ssa_val_to (vdef, vdef);
4598
4599 /* Do not insert structure copies into the tables. */
4600 if (is_gimple_min_invariant (op)
4601 || is_gimple_reg (op))
4602 vn_reference_insert (lhs, op, vdef, NULL);
4603
4604 /* Only perform the following when being called from PRE
4605 which embeds tail merging. */
4606 if (default_vn_walk_kind == VN_WALK)
4607 {
4608 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4609 vn_reference_insert (assign, lhs, vuse, vdef);
4610 }
4611 }
4612 else
4613 {
4614 /* We had a match, so value number the vdef to have the value
4615 number of the vuse it came from. */
4616
4617 if (dump_file && (dump_flags & TDF_DETAILS))
4618 fprintf (dump_file, "Store matched earlier value, "
4619 "value numbering store vdefs to matching vuses.\n");
4620
4621 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4622 }
4623
4624 return changed;
4625 }
4626
4627 /* Visit and value number PHI, return true if the value number
4628 changed. When BACKEDGES_VARYING_P is true then assume all
4629 backedge values are varying. When INSERTED is not NULL then
4630 this is just a ahead query for a possible iteration, set INSERTED
4631 to true if we'd insert into the hashtable. */
4632
4633 static bool
4634 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4635 {
4636 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4637 tree backedge_val = NULL_TREE;
4638 bool seen_non_backedge = false;
4639 tree sameval_base = NULL_TREE;
4640 poly_int64 soff, doff;
4641 unsigned n_executable = 0;
4642 edge_iterator ei;
4643 edge e;
4644
4645 /* TODO: We could check for this in initialization, and replace this
4646 with a gcc_assert. */
4647 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4648 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4649
4650 /* We track whether a PHI was CSEd to to avoid excessive iterations
4651 that would be necessary only because the PHI changed arguments
4652 but not value. */
4653 if (!inserted)
4654 gimple_set_plf (phi, GF_PLF_1, false);
4655
4656 /* See if all non-TOP arguments have the same value. TOP is
4657 equivalent to everything, so we can ignore it. */
4658 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4659 if (e->flags & EDGE_EXECUTABLE)
4660 {
4661 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4662
4663 ++n_executable;
4664 if (TREE_CODE (def) == SSA_NAME)
4665 {
4666 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4667 def = SSA_VAL (def);
4668 if (e->flags & EDGE_DFS_BACK)
4669 backedge_val = def;
4670 }
4671 if (!(e->flags & EDGE_DFS_BACK))
4672 seen_non_backedge = true;
4673 if (def == VN_TOP)
4674 ;
4675 /* Ignore undefined defs for sameval but record one. */
4676 else if (TREE_CODE (def) == SSA_NAME
4677 && ! virtual_operand_p (def)
4678 && ssa_undefined_value_p (def, false))
4679 seen_undef = def;
4680 else if (sameval == VN_TOP)
4681 sameval = def;
4682 else if (!expressions_equal_p (def, sameval))
4683 {
4684 /* We know we're arriving only with invariant addresses here,
4685 try harder comparing them. We can do some caching here
4686 which we cannot do in expressions_equal_p. */
4687 if (TREE_CODE (def) == ADDR_EXPR
4688 && TREE_CODE (sameval) == ADDR_EXPR
4689 && sameval_base != (void *)-1)
4690 {
4691 if (!sameval_base)
4692 sameval_base = get_addr_base_and_unit_offset
4693 (TREE_OPERAND (sameval, 0), &soff);
4694 if (!sameval_base)
4695 sameval_base = (tree)(void *)-1;
4696 else if ((get_addr_base_and_unit_offset
4697 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4698 && known_eq (soff, doff))
4699 continue;
4700 }
4701 sameval = NULL_TREE;
4702 break;
4703 }
4704 }
4705
4706 /* If the value we want to use is flowing over the backedge and we
4707 should take it as VARYING but it has a non-VARYING value drop to
4708 VARYING.
4709 If we value-number a virtual operand never value-number to the
4710 value from the backedge as that confuses the alias-walking code.
4711 See gcc.dg/torture/pr87176.c. If the value is the same on a
4712 non-backedge everything is OK though. */
4713 bool visited_p;
4714 if ((backedge_val
4715 && !seen_non_backedge
4716 && TREE_CODE (backedge_val) == SSA_NAME
4717 && sameval == backedge_val
4718 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4719 || SSA_VAL (backedge_val) != backedge_val))
4720 /* Do not value-number a virtual operand to sth not visited though
4721 given that allows us to escape a region in alias walking. */
4722 || (sameval
4723 && TREE_CODE (sameval) == SSA_NAME
4724 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4725 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4726 && (SSA_VAL (sameval, &visited_p), !visited_p)))
4727 /* Note this just drops to VARYING without inserting the PHI into
4728 the hashes. */
4729 result = PHI_RESULT (phi);
4730 /* If none of the edges was executable keep the value-number at VN_TOP,
4731 if only a single edge is exectuable use its value. */
4732 else if (n_executable <= 1)
4733 result = seen_undef ? seen_undef : sameval;
4734 /* If we saw only undefined values and VN_TOP use one of the
4735 undefined values. */
4736 else if (sameval == VN_TOP)
4737 result = seen_undef ? seen_undef : sameval;
4738 /* First see if it is equivalent to a phi node in this block. We prefer
4739 this as it allows IV elimination - see PRs 66502 and 67167. */
4740 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4741 {
4742 if (!inserted
4743 && TREE_CODE (result) == SSA_NAME
4744 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4745 {
4746 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4747 if (dump_file && (dump_flags & TDF_DETAILS))
4748 {
4749 fprintf (dump_file, "Marking CSEd to PHI node ");
4750 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4751 0, TDF_SLIM);
4752 fprintf (dump_file, "\n");
4753 }
4754 }
4755 }
4756 /* If all values are the same use that, unless we've seen undefined
4757 values as well and the value isn't constant.
4758 CCP/copyprop have the same restriction to not remove uninit warnings. */
4759 else if (sameval
4760 && (! seen_undef || is_gimple_min_invariant (sameval)))
4761 result = sameval;
4762 else
4763 {
4764 result = PHI_RESULT (phi);
4765 /* Only insert PHIs that are varying, for constant value numbers
4766 we mess up equivalences otherwise as we are only comparing
4767 the immediate controlling predicates. */
4768 vn_phi_insert (phi, result, backedges_varying_p);
4769 if (inserted)
4770 *inserted = true;
4771 }
4772
4773 return set_ssa_val_to (PHI_RESULT (phi), result);
4774 }
4775
4776 /* Try to simplify RHS using equivalences and constant folding. */
4777
4778 static tree
4779 try_to_simplify (gassign *stmt)
4780 {
4781 enum tree_code code = gimple_assign_rhs_code (stmt);
4782 tree tem;
4783
4784 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4785 in this case, there is no point in doing extra work. */
4786 if (code == SSA_NAME)
4787 return NULL_TREE;
4788
4789 /* First try constant folding based on our current lattice. */
4790 mprts_hook = vn_lookup_simplify_result;
4791 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4792 mprts_hook = NULL;
4793 if (tem
4794 && (TREE_CODE (tem) == SSA_NAME
4795 || is_gimple_min_invariant (tem)))
4796 return tem;
4797
4798 return NULL_TREE;
4799 }
4800
4801 /* Visit and value number STMT, return true if the value number
4802 changed. */
4803
4804 static bool
4805 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4806 {
4807 bool changed = false;
4808
4809 if (dump_file && (dump_flags & TDF_DETAILS))
4810 {
4811 fprintf (dump_file, "Value numbering stmt = ");
4812 print_gimple_stmt (dump_file, stmt, 0);
4813 }
4814
4815 if (gimple_code (stmt) == GIMPLE_PHI)
4816 changed = visit_phi (stmt, NULL, backedges_varying_p);
4817 else if (gimple_has_volatile_ops (stmt))
4818 changed = defs_to_varying (stmt);
4819 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4820 {
4821 enum tree_code code = gimple_assign_rhs_code (ass);
4822 tree lhs = gimple_assign_lhs (ass);
4823 tree rhs1 = gimple_assign_rhs1 (ass);
4824 tree simplified;
4825
4826 /* Shortcut for copies. Simplifying copies is pointless,
4827 since we copy the expression and value they represent. */
4828 if (code == SSA_NAME
4829 && TREE_CODE (lhs) == SSA_NAME)
4830 {
4831 changed = visit_copy (lhs, rhs1);
4832 goto done;
4833 }
4834 simplified = try_to_simplify (ass);
4835 if (simplified)
4836 {
4837 if (dump_file && (dump_flags & TDF_DETAILS))
4838 {
4839 fprintf (dump_file, "RHS ");
4840 print_gimple_expr (dump_file, ass, 0);
4841 fprintf (dump_file, " simplified to ");
4842 print_generic_expr (dump_file, simplified);
4843 fprintf (dump_file, "\n");
4844 }
4845 }
4846 /* Setting value numbers to constants will occasionally
4847 screw up phi congruence because constants are not
4848 uniquely associated with a single ssa name that can be
4849 looked up. */
4850 if (simplified
4851 && is_gimple_min_invariant (simplified)
4852 && TREE_CODE (lhs) == SSA_NAME)
4853 {
4854 changed = set_ssa_val_to (lhs, simplified);
4855 goto done;
4856 }
4857 else if (simplified
4858 && TREE_CODE (simplified) == SSA_NAME
4859 && TREE_CODE (lhs) == SSA_NAME)
4860 {
4861 changed = visit_copy (lhs, simplified);
4862 goto done;
4863 }
4864
4865 if ((TREE_CODE (lhs) == SSA_NAME
4866 /* We can substitute SSA_NAMEs that are live over
4867 abnormal edges with their constant value. */
4868 && !(gimple_assign_copy_p (ass)
4869 && is_gimple_min_invariant (rhs1))
4870 && !(simplified
4871 && is_gimple_min_invariant (simplified))
4872 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4873 /* Stores or copies from SSA_NAMEs that are live over
4874 abnormal edges are a problem. */
4875 || (code == SSA_NAME
4876 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4877 changed = defs_to_varying (ass);
4878 else if (REFERENCE_CLASS_P (lhs)
4879 || DECL_P (lhs))
4880 changed = visit_reference_op_store (lhs, rhs1, ass);
4881 else if (TREE_CODE (lhs) == SSA_NAME)
4882 {
4883 if ((gimple_assign_copy_p (ass)
4884 && is_gimple_min_invariant (rhs1))
4885 || (simplified
4886 && is_gimple_min_invariant (simplified)))
4887 {
4888 if (simplified)
4889 changed = set_ssa_val_to (lhs, simplified);
4890 else
4891 changed = set_ssa_val_to (lhs, rhs1);
4892 }
4893 else
4894 {
4895 /* Visit the original statement. */
4896 switch (vn_get_stmt_kind (ass))
4897 {
4898 case VN_NARY:
4899 changed = visit_nary_op (lhs, ass);
4900 break;
4901 case VN_REFERENCE:
4902 changed = visit_reference_op_load (lhs, rhs1, ass);
4903 break;
4904 default:
4905 changed = defs_to_varying (ass);
4906 break;
4907 }
4908 }
4909 }
4910 else
4911 changed = defs_to_varying (ass);
4912 }
4913 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4914 {
4915 tree lhs = gimple_call_lhs (call_stmt);
4916 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4917 {
4918 /* Try constant folding based on our current lattice. */
4919 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4920 vn_valueize);
4921 if (simplified)
4922 {
4923 if (dump_file && (dump_flags & TDF_DETAILS))
4924 {
4925 fprintf (dump_file, "call ");
4926 print_gimple_expr (dump_file, call_stmt, 0);
4927 fprintf (dump_file, " simplified to ");
4928 print_generic_expr (dump_file, simplified);
4929 fprintf (dump_file, "\n");
4930 }
4931 }
4932 /* Setting value numbers to constants will occasionally
4933 screw up phi congruence because constants are not
4934 uniquely associated with a single ssa name that can be
4935 looked up. */
4936 if (simplified
4937 && is_gimple_min_invariant (simplified))
4938 {
4939 changed = set_ssa_val_to (lhs, simplified);
4940 if (gimple_vdef (call_stmt))
4941 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4942 SSA_VAL (gimple_vuse (call_stmt)));
4943 goto done;
4944 }
4945 else if (simplified
4946 && TREE_CODE (simplified) == SSA_NAME)
4947 {
4948 changed = visit_copy (lhs, simplified);
4949 if (gimple_vdef (call_stmt))
4950 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4951 SSA_VAL (gimple_vuse (call_stmt)));
4952 goto done;
4953 }
4954 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4955 {
4956 changed = defs_to_varying (call_stmt);
4957 goto done;
4958 }
4959 }
4960
4961 /* Pick up flags from a devirtualization target. */
4962 tree fn = gimple_call_fn (stmt);
4963 int extra_fnflags = 0;
4964 if (fn && TREE_CODE (fn) == SSA_NAME)
4965 {
4966 fn = SSA_VAL (fn);
4967 if (TREE_CODE (fn) == ADDR_EXPR
4968 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4969 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4970 }
4971 if (!gimple_call_internal_p (call_stmt)
4972 && (/* Calls to the same function with the same vuse
4973 and the same operands do not necessarily return the same
4974 value, unless they're pure or const. */
4975 ((gimple_call_flags (call_stmt) | extra_fnflags)
4976 & (ECF_PURE | ECF_CONST))
4977 /* If calls have a vdef, subsequent calls won't have
4978 the same incoming vuse. So, if 2 calls with vdef have the
4979 same vuse, we know they're not subsequent.
4980 We can value number 2 calls to the same function with the
4981 same vuse and the same operands which are not subsequent
4982 the same, because there is no code in the program that can
4983 compare the 2 values... */
4984 || (gimple_vdef (call_stmt)
4985 /* ... unless the call returns a pointer which does
4986 not alias with anything else. In which case the
4987 information that the values are distinct are encoded
4988 in the IL. */
4989 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4990 /* Only perform the following when being called from PRE
4991 which embeds tail merging. */
4992 && default_vn_walk_kind == VN_WALK)))
4993 changed = visit_reference_op_call (lhs, call_stmt);
4994 else
4995 changed = defs_to_varying (call_stmt);
4996 }
4997 else
4998 changed = defs_to_varying (stmt);
4999 done:
5000 return changed;
5001 }
5002
5003
5004 /* Allocate a value number table. */
5005
5006 static void
5007 allocate_vn_table (vn_tables_t table, unsigned size)
5008 {
5009 table->phis = new vn_phi_table_type (size);
5010 table->nary = new vn_nary_op_table_type (size);
5011 table->references = new vn_reference_table_type (size);
5012 }
5013
5014 /* Free a value number table. */
5015
5016 static void
5017 free_vn_table (vn_tables_t table)
5018 {
5019 /* Walk over elements and release vectors. */
5020 vn_reference_iterator_type hir;
5021 vn_reference_t vr;
5022 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5023 vr->operands.release ();
5024 delete table->phis;
5025 table->phis = NULL;
5026 delete table->nary;
5027 table->nary = NULL;
5028 delete table->references;
5029 table->references = NULL;
5030 }
5031
5032 /* Set *ID according to RESULT. */
5033
5034 static void
5035 set_value_id_for_result (tree result, unsigned int *id)
5036 {
5037 if (result && TREE_CODE (result) == SSA_NAME)
5038 *id = VN_INFO (result)->value_id;
5039 else if (result && is_gimple_min_invariant (result))
5040 *id = get_or_alloc_constant_value_id (result);
5041 else
5042 *id = get_next_value_id ();
5043 }
5044
5045 /* Set the value ids in the valid hash tables. */
5046
5047 static void
5048 set_hashtable_value_ids (void)
5049 {
5050 vn_nary_op_iterator_type hin;
5051 vn_phi_iterator_type hip;
5052 vn_reference_iterator_type hir;
5053 vn_nary_op_t vno;
5054 vn_reference_t vr;
5055 vn_phi_t vp;
5056
5057 /* Now set the value ids of the things we had put in the hash
5058 table. */
5059
5060 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5061 if (! vno->predicated_values)
5062 set_value_id_for_result (vno->u.result, &vno->value_id);
5063
5064 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5065 set_value_id_for_result (vp->result, &vp->value_id);
5066
5067 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5068 hir)
5069 set_value_id_for_result (vr->result, &vr->value_id);
5070 }
5071
5072 /* Return the maximum value id we have ever seen. */
5073
5074 unsigned int
5075 get_max_value_id (void)
5076 {
5077 return next_value_id;
5078 }
5079
5080 /* Return the next unique value id. */
5081
5082 unsigned int
5083 get_next_value_id (void)
5084 {
5085 return next_value_id++;
5086 }
5087
5088
5089 /* Compare two expressions E1 and E2 and return true if they are equal. */
5090
5091 bool
5092 expressions_equal_p (tree e1, tree e2)
5093 {
5094 /* The obvious case. */
5095 if (e1 == e2)
5096 return true;
5097
5098 /* If either one is VN_TOP consider them equal. */
5099 if (e1 == VN_TOP || e2 == VN_TOP)
5100 return true;
5101
5102 /* If only one of them is null, they cannot be equal. */
5103 if (!e1 || !e2)
5104 return false;
5105
5106 /* Now perform the actual comparison. */
5107 if (TREE_CODE (e1) == TREE_CODE (e2)
5108 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5109 return true;
5110
5111 return false;
5112 }
5113
5114
5115 /* Return true if the nary operation NARY may trap. This is a copy
5116 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5117
5118 bool
5119 vn_nary_may_trap (vn_nary_op_t nary)
5120 {
5121 tree type;
5122 tree rhs2 = NULL_TREE;
5123 bool honor_nans = false;
5124 bool honor_snans = false;
5125 bool fp_operation = false;
5126 bool honor_trapv = false;
5127 bool handled, ret;
5128 unsigned i;
5129
5130 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5131 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5132 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5133 {
5134 type = nary->type;
5135 fp_operation = FLOAT_TYPE_P (type);
5136 if (fp_operation)
5137 {
5138 honor_nans = flag_trapping_math && !flag_finite_math_only;
5139 honor_snans = flag_signaling_nans != 0;
5140 }
5141 else if (INTEGRAL_TYPE_P (type)
5142 && TYPE_OVERFLOW_TRAPS (type))
5143 honor_trapv = true;
5144 }
5145 if (nary->length >= 2)
5146 rhs2 = nary->op[1];
5147 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5148 honor_trapv,
5149 honor_nans, honor_snans, rhs2,
5150 &handled);
5151 if (handled
5152 && ret)
5153 return true;
5154
5155 for (i = 0; i < nary->length; ++i)
5156 if (tree_could_trap_p (nary->op[i]))
5157 return true;
5158
5159 return false;
5160 }
5161
5162 /* Return true if the reference operation REF may trap. */
5163
5164 bool
5165 vn_reference_may_trap (vn_reference_t ref)
5166 {
5167 switch (ref->operands[0].opcode)
5168 {
5169 case MODIFY_EXPR:
5170 case CALL_EXPR:
5171 /* We do not handle calls. */
5172 case ADDR_EXPR:
5173 /* And toplevel address computations never trap. */
5174 return false;
5175 default:;
5176 }
5177
5178 vn_reference_op_t op;
5179 unsigned i;
5180 FOR_EACH_VEC_ELT (ref->operands, i, op)
5181 {
5182 switch (op->opcode)
5183 {
5184 case WITH_SIZE_EXPR:
5185 case TARGET_MEM_REF:
5186 /* Always variable. */
5187 return true;
5188 case COMPONENT_REF:
5189 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5190 return true;
5191 break;
5192 case ARRAY_RANGE_REF:
5193 case ARRAY_REF:
5194 if (TREE_CODE (op->op0) == SSA_NAME)
5195 return true;
5196 break;
5197 case MEM_REF:
5198 /* Nothing interesting in itself, the base is separate. */
5199 break;
5200 /* The following are the address bases. */
5201 case SSA_NAME:
5202 return true;
5203 case ADDR_EXPR:
5204 if (op->op0)
5205 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5206 return false;
5207 default:;
5208 }
5209 }
5210 return false;
5211 }
5212
5213 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5214 bitmap inserted_exprs_)
5215 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5216 el_todo (0), eliminations (0), insertions (0),
5217 inserted_exprs (inserted_exprs_)
5218 {
5219 need_eh_cleanup = BITMAP_ALLOC (NULL);
5220 need_ab_cleanup = BITMAP_ALLOC (NULL);
5221 }
5222
5223 eliminate_dom_walker::~eliminate_dom_walker ()
5224 {
5225 BITMAP_FREE (need_eh_cleanup);
5226 BITMAP_FREE (need_ab_cleanup);
5227 }
5228
5229 /* Return a leader for OP that is available at the current point of the
5230 eliminate domwalk. */
5231
5232 tree
5233 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5234 {
5235 tree valnum = VN_INFO (op)->valnum;
5236 if (TREE_CODE (valnum) == SSA_NAME)
5237 {
5238 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5239 return valnum;
5240 if (avail.length () > SSA_NAME_VERSION (valnum))
5241 return avail[SSA_NAME_VERSION (valnum)];
5242 }
5243 else if (is_gimple_min_invariant (valnum))
5244 return valnum;
5245 return NULL_TREE;
5246 }
5247
5248 /* At the current point of the eliminate domwalk make OP available. */
5249
5250 void
5251 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5252 {
5253 tree valnum = VN_INFO (op)->valnum;
5254 if (TREE_CODE (valnum) == SSA_NAME)
5255 {
5256 if (avail.length () <= SSA_NAME_VERSION (valnum))
5257 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5258 tree pushop = op;
5259 if (avail[SSA_NAME_VERSION (valnum)])
5260 pushop = avail[SSA_NAME_VERSION (valnum)];
5261 avail_stack.safe_push (pushop);
5262 avail[SSA_NAME_VERSION (valnum)] = op;
5263 }
5264 }
5265
5266 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5267 the leader for the expression if insertion was successful. */
5268
5269 tree
5270 eliminate_dom_walker::eliminate_insert (basic_block bb,
5271 gimple_stmt_iterator *gsi, tree val)
5272 {
5273 /* We can insert a sequence with a single assignment only. */
5274 gimple_seq stmts = VN_INFO (val)->expr;
5275 if (!gimple_seq_singleton_p (stmts))
5276 return NULL_TREE;
5277 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5278 if (!stmt
5279 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5280 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5281 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5282 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5283 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5284 return NULL_TREE;
5285
5286 tree op = gimple_assign_rhs1 (stmt);
5287 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5288 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5289 op = TREE_OPERAND (op, 0);
5290 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5291 if (!leader)
5292 return NULL_TREE;
5293
5294 tree res;
5295 stmts = NULL;
5296 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5297 res = gimple_build (&stmts, BIT_FIELD_REF,
5298 TREE_TYPE (val), leader,
5299 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5300 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5301 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5302 res = gimple_build (&stmts, BIT_AND_EXPR,
5303 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5304 else
5305 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5306 TREE_TYPE (val), leader);
5307 if (TREE_CODE (res) != SSA_NAME
5308 || SSA_NAME_IS_DEFAULT_DEF (res)
5309 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5310 {
5311 gimple_seq_discard (stmts);
5312
5313 /* During propagation we have to treat SSA info conservatively
5314 and thus we can end up simplifying the inserted expression
5315 at elimination time to sth not defined in stmts. */
5316 /* But then this is a redundancy we failed to detect. Which means
5317 res now has two values. That doesn't play well with how
5318 we track availability here, so give up. */
5319 if (dump_file && (dump_flags & TDF_DETAILS))
5320 {
5321 if (TREE_CODE (res) == SSA_NAME)
5322 res = eliminate_avail (bb, res);
5323 if (res)
5324 {
5325 fprintf (dump_file, "Failed to insert expression for value ");
5326 print_generic_expr (dump_file, val);
5327 fprintf (dump_file, " which is really fully redundant to ");
5328 print_generic_expr (dump_file, res);
5329 fprintf (dump_file, "\n");
5330 }
5331 }
5332
5333 return NULL_TREE;
5334 }
5335 else
5336 {
5337 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5338 VN_INFO (res)->valnum = val;
5339 VN_INFO (res)->visited = true;
5340 }
5341
5342 insertions++;
5343 if (dump_file && (dump_flags & TDF_DETAILS))
5344 {
5345 fprintf (dump_file, "Inserted ");
5346 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5347 }
5348
5349 return res;
5350 }
5351
5352 void
5353 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5354 {
5355 tree sprime = NULL_TREE;
5356 gimple *stmt = gsi_stmt (*gsi);
5357 tree lhs = gimple_get_lhs (stmt);
5358 if (lhs && TREE_CODE (lhs) == SSA_NAME
5359 && !gimple_has_volatile_ops (stmt)
5360 /* See PR43491. Do not replace a global register variable when
5361 it is a the RHS of an assignment. Do replace local register
5362 variables since gcc does not guarantee a local variable will
5363 be allocated in register.
5364 ??? The fix isn't effective here. This should instead
5365 be ensured by not value-numbering them the same but treating
5366 them like volatiles? */
5367 && !(gimple_assign_single_p (stmt)
5368 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5369 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5370 && is_global_var (gimple_assign_rhs1 (stmt)))))
5371 {
5372 sprime = eliminate_avail (b, lhs);
5373 if (!sprime)
5374 {
5375 /* If there is no existing usable leader but SCCVN thinks
5376 it has an expression it wants to use as replacement,
5377 insert that. */
5378 tree val = VN_INFO (lhs)->valnum;
5379 if (val != VN_TOP
5380 && TREE_CODE (val) == SSA_NAME
5381 && VN_INFO (val)->needs_insertion
5382 && VN_INFO (val)->expr != NULL
5383 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5384 eliminate_push_avail (b, sprime);
5385 }
5386
5387 /* If this now constitutes a copy duplicate points-to
5388 and range info appropriately. This is especially
5389 important for inserted code. See tree-ssa-copy.c
5390 for similar code. */
5391 if (sprime
5392 && TREE_CODE (sprime) == SSA_NAME)
5393 {
5394 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5395 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5396 && SSA_NAME_PTR_INFO (lhs)
5397 && ! SSA_NAME_PTR_INFO (sprime))
5398 {
5399 duplicate_ssa_name_ptr_info (sprime,
5400 SSA_NAME_PTR_INFO (lhs));
5401 if (b != sprime_b)
5402 mark_ptr_info_alignment_unknown
5403 (SSA_NAME_PTR_INFO (sprime));
5404 }
5405 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5406 && SSA_NAME_RANGE_INFO (lhs)
5407 && ! SSA_NAME_RANGE_INFO (sprime)
5408 && b == sprime_b)
5409 duplicate_ssa_name_range_info (sprime,
5410 SSA_NAME_RANGE_TYPE (lhs),
5411 SSA_NAME_RANGE_INFO (lhs));
5412 }
5413
5414 /* Inhibit the use of an inserted PHI on a loop header when
5415 the address of the memory reference is a simple induction
5416 variable. In other cases the vectorizer won't do anything
5417 anyway (either it's loop invariant or a complicated
5418 expression). */
5419 if (sprime
5420 && TREE_CODE (sprime) == SSA_NAME
5421 && do_pre
5422 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5423 && loop_outer (b->loop_father)
5424 && has_zero_uses (sprime)
5425 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5426 && gimple_assign_load_p (stmt))
5427 {
5428 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5429 basic_block def_bb = gimple_bb (def_stmt);
5430 if (gimple_code (def_stmt) == GIMPLE_PHI
5431 && def_bb->loop_father->header == def_bb)
5432 {
5433 loop_p loop = def_bb->loop_father;
5434 ssa_op_iter iter;
5435 tree op;
5436 bool found = false;
5437 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5438 {
5439 affine_iv iv;
5440 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5441 if (def_bb
5442 && flow_bb_inside_loop_p (loop, def_bb)
5443 && simple_iv (loop, loop, op, &iv, true))
5444 {
5445 found = true;
5446 break;
5447 }
5448 }
5449 if (found)
5450 {
5451 if (dump_file && (dump_flags & TDF_DETAILS))
5452 {
5453 fprintf (dump_file, "Not replacing ");
5454 print_gimple_expr (dump_file, stmt, 0);
5455 fprintf (dump_file, " with ");
5456 print_generic_expr (dump_file, sprime);
5457 fprintf (dump_file, " which would add a loop"
5458 " carried dependence to loop %d\n",
5459 loop->num);
5460 }
5461 /* Don't keep sprime available. */
5462 sprime = NULL_TREE;
5463 }
5464 }
5465 }
5466
5467 if (sprime)
5468 {
5469 /* If we can propagate the value computed for LHS into
5470 all uses don't bother doing anything with this stmt. */
5471 if (may_propagate_copy (lhs, sprime))
5472 {
5473 /* Mark it for removal. */
5474 to_remove.safe_push (stmt);
5475
5476 /* ??? Don't count copy/constant propagations. */
5477 if (gimple_assign_single_p (stmt)
5478 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5479 || gimple_assign_rhs1 (stmt) == sprime))
5480 return;
5481
5482 if (dump_file && (dump_flags & TDF_DETAILS))
5483 {
5484 fprintf (dump_file, "Replaced ");
5485 print_gimple_expr (dump_file, stmt, 0);
5486 fprintf (dump_file, " with ");
5487 print_generic_expr (dump_file, sprime);
5488 fprintf (dump_file, " in all uses of ");
5489 print_gimple_stmt (dump_file, stmt, 0);
5490 }
5491
5492 eliminations++;
5493 return;
5494 }
5495
5496 /* If this is an assignment from our leader (which
5497 happens in the case the value-number is a constant)
5498 then there is nothing to do. */
5499 if (gimple_assign_single_p (stmt)
5500 && sprime == gimple_assign_rhs1 (stmt))
5501 return;
5502
5503 /* Else replace its RHS. */
5504 if (dump_file && (dump_flags & TDF_DETAILS))
5505 {
5506 fprintf (dump_file, "Replaced ");
5507 print_gimple_expr (dump_file, stmt, 0);
5508 fprintf (dump_file, " with ");
5509 print_generic_expr (dump_file, sprime);
5510 fprintf (dump_file, " in ");
5511 print_gimple_stmt (dump_file, stmt, 0);
5512 }
5513 eliminations++;
5514
5515 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5516 && stmt_can_make_abnormal_goto (stmt));
5517 gimple *orig_stmt = stmt;
5518 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5519 TREE_TYPE (sprime)))
5520 {
5521 /* We preserve conversions to but not from function or method
5522 types. This asymmetry makes it necessary to re-instantiate
5523 conversions here. */
5524 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5525 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5526 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5527 else
5528 gcc_unreachable ();
5529 }
5530 tree vdef = gimple_vdef (stmt);
5531 tree vuse = gimple_vuse (stmt);
5532 propagate_tree_value_into_stmt (gsi, sprime);
5533 stmt = gsi_stmt (*gsi);
5534 update_stmt (stmt);
5535 /* In case the VDEF on the original stmt was released, value-number
5536 it to the VUSE. This is to make vuse_ssa_val able to skip
5537 released virtual operands. */
5538 if (vdef != gimple_vdef (stmt))
5539 {
5540 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5541 VN_INFO (vdef)->valnum = vuse;
5542 }
5543
5544 /* If we removed EH side-effects from the statement, clean
5545 its EH information. */
5546 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5547 {
5548 bitmap_set_bit (need_eh_cleanup,
5549 gimple_bb (stmt)->index);
5550 if (dump_file && (dump_flags & TDF_DETAILS))
5551 fprintf (dump_file, " Removed EH side-effects.\n");
5552 }
5553
5554 /* Likewise for AB side-effects. */
5555 if (can_make_abnormal_goto
5556 && !stmt_can_make_abnormal_goto (stmt))
5557 {
5558 bitmap_set_bit (need_ab_cleanup,
5559 gimple_bb (stmt)->index);
5560 if (dump_file && (dump_flags & TDF_DETAILS))
5561 fprintf (dump_file, " Removed AB side-effects.\n");
5562 }
5563
5564 return;
5565 }
5566 }
5567
5568 /* If the statement is a scalar store, see if the expression
5569 has the same value number as its rhs. If so, the store is
5570 dead. */
5571 if (gimple_assign_single_p (stmt)
5572 && !gimple_has_volatile_ops (stmt)
5573 && !is_gimple_reg (gimple_assign_lhs (stmt))
5574 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5575 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5576 {
5577 tree val;
5578 tree rhs = gimple_assign_rhs1 (stmt);
5579 vn_reference_t vnresult;
5580 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5581 &vnresult, false);
5582 if (TREE_CODE (rhs) == SSA_NAME)
5583 rhs = VN_INFO (rhs)->valnum;
5584 if (val
5585 && operand_equal_p (val, rhs, 0))
5586 {
5587 /* We can only remove the later store if the former aliases
5588 at least all accesses the later one does or if the store
5589 was to readonly memory storing the same value. */
5590 alias_set_type set = get_alias_set (lhs);
5591 if (! vnresult
5592 || vnresult->set == set
5593 || alias_set_subset_of (set, vnresult->set))
5594 {
5595 if (dump_file && (dump_flags & TDF_DETAILS))
5596 {
5597 fprintf (dump_file, "Deleted redundant store ");
5598 print_gimple_stmt (dump_file, stmt, 0);
5599 }
5600
5601 /* Queue stmt for removal. */
5602 to_remove.safe_push (stmt);
5603 return;
5604 }
5605 }
5606 }
5607
5608 /* If this is a control statement value numbering left edges
5609 unexecuted on force the condition in a way consistent with
5610 that. */
5611 if (gcond *cond = dyn_cast <gcond *> (stmt))
5612 {
5613 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5614 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5615 {
5616 if (dump_file && (dump_flags & TDF_DETAILS))
5617 {
5618 fprintf (dump_file, "Removing unexecutable edge from ");
5619 print_gimple_stmt (dump_file, stmt, 0);
5620 }
5621 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5622 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5623 gimple_cond_make_true (cond);
5624 else
5625 gimple_cond_make_false (cond);
5626 update_stmt (cond);
5627 el_todo |= TODO_cleanup_cfg;
5628 return;
5629 }
5630 }
5631
5632 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5633 bool was_noreturn = (is_gimple_call (stmt)
5634 && gimple_call_noreturn_p (stmt));
5635 tree vdef = gimple_vdef (stmt);
5636 tree vuse = gimple_vuse (stmt);
5637
5638 /* If we didn't replace the whole stmt (or propagate the result
5639 into all uses), replace all uses on this stmt with their
5640 leaders. */
5641 bool modified = false;
5642 use_operand_p use_p;
5643 ssa_op_iter iter;
5644 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5645 {
5646 tree use = USE_FROM_PTR (use_p);
5647 /* ??? The call code above leaves stmt operands un-updated. */
5648 if (TREE_CODE (use) != SSA_NAME)
5649 continue;
5650 tree sprime;
5651 if (SSA_NAME_IS_DEFAULT_DEF (use))
5652 /* ??? For default defs BB shouldn't matter, but we have to
5653 solve the inconsistency between rpo eliminate and
5654 dom eliminate avail valueization first. */
5655 sprime = eliminate_avail (b, use);
5656 else
5657 /* Look for sth available at the definition block of the argument.
5658 This avoids inconsistencies between availability there which
5659 decides if the stmt can be removed and availability at the
5660 use site. The SSA property ensures that things available
5661 at the definition are also available at uses. */
5662 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5663 if (sprime && sprime != use
5664 && may_propagate_copy (use, sprime)
5665 /* We substitute into debug stmts to avoid excessive
5666 debug temporaries created by removed stmts, but we need
5667 to avoid doing so for inserted sprimes as we never want
5668 to create debug temporaries for them. */
5669 && (!inserted_exprs
5670 || TREE_CODE (sprime) != SSA_NAME
5671 || !is_gimple_debug (stmt)
5672 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5673 {
5674 propagate_value (use_p, sprime);
5675 modified = true;
5676 }
5677 }
5678
5679 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
5680 into which is a requirement for the IPA devirt machinery. */
5681 gimple *old_stmt = stmt;
5682 if (modified)
5683 {
5684 /* If a formerly non-invariant ADDR_EXPR is turned into an
5685 invariant one it was on a separate stmt. */
5686 if (gimple_assign_single_p (stmt)
5687 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5688 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5689 gimple_stmt_iterator prev = *gsi;
5690 gsi_prev (&prev);
5691 if (fold_stmt (gsi))
5692 {
5693 /* fold_stmt may have created new stmts inbetween
5694 the previous stmt and the folded stmt. Mark
5695 all defs created there as varying to not confuse
5696 the SCCVN machinery as we're using that even during
5697 elimination. */
5698 if (gsi_end_p (prev))
5699 prev = gsi_start_bb (b);
5700 else
5701 gsi_next (&prev);
5702 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5703 do
5704 {
5705 tree def;
5706 ssa_op_iter dit;
5707 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5708 dit, SSA_OP_ALL_DEFS)
5709 /* As existing DEFs may move between stmts
5710 only process new ones. */
5711 if (! has_VN_INFO (def))
5712 {
5713 VN_INFO (def)->valnum = def;
5714 VN_INFO (def)->visited = true;
5715 }
5716 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5717 break;
5718 gsi_next (&prev);
5719 }
5720 while (1);
5721 }
5722 stmt = gsi_stmt (*gsi);
5723 /* In case we folded the stmt away schedule the NOP for removal. */
5724 if (gimple_nop_p (stmt))
5725 to_remove.safe_push (stmt);
5726 }
5727
5728 /* Visit indirect calls and turn them into direct calls if
5729 possible using the devirtualization machinery. Do this before
5730 checking for required EH/abnormal/noreturn cleanup as devird
5731 may expose more of those. */
5732 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5733 {
5734 tree fn = gimple_call_fn (call_stmt);
5735 if (fn
5736 && flag_devirtualize
5737 && virtual_method_call_p (fn))
5738 {
5739 tree otr_type = obj_type_ref_class (fn);
5740 unsigned HOST_WIDE_INT otr_tok
5741 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5742 tree instance;
5743 ipa_polymorphic_call_context context (current_function_decl,
5744 fn, stmt, &instance);
5745 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5746 otr_type, stmt, NULL);
5747 bool final;
5748 vec <cgraph_node *> targets
5749 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5750 otr_tok, context, &final);
5751 if (dump_file)
5752 dump_possible_polymorphic_call_targets (dump_file,
5753 obj_type_ref_class (fn),
5754 otr_tok, context);
5755 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5756 {
5757 tree fn;
5758 if (targets.length () == 1)
5759 fn = targets[0]->decl;
5760 else
5761 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5762 if (dump_enabled_p ())
5763 {
5764 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5765 "converting indirect call to "
5766 "function %s\n",
5767 lang_hooks.decl_printable_name (fn, 2));
5768 }
5769 gimple_call_set_fndecl (call_stmt, fn);
5770 /* If changing the call to __builtin_unreachable
5771 or similar noreturn function, adjust gimple_call_fntype
5772 too. */
5773 if (gimple_call_noreturn_p (call_stmt)
5774 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5775 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5776 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5777 == void_type_node))
5778 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5779 maybe_remove_unused_call_args (cfun, call_stmt);
5780 modified = true;
5781 }
5782 }
5783 }
5784
5785 if (modified)
5786 {
5787 /* When changing a call into a noreturn call, cfg cleanup
5788 is needed to fix up the noreturn call. */
5789 if (!was_noreturn
5790 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5791 to_fixup.safe_push (stmt);
5792 /* When changing a condition or switch into one we know what
5793 edge will be executed, schedule a cfg cleanup. */
5794 if ((gimple_code (stmt) == GIMPLE_COND
5795 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5796 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5797 || (gimple_code (stmt) == GIMPLE_SWITCH
5798 && TREE_CODE (gimple_switch_index
5799 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5800 el_todo |= TODO_cleanup_cfg;
5801 /* If we removed EH side-effects from the statement, clean
5802 its EH information. */
5803 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5804 {
5805 bitmap_set_bit (need_eh_cleanup,
5806 gimple_bb (stmt)->index);
5807 if (dump_file && (dump_flags & TDF_DETAILS))
5808 fprintf (dump_file, " Removed EH side-effects.\n");
5809 }
5810 /* Likewise for AB side-effects. */
5811 if (can_make_abnormal_goto
5812 && !stmt_can_make_abnormal_goto (stmt))
5813 {
5814 bitmap_set_bit (need_ab_cleanup,
5815 gimple_bb (stmt)->index);
5816 if (dump_file && (dump_flags & TDF_DETAILS))
5817 fprintf (dump_file, " Removed AB side-effects.\n");
5818 }
5819 update_stmt (stmt);
5820 /* In case the VDEF on the original stmt was released, value-number
5821 it to the VUSE. This is to make vuse_ssa_val able to skip
5822 released virtual operands. */
5823 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5824 VN_INFO (vdef)->valnum = vuse;
5825 }
5826
5827 /* Make new values available - for fully redundant LHS we
5828 continue with the next stmt above and skip this. */
5829 def_operand_p defp;
5830 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5831 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5832 }
5833
5834 /* Perform elimination for the basic-block B during the domwalk. */
5835
5836 edge
5837 eliminate_dom_walker::before_dom_children (basic_block b)
5838 {
5839 /* Mark new bb. */
5840 avail_stack.safe_push (NULL_TREE);
5841
5842 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5843 if (!(b->flags & BB_EXECUTABLE))
5844 return NULL;
5845
5846 vn_context_bb = b;
5847
5848 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5849 {
5850 gphi *phi = gsi.phi ();
5851 tree res = PHI_RESULT (phi);
5852
5853 if (virtual_operand_p (res))
5854 {
5855 gsi_next (&gsi);
5856 continue;
5857 }
5858
5859 tree sprime = eliminate_avail (b, res);
5860 if (sprime
5861 && sprime != res)
5862 {
5863 if (dump_file && (dump_flags & TDF_DETAILS))
5864 {
5865 fprintf (dump_file, "Replaced redundant PHI node defining ");
5866 print_generic_expr (dump_file, res);
5867 fprintf (dump_file, " with ");
5868 print_generic_expr (dump_file, sprime);
5869 fprintf (dump_file, "\n");
5870 }
5871
5872 /* If we inserted this PHI node ourself, it's not an elimination. */
5873 if (! inserted_exprs
5874 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5875 eliminations++;
5876
5877 /* If we will propagate into all uses don't bother to do
5878 anything. */
5879 if (may_propagate_copy (res, sprime))
5880 {
5881 /* Mark the PHI for removal. */
5882 to_remove.safe_push (phi);
5883 gsi_next (&gsi);
5884 continue;
5885 }
5886
5887 remove_phi_node (&gsi, false);
5888
5889 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5890 sprime = fold_convert (TREE_TYPE (res), sprime);
5891 gimple *stmt = gimple_build_assign (res, sprime);
5892 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5893 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5894 continue;
5895 }
5896
5897 eliminate_push_avail (b, res);
5898 gsi_next (&gsi);
5899 }
5900
5901 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5902 !gsi_end_p (gsi);
5903 gsi_next (&gsi))
5904 eliminate_stmt (b, &gsi);
5905
5906 /* Replace destination PHI arguments. */
5907 edge_iterator ei;
5908 edge e;
5909 FOR_EACH_EDGE (e, ei, b->succs)
5910 if (e->flags & EDGE_EXECUTABLE)
5911 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5912 !gsi_end_p (gsi);
5913 gsi_next (&gsi))
5914 {
5915 gphi *phi = gsi.phi ();
5916 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5917 tree arg = USE_FROM_PTR (use_p);
5918 if (TREE_CODE (arg) != SSA_NAME
5919 || virtual_operand_p (arg))
5920 continue;
5921 tree sprime = eliminate_avail (b, arg);
5922 if (sprime && may_propagate_copy (arg, sprime))
5923 propagate_value (use_p, sprime);
5924 }
5925
5926 vn_context_bb = NULL;
5927
5928 return NULL;
5929 }
5930
5931 /* Make no longer available leaders no longer available. */
5932
5933 void
5934 eliminate_dom_walker::after_dom_children (basic_block)
5935 {
5936 tree entry;
5937 while ((entry = avail_stack.pop ()) != NULL_TREE)
5938 {
5939 tree valnum = VN_INFO (entry)->valnum;
5940 tree old = avail[SSA_NAME_VERSION (valnum)];
5941 if (old == entry)
5942 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5943 else
5944 avail[SSA_NAME_VERSION (valnum)] = entry;
5945 }
5946 }
5947
5948 /* Remove queued stmts and perform delayed cleanups. */
5949
5950 unsigned
5951 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5952 {
5953 statistics_counter_event (cfun, "Eliminated", eliminations);
5954 statistics_counter_event (cfun, "Insertions", insertions);
5955
5956 /* We cannot remove stmts during BB walk, especially not release SSA
5957 names there as this confuses the VN machinery. The stmts ending
5958 up in to_remove are either stores or simple copies.
5959 Remove stmts in reverse order to make debug stmt creation possible. */
5960 while (!to_remove.is_empty ())
5961 {
5962 bool do_release_defs = true;
5963 gimple *stmt = to_remove.pop ();
5964
5965 /* When we are value-numbering a region we do not require exit PHIs to
5966 be present so we have to make sure to deal with uses outside of the
5967 region of stmts that we thought are eliminated.
5968 ??? Note we may be confused by uses in dead regions we didn't run
5969 elimination on. Rather than checking individual uses we accept
5970 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5971 contains such example). */
5972 if (region_p)
5973 {
5974 if (gphi *phi = dyn_cast <gphi *> (stmt))
5975 {
5976 tree lhs = gimple_phi_result (phi);
5977 if (!has_zero_uses (lhs))
5978 {
5979 if (dump_file && (dump_flags & TDF_DETAILS))
5980 fprintf (dump_file, "Keeping eliminated stmt live "
5981 "as copy because of out-of-region uses\n");
5982 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5983 gimple *copy = gimple_build_assign (lhs, sprime);
5984 gimple_stmt_iterator gsi
5985 = gsi_after_labels (gimple_bb (stmt));
5986 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5987 do_release_defs = false;
5988 }
5989 }
5990 else if (tree lhs = gimple_get_lhs (stmt))
5991 if (TREE_CODE (lhs) == SSA_NAME
5992 && !has_zero_uses (lhs))
5993 {
5994 if (dump_file && (dump_flags & TDF_DETAILS))
5995 fprintf (dump_file, "Keeping eliminated stmt live "
5996 "as copy because of out-of-region uses\n");
5997 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5998 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5999 if (is_gimple_assign (stmt))
6000 {
6001 gimple_assign_set_rhs_from_tree (&gsi, sprime);
6002 stmt = gsi_stmt (gsi);
6003 update_stmt (stmt);
6004 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6005 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6006 continue;
6007 }
6008 else
6009 {
6010 gimple *copy = gimple_build_assign (lhs, sprime);
6011 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6012 do_release_defs = false;
6013 }
6014 }
6015 }
6016
6017 if (dump_file && (dump_flags & TDF_DETAILS))
6018 {
6019 fprintf (dump_file, "Removing dead stmt ");
6020 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6021 }
6022
6023 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6024 if (gimple_code (stmt) == GIMPLE_PHI)
6025 remove_phi_node (&gsi, do_release_defs);
6026 else
6027 {
6028 basic_block bb = gimple_bb (stmt);
6029 unlink_stmt_vdef (stmt);
6030 if (gsi_remove (&gsi, true))
6031 bitmap_set_bit (need_eh_cleanup, bb->index);
6032 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6033 bitmap_set_bit (need_ab_cleanup, bb->index);
6034 if (do_release_defs)
6035 release_defs (stmt);
6036 }
6037
6038 /* Removing a stmt may expose a forwarder block. */
6039 el_todo |= TODO_cleanup_cfg;
6040 }
6041
6042 /* Fixup stmts that became noreturn calls. This may require splitting
6043 blocks and thus isn't possible during the dominator walk. Do this
6044 in reverse order so we don't inadvertedly remove a stmt we want to
6045 fixup by visiting a dominating now noreturn call first. */
6046 while (!to_fixup.is_empty ())
6047 {
6048 gimple *stmt = to_fixup.pop ();
6049
6050 if (dump_file && (dump_flags & TDF_DETAILS))
6051 {
6052 fprintf (dump_file, "Fixing up noreturn call ");
6053 print_gimple_stmt (dump_file, stmt, 0);
6054 }
6055
6056 if (fixup_noreturn_call (stmt))
6057 el_todo |= TODO_cleanup_cfg;
6058 }
6059
6060 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6061 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6062
6063 if (do_eh_cleanup)
6064 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6065
6066 if (do_ab_cleanup)
6067 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6068
6069 if (do_eh_cleanup || do_ab_cleanup)
6070 el_todo |= TODO_cleanup_cfg;
6071
6072 return el_todo;
6073 }
6074
6075 /* Eliminate fully redundant computations. */
6076
6077 unsigned
6078 eliminate_with_rpo_vn (bitmap inserted_exprs)
6079 {
6080 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6081
6082 walker.walk (cfun->cfg->x_entry_block_ptr);
6083 return walker.eliminate_cleanup ();
6084 }
6085
6086 static unsigned
6087 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6088 bool iterate, bool eliminate);
6089
6090 void
6091 run_rpo_vn (vn_lookup_kind kind)
6092 {
6093 default_vn_walk_kind = kind;
6094 do_rpo_vn (cfun, NULL, NULL, true, false);
6095
6096 /* ??? Prune requirement of these. */
6097 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6098 constant_value_ids = BITMAP_ALLOC (NULL);
6099
6100 /* Initialize the value ids and prune out remaining VN_TOPs
6101 from dead code. */
6102 tree name;
6103 unsigned i;
6104 FOR_EACH_SSA_NAME (i, name, cfun)
6105 {
6106 vn_ssa_aux_t info = VN_INFO (name);
6107 if (!info->visited
6108 || info->valnum == VN_TOP)
6109 info->valnum = name;
6110 if (info->valnum == name)
6111 info->value_id = get_next_value_id ();
6112 else if (is_gimple_min_invariant (info->valnum))
6113 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6114 }
6115
6116 /* Propagate. */
6117 FOR_EACH_SSA_NAME (i, name, cfun)
6118 {
6119 vn_ssa_aux_t info = VN_INFO (name);
6120 if (TREE_CODE (info->valnum) == SSA_NAME
6121 && info->valnum != name
6122 && info->value_id != VN_INFO (info->valnum)->value_id)
6123 info->value_id = VN_INFO (info->valnum)->value_id;
6124 }
6125
6126 set_hashtable_value_ids ();
6127
6128 if (dump_file && (dump_flags & TDF_DETAILS))
6129 {
6130 fprintf (dump_file, "Value numbers:\n");
6131 FOR_EACH_SSA_NAME (i, name, cfun)
6132 {
6133 if (VN_INFO (name)->visited
6134 && SSA_VAL (name) != name)
6135 {
6136 print_generic_expr (dump_file, name);
6137 fprintf (dump_file, " = ");
6138 print_generic_expr (dump_file, SSA_VAL (name));
6139 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6140 }
6141 }
6142 }
6143 }
6144
6145 /* Free VN associated data structures. */
6146
6147 void
6148 free_rpo_vn (void)
6149 {
6150 free_vn_table (valid_info);
6151 XDELETE (valid_info);
6152 obstack_free (&vn_tables_obstack, NULL);
6153 obstack_free (&vn_tables_insert_obstack, NULL);
6154
6155 vn_ssa_aux_iterator_type it;
6156 vn_ssa_aux_t info;
6157 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6158 if (info->needs_insertion)
6159 release_ssa_name (info->name);
6160 obstack_free (&vn_ssa_aux_obstack, NULL);
6161 delete vn_ssa_aux_hash;
6162
6163 delete constant_to_value_id;
6164 constant_to_value_id = NULL;
6165 BITMAP_FREE (constant_value_ids);
6166 }
6167
6168 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
6169
6170 static tree
6171 vn_lookup_simplify_result (gimple_match_op *res_op)
6172 {
6173 if (!res_op->code.is_tree_code ())
6174 return NULL_TREE;
6175 tree *ops = res_op->ops;
6176 unsigned int length = res_op->num_ops;
6177 if (res_op->code == CONSTRUCTOR
6178 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
6179 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
6180 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6181 {
6182 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6183 ops = XALLOCAVEC (tree, length);
6184 for (unsigned i = 0; i < length; ++i)
6185 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6186 }
6187 vn_nary_op_t vnresult = NULL;
6188 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6189 res_op->type, ops, &vnresult);
6190 /* If this is used from expression simplification make sure to
6191 return an available expression. */
6192 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6193 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6194 return res;
6195 }
6196
6197 /* Return a leader for OPs value that is valid at BB. */
6198
6199 tree
6200 rpo_elim::eliminate_avail (basic_block bb, tree op)
6201 {
6202 bool visited;
6203 tree valnum = SSA_VAL (op, &visited);
6204 /* If we didn't visit OP then it must be defined outside of the
6205 region we process and also dominate it. So it is available. */
6206 if (!visited)
6207 return op;
6208 if (TREE_CODE (valnum) == SSA_NAME)
6209 {
6210 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6211 return valnum;
6212 vn_avail *av = VN_INFO (valnum)->avail;
6213 if (!av)
6214 return NULL_TREE;
6215 if (av->location == bb->index)
6216 /* On tramp3d 90% of the cases are here. */
6217 return ssa_name (av->leader);
6218 do
6219 {
6220 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6221 /* ??? During elimination we have to use availability at the
6222 definition site of a use we try to replace. This
6223 is required to not run into inconsistencies because
6224 of dominated_by_p_w_unex behavior and removing a definition
6225 while not replacing all uses.
6226 ??? We could try to consistently walk dominators
6227 ignoring non-executable regions. The nearest common
6228 dominator of bb and abb is where we can stop walking. We
6229 may also be able to "pre-compute" (bits of) the next immediate
6230 (non-)dominator during the RPO walk when marking edges as
6231 executable. */
6232 if (dominated_by_p_w_unex (bb, abb))
6233 {
6234 tree leader = ssa_name (av->leader);
6235 /* Prevent eliminations that break loop-closed SSA. */
6236 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6237 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6238 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6239 (leader))->loop_father,
6240 bb))
6241 return NULL_TREE;
6242 if (dump_file && (dump_flags & TDF_DETAILS))
6243 {
6244 print_generic_expr (dump_file, leader);
6245 fprintf (dump_file, " is available for ");
6246 print_generic_expr (dump_file, valnum);
6247 fprintf (dump_file, "\n");
6248 }
6249 /* On tramp3d 99% of the _remaining_ cases succeed at
6250 the first enty. */
6251 return leader;
6252 }
6253 /* ??? Can we somehow skip to the immediate dominator
6254 RPO index (bb_to_rpo)? Again, maybe not worth, on
6255 tramp3d the worst number of elements in the vector is 9. */
6256 av = av->next;
6257 }
6258 while (av);
6259 }
6260 else if (valnum != VN_TOP)
6261 /* valnum is is_gimple_min_invariant. */
6262 return valnum;
6263 return NULL_TREE;
6264 }
6265
6266 /* Make LEADER a leader for its value at BB. */
6267
6268 void
6269 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6270 {
6271 tree valnum = VN_INFO (leader)->valnum;
6272 if (valnum == VN_TOP
6273 || is_gimple_min_invariant (valnum))
6274 return;
6275 if (dump_file && (dump_flags & TDF_DETAILS))
6276 {
6277 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6278 print_generic_expr (dump_file, leader);
6279 fprintf (dump_file, " for value ");
6280 print_generic_expr (dump_file, valnum);
6281 fprintf (dump_file, "\n");
6282 }
6283 vn_ssa_aux_t value = VN_INFO (valnum);
6284 vn_avail *av;
6285 if (m_avail_freelist)
6286 {
6287 av = m_avail_freelist;
6288 m_avail_freelist = m_avail_freelist->next;
6289 }
6290 else
6291 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6292 av->location = bb->index;
6293 av->leader = SSA_NAME_VERSION (leader);
6294 av->next = value->avail;
6295 value->avail = av;
6296 }
6297
6298 /* Valueization hook for RPO VN plus required state. */
6299
6300 tree
6301 rpo_vn_valueize (tree name)
6302 {
6303 if (TREE_CODE (name) == SSA_NAME)
6304 {
6305 vn_ssa_aux_t val = VN_INFO (name);
6306 if (val)
6307 {
6308 tree tem = val->valnum;
6309 if (tem != VN_TOP && tem != name)
6310 {
6311 if (TREE_CODE (tem) != SSA_NAME)
6312 return tem;
6313 /* For all values we only valueize to an available leader
6314 which means we can use SSA name info without restriction. */
6315 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6316 if (tem)
6317 return tem;
6318 }
6319 }
6320 }
6321 return name;
6322 }
6323
6324 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
6325 inverted condition. */
6326
6327 static void
6328 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6329 {
6330 switch (code)
6331 {
6332 case LT_EXPR:
6333 /* a < b -> a {!,<}= b */
6334 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6335 ops, boolean_true_node, 0, pred_e);
6336 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6337 ops, boolean_true_node, 0, pred_e);
6338 /* a < b -> ! a {>,=} b */
6339 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6340 ops, boolean_false_node, 0, pred_e);
6341 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6342 ops, boolean_false_node, 0, pred_e);
6343 break;
6344 case GT_EXPR:
6345 /* a > b -> a {!,>}= b */
6346 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6347 ops, boolean_true_node, 0, pred_e);
6348 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6349 ops, boolean_true_node, 0, pred_e);
6350 /* a > b -> ! a {<,=} b */
6351 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6352 ops, boolean_false_node, 0, pred_e);
6353 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6354 ops, boolean_false_node, 0, pred_e);
6355 break;
6356 case EQ_EXPR:
6357 /* a == b -> ! a {<,>} b */
6358 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6359 ops, boolean_false_node, 0, pred_e);
6360 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6361 ops, boolean_false_node, 0, pred_e);
6362 break;
6363 case LE_EXPR:
6364 case GE_EXPR:
6365 case NE_EXPR:
6366 /* Nothing besides inverted condition. */
6367 break;
6368 default:;
6369 }
6370 }
6371
6372 /* Main stmt worker for RPO VN, process BB. */
6373
6374 static unsigned
6375 process_bb (rpo_elim &avail, basic_block bb,
6376 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6377 bool do_region, bitmap exit_bbs, bool skip_phis)
6378 {
6379 unsigned todo = 0;
6380 edge_iterator ei;
6381 edge e;
6382
6383 vn_context_bb = bb;
6384
6385 /* If we are in loop-closed SSA preserve this state. This is
6386 relevant when called on regions from outside of FRE/PRE. */
6387 bool lc_phi_nodes = false;
6388 if (!skip_phis
6389 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6390 FOR_EACH_EDGE (e, ei, bb->preds)
6391 if (e->src->loop_father != e->dest->loop_father
6392 && flow_loop_nested_p (e->dest->loop_father,
6393 e->src->loop_father))
6394 {
6395 lc_phi_nodes = true;
6396 break;
6397 }
6398
6399 /* When we visit a loop header substitute into loop info. */
6400 if (!iterate && eliminate && bb->loop_father->header == bb)
6401 {
6402 /* Keep fields in sync with substitute_in_loop_info. */
6403 if (bb->loop_father->nb_iterations)
6404 bb->loop_father->nb_iterations
6405 = simplify_replace_tree (bb->loop_father->nb_iterations,
6406 NULL_TREE, NULL_TREE, vn_valueize);
6407 }
6408
6409 /* Value-number all defs in the basic-block. */
6410 if (!skip_phis)
6411 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6412 gsi_next (&gsi))
6413 {
6414 gphi *phi = gsi.phi ();
6415 tree res = PHI_RESULT (phi);
6416 vn_ssa_aux_t res_info = VN_INFO (res);
6417 if (!bb_visited)
6418 {
6419 gcc_assert (!res_info->visited);
6420 res_info->valnum = VN_TOP;
6421 res_info->visited = true;
6422 }
6423
6424 /* When not iterating force backedge values to varying. */
6425 visit_stmt (phi, !iterate_phis);
6426 if (virtual_operand_p (res))
6427 continue;
6428
6429 /* Eliminate */
6430 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
6431 how we handle backedges and availability.
6432 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
6433 tree val = res_info->valnum;
6434 if (res != val && !iterate && eliminate)
6435 {
6436 if (tree leader = avail.eliminate_avail (bb, res))
6437 {
6438 if (leader != res
6439 /* Preserve loop-closed SSA form. */
6440 && (! lc_phi_nodes
6441 || is_gimple_min_invariant (leader)))
6442 {
6443 if (dump_file && (dump_flags & TDF_DETAILS))
6444 {
6445 fprintf (dump_file, "Replaced redundant PHI node "
6446 "defining ");
6447 print_generic_expr (dump_file, res);
6448 fprintf (dump_file, " with ");
6449 print_generic_expr (dump_file, leader);
6450 fprintf (dump_file, "\n");
6451 }
6452 avail.eliminations++;
6453
6454 if (may_propagate_copy (res, leader))
6455 {
6456 /* Schedule for removal. */
6457 avail.to_remove.safe_push (phi);
6458 continue;
6459 }
6460 /* ??? Else generate a copy stmt. */
6461 }
6462 }
6463 }
6464 /* Only make defs available that not already are. But make
6465 sure loop-closed SSA PHI node defs are picked up for
6466 downstream uses. */
6467 if (lc_phi_nodes
6468 || res == val
6469 || ! avail.eliminate_avail (bb, res))
6470 avail.eliminate_push_avail (bb, res);
6471 }
6472
6473 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6474 we do this when processing the last stmt as we have to do this
6475 before elimination which otherwise forces GIMPLE_CONDs to
6476 if (1 != 0) style when seeing non-executable edges. */
6477 if (gsi_end_p (gsi_start_bb (bb)))
6478 {
6479 FOR_EACH_EDGE (e, ei, bb->succs)
6480 {
6481 if (!(e->flags & EDGE_EXECUTABLE))
6482 {
6483 if (dump_file && (dump_flags & TDF_DETAILS))
6484 fprintf (dump_file,
6485 "marking outgoing edge %d -> %d executable\n",
6486 e->src->index, e->dest->index);
6487 e->flags |= EDGE_EXECUTABLE;
6488 e->dest->flags |= BB_EXECUTABLE;
6489 }
6490 else if (!(e->dest->flags & BB_EXECUTABLE))
6491 {
6492 if (dump_file && (dump_flags & TDF_DETAILS))
6493 fprintf (dump_file,
6494 "marking destination block %d reachable\n",
6495 e->dest->index);
6496 e->dest->flags |= BB_EXECUTABLE;
6497 }
6498 }
6499 }
6500 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6501 !gsi_end_p (gsi); gsi_next (&gsi))
6502 {
6503 ssa_op_iter i;
6504 tree op;
6505 if (!bb_visited)
6506 {
6507 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6508 {
6509 vn_ssa_aux_t op_info = VN_INFO (op);
6510 gcc_assert (!op_info->visited);
6511 op_info->valnum = VN_TOP;
6512 op_info->visited = true;
6513 }
6514
6515 /* We somehow have to deal with uses that are not defined
6516 in the processed region. Forcing unvisited uses to
6517 varying here doesn't play well with def-use following during
6518 expression simplification, so we deal with this by checking
6519 the visited flag in SSA_VAL. */
6520 }
6521
6522 visit_stmt (gsi_stmt (gsi));
6523
6524 gimple *last = gsi_stmt (gsi);
6525 e = NULL;
6526 switch (gimple_code (last))
6527 {
6528 case GIMPLE_SWITCH:
6529 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6530 (as_a <gswitch *> (last))));
6531 break;
6532 case GIMPLE_COND:
6533 {
6534 tree lhs = vn_valueize (gimple_cond_lhs (last));
6535 tree rhs = vn_valueize (gimple_cond_rhs (last));
6536 tree val = gimple_simplify (gimple_cond_code (last),
6537 boolean_type_node, lhs, rhs,
6538 NULL, vn_valueize);
6539 /* If the condition didn't simplfy see if we have recorded
6540 an expression from sofar taken edges. */
6541 if (! val || TREE_CODE (val) != INTEGER_CST)
6542 {
6543 vn_nary_op_t vnresult;
6544 tree ops[2];
6545 ops[0] = lhs;
6546 ops[1] = rhs;
6547 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6548 boolean_type_node, ops,
6549 &vnresult);
6550 /* Did we get a predicated value? */
6551 if (! val && vnresult && vnresult->predicated_values)
6552 {
6553 val = vn_nary_op_get_predicated_value (vnresult, bb);
6554 if (val && dump_file && (dump_flags & TDF_DETAILS))
6555 {
6556 fprintf (dump_file, "Got predicated value ");
6557 print_generic_expr (dump_file, val, TDF_NONE);
6558 fprintf (dump_file, " for ");
6559 print_gimple_stmt (dump_file, last, TDF_SLIM);
6560 }
6561 }
6562 }
6563 if (val)
6564 e = find_taken_edge (bb, val);
6565 if (! e)
6566 {
6567 /* If we didn't manage to compute the taken edge then
6568 push predicated expressions for the condition itself
6569 and related conditions to the hashtables. This allows
6570 simplification of redundant conditions which is
6571 important as early cleanup. */
6572 edge true_e, false_e;
6573 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6574 enum tree_code code = gimple_cond_code (last);
6575 enum tree_code icode
6576 = invert_tree_comparison (code, HONOR_NANS (lhs));
6577 tree ops[2];
6578 ops[0] = lhs;
6579 ops[1] = rhs;
6580 if (do_region
6581 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6582 true_e = NULL;
6583 if (do_region
6584 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6585 false_e = NULL;
6586 if (true_e)
6587 vn_nary_op_insert_pieces_predicated
6588 (2, code, boolean_type_node, ops,
6589 boolean_true_node, 0, true_e);
6590 if (false_e)
6591 vn_nary_op_insert_pieces_predicated
6592 (2, code, boolean_type_node, ops,
6593 boolean_false_node, 0, false_e);
6594 if (icode != ERROR_MARK)
6595 {
6596 if (true_e)
6597 vn_nary_op_insert_pieces_predicated
6598 (2, icode, boolean_type_node, ops,
6599 boolean_false_node, 0, true_e);
6600 if (false_e)
6601 vn_nary_op_insert_pieces_predicated
6602 (2, icode, boolean_type_node, ops,
6603 boolean_true_node, 0, false_e);
6604 }
6605 /* Relax for non-integers, inverted condition handled
6606 above. */
6607 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6608 {
6609 if (true_e)
6610 insert_related_predicates_on_edge (code, ops, true_e);
6611 if (false_e)
6612 insert_related_predicates_on_edge (icode, ops, false_e);
6613 }
6614 }
6615 break;
6616 }
6617 case GIMPLE_GOTO:
6618 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6619 break;
6620 default:
6621 e = NULL;
6622 }
6623 if (e)
6624 {
6625 todo = TODO_cleanup_cfg;
6626 if (!(e->flags & EDGE_EXECUTABLE))
6627 {
6628 if (dump_file && (dump_flags & TDF_DETAILS))
6629 fprintf (dump_file,
6630 "marking known outgoing %sedge %d -> %d executable\n",
6631 e->flags & EDGE_DFS_BACK ? "back-" : "",
6632 e->src->index, e->dest->index);
6633 e->flags |= EDGE_EXECUTABLE;
6634 e->dest->flags |= BB_EXECUTABLE;
6635 }
6636 else if (!(e->dest->flags & BB_EXECUTABLE))
6637 {
6638 if (dump_file && (dump_flags & TDF_DETAILS))
6639 fprintf (dump_file,
6640 "marking destination block %d reachable\n",
6641 e->dest->index);
6642 e->dest->flags |= BB_EXECUTABLE;
6643 }
6644 }
6645 else if (gsi_one_before_end_p (gsi))
6646 {
6647 FOR_EACH_EDGE (e, ei, bb->succs)
6648 {
6649 if (!(e->flags & EDGE_EXECUTABLE))
6650 {
6651 if (dump_file && (dump_flags & TDF_DETAILS))
6652 fprintf (dump_file,
6653 "marking outgoing edge %d -> %d executable\n",
6654 e->src->index, e->dest->index);
6655 e->flags |= EDGE_EXECUTABLE;
6656 e->dest->flags |= BB_EXECUTABLE;
6657 }
6658 else if (!(e->dest->flags & BB_EXECUTABLE))
6659 {
6660 if (dump_file && (dump_flags & TDF_DETAILS))
6661 fprintf (dump_file,
6662 "marking destination block %d reachable\n",
6663 e->dest->index);
6664 e->dest->flags |= BB_EXECUTABLE;
6665 }
6666 }
6667 }
6668
6669 /* Eliminate. That also pushes to avail. */
6670 if (eliminate && ! iterate)
6671 avail.eliminate_stmt (bb, &gsi);
6672 else
6673 /* If not eliminating, make all not already available defs
6674 available. */
6675 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6676 if (! avail.eliminate_avail (bb, op))
6677 avail.eliminate_push_avail (bb, op);
6678 }
6679
6680 /* Eliminate in destination PHI arguments. Always substitute in dest
6681 PHIs, even for non-executable edges. This handles region
6682 exits PHIs. */
6683 if (!iterate && eliminate)
6684 FOR_EACH_EDGE (e, ei, bb->succs)
6685 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6686 !gsi_end_p (gsi); gsi_next (&gsi))
6687 {
6688 gphi *phi = gsi.phi ();
6689 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6690 tree arg = USE_FROM_PTR (use_p);
6691 if (TREE_CODE (arg) != SSA_NAME
6692 || virtual_operand_p (arg))
6693 continue;
6694 tree sprime;
6695 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6696 {
6697 sprime = SSA_VAL (arg);
6698 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6699 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6700 }
6701 else
6702 /* Look for sth available at the definition block of the argument.
6703 This avoids inconsistencies between availability there which
6704 decides if the stmt can be removed and availability at the
6705 use site. The SSA property ensures that things available
6706 at the definition are also available at uses. */
6707 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6708 arg);
6709 if (sprime
6710 && sprime != arg
6711 && may_propagate_copy (arg, sprime))
6712 propagate_value (use_p, sprime);
6713 }
6714
6715 vn_context_bb = NULL;
6716 return todo;
6717 }
6718
6719 /* Unwind state per basic-block. */
6720
6721 struct unwind_state
6722 {
6723 /* Times this block has been visited. */
6724 unsigned visited;
6725 /* Whether to handle this as iteration point or whether to treat
6726 incoming backedge PHI values as varying. */
6727 bool iterate;
6728 /* Maximum RPO index this block is reachable from. */
6729 int max_rpo;
6730 /* Unwind state. */
6731 void *ob_top;
6732 vn_reference_t ref_top;
6733 vn_phi_t phi_top;
6734 vn_nary_op_t nary_top;
6735 };
6736
6737 /* Unwind the RPO VN state for iteration. */
6738
6739 static void
6740 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6741 {
6742 gcc_assert (to->iterate);
6743 for (; last_inserted_nary != to->nary_top;
6744 last_inserted_nary = last_inserted_nary->next)
6745 {
6746 vn_nary_op_t *slot;
6747 slot = valid_info->nary->find_slot_with_hash
6748 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6749 /* Predication causes the need to restore previous state. */
6750 if ((*slot)->unwind_to)
6751 *slot = (*slot)->unwind_to;
6752 else
6753 valid_info->nary->clear_slot (slot);
6754 }
6755 for (; last_inserted_phi != to->phi_top;
6756 last_inserted_phi = last_inserted_phi->next)
6757 {
6758 vn_phi_t *slot;
6759 slot = valid_info->phis->find_slot_with_hash
6760 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6761 valid_info->phis->clear_slot (slot);
6762 }
6763 for (; last_inserted_ref != to->ref_top;
6764 last_inserted_ref = last_inserted_ref->next)
6765 {
6766 vn_reference_t *slot;
6767 slot = valid_info->references->find_slot_with_hash
6768 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6769 (*slot)->operands.release ();
6770 valid_info->references->clear_slot (slot);
6771 }
6772 obstack_free (&vn_tables_obstack, to->ob_top);
6773
6774 /* Prune [rpo_idx, ] from avail. */
6775 /* ??? This is O(number-of-values-in-region) which is
6776 O(region-size) rather than O(iteration-piece). */
6777 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
6778 i != vn_ssa_aux_hash->end (); ++i)
6779 {
6780 while ((*i)->avail)
6781 {
6782 if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
6783 break;
6784 vn_avail *av = (*i)->avail;
6785 (*i)->avail = (*i)->avail->next;
6786 av->next = avail.m_avail_freelist;
6787 avail.m_avail_freelist = av;
6788 }
6789 }
6790 }
6791
6792 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6793 If ITERATE is true then treat backedges optimistically as not
6794 executed and iterate. If ELIMINATE is true then perform
6795 elimination, otherwise leave that to the caller. */
6796
6797 static unsigned
6798 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6799 bool iterate, bool eliminate)
6800 {
6801 unsigned todo = 0;
6802
6803 /* We currently do not support region-based iteration when
6804 elimination is requested. */
6805 gcc_assert (!entry || !iterate || !eliminate);
6806 /* When iterating we need loop info up-to-date. */
6807 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6808
6809 bool do_region = entry != NULL;
6810 if (!do_region)
6811 {
6812 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6813 exit_bbs = BITMAP_ALLOC (NULL);
6814 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6815 }
6816
6817 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
6818 re-mark those that are contained in the region. */
6819 edge_iterator ei;
6820 edge e;
6821 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6822 e->flags &= ~EDGE_DFS_BACK;
6823
6824 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6825 int n = rev_post_order_and_mark_dfs_back_seme
6826 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
6827 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6828 for (int i = 0; i < n / 2; ++i)
6829 std::swap (rpo[i], rpo[n-i-1]);
6830
6831 if (!do_region)
6832 BITMAP_FREE (exit_bbs);
6833
6834 /* If there are any non-DFS_BACK edges into entry->dest skip
6835 processing PHI nodes for that block. This supports
6836 value-numbering loop bodies w/o the actual loop. */
6837 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6838 if (e != entry
6839 && !(e->flags & EDGE_DFS_BACK))
6840 break;
6841 bool skip_entry_phis = e != NULL;
6842 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
6843 fprintf (dump_file, "Region does not contain all edges into "
6844 "the entry block, skipping its PHIs.\n");
6845
6846 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6847 for (int i = 0; i < n; ++i)
6848 bb_to_rpo[rpo[i]] = i;
6849
6850 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6851
6852 rpo_elim avail (entry->dest);
6853 rpo_avail = &avail;
6854
6855 /* Verify we have no extra entries into the region. */
6856 if (flag_checking && do_region)
6857 {
6858 auto_bb_flag bb_in_region (fn);
6859 for (int i = 0; i < n; ++i)
6860 {
6861 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6862 bb->flags |= bb_in_region;
6863 }
6864 /* We can't merge the first two loops because we cannot rely
6865 on EDGE_DFS_BACK for edges not within the region. But if
6866 we decide to always have the bb_in_region flag we can
6867 do the checking during the RPO walk itself (but then it's
6868 also easy to handle MEME conservatively). */
6869 for (int i = 0; i < n; ++i)
6870 {
6871 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6872 edge e;
6873 edge_iterator ei;
6874 FOR_EACH_EDGE (e, ei, bb->preds)
6875 gcc_assert (e == entry
6876 || (skip_entry_phis && bb == entry->dest)
6877 || (e->src->flags & bb_in_region));
6878 }
6879 for (int i = 0; i < n; ++i)
6880 {
6881 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6882 bb->flags &= ~bb_in_region;
6883 }
6884 }
6885
6886 /* Create the VN state. For the initial size of the various hashtables
6887 use a heuristic based on region size and number of SSA names. */
6888 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6889 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6890 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6891 next_value_id = 1;
6892
6893 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6894 gcc_obstack_init (&vn_ssa_aux_obstack);
6895
6896 gcc_obstack_init (&vn_tables_obstack);
6897 gcc_obstack_init (&vn_tables_insert_obstack);
6898 valid_info = XCNEW (struct vn_tables_s);
6899 allocate_vn_table (valid_info, region_size);
6900 last_inserted_ref = NULL;
6901 last_inserted_phi = NULL;
6902 last_inserted_nary = NULL;
6903
6904 vn_valueize = rpo_vn_valueize;
6905
6906 /* Initialize the unwind state and edge/BB executable state. */
6907 bool need_max_rpo_iterate = false;
6908 for (int i = 0; i < n; ++i)
6909 {
6910 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6911 rpo_state[i].visited = 0;
6912 rpo_state[i].max_rpo = i;
6913 bb->flags &= ~BB_EXECUTABLE;
6914 bool has_backedges = false;
6915 edge e;
6916 edge_iterator ei;
6917 FOR_EACH_EDGE (e, ei, bb->preds)
6918 {
6919 if (e->flags & EDGE_DFS_BACK)
6920 has_backedges = true;
6921 e->flags &= ~EDGE_EXECUTABLE;
6922 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
6923 continue;
6924 if (bb_to_rpo[e->src->index] > i)
6925 {
6926 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6927 bb_to_rpo[e->src->index]);
6928 need_max_rpo_iterate = true;
6929 }
6930 else
6931 rpo_state[i].max_rpo
6932 = MAX (rpo_state[i].max_rpo,
6933 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6934 }
6935 rpo_state[i].iterate = iterate && has_backedges;
6936 }
6937 entry->flags |= EDGE_EXECUTABLE;
6938 entry->dest->flags |= BB_EXECUTABLE;
6939
6940 /* When there are irreducible regions the simplistic max_rpo computation
6941 above for the case of backedges doesn't work and we need to iterate
6942 until there are no more changes. */
6943 unsigned nit = 0;
6944 while (need_max_rpo_iterate)
6945 {
6946 nit++;
6947 need_max_rpo_iterate = false;
6948 for (int i = 0; i < n; ++i)
6949 {
6950 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6951 edge e;
6952 edge_iterator ei;
6953 FOR_EACH_EDGE (e, ei, bb->preds)
6954 {
6955 if (e == entry || (skip_entry_phis && bb == entry->dest))
6956 continue;
6957 int max_rpo = MAX (rpo_state[i].max_rpo,
6958 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6959 if (rpo_state[i].max_rpo != max_rpo)
6960 {
6961 rpo_state[i].max_rpo = max_rpo;
6962 need_max_rpo_iterate = true;
6963 }
6964 }
6965 }
6966 }
6967 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
6968
6969 /* As heuristic to improve compile-time we handle only the N innermost
6970 loops and the outermost one optimistically. */
6971 if (iterate)
6972 {
6973 loop_p loop;
6974 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6975 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6976 if (loop_depth (loop) > max_depth)
6977 for (unsigned i = 2;
6978 i < loop_depth (loop) - max_depth; ++i)
6979 {
6980 basic_block header = superloop_at_depth (loop, i)->header;
6981 bool non_latch_backedge = false;
6982 edge e;
6983 edge_iterator ei;
6984 FOR_EACH_EDGE (e, ei, header->preds)
6985 if (e->flags & EDGE_DFS_BACK)
6986 {
6987 /* There can be a non-latch backedge into the header
6988 which is part of an outer irreducible region. We
6989 cannot avoid iterating this block then. */
6990 if (!dominated_by_p (CDI_DOMINATORS,
6991 e->src, e->dest))
6992 {
6993 if (dump_file && (dump_flags & TDF_DETAILS))
6994 fprintf (dump_file, "non-latch backedge %d -> %d "
6995 "forces iteration of loop %d\n",
6996 e->src->index, e->dest->index, loop->num);
6997 non_latch_backedge = true;
6998 }
6999 else
7000 e->flags |= EDGE_EXECUTABLE;
7001 }
7002 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7003 }
7004 }
7005
7006 uint64_t nblk = 0;
7007 int idx = 0;
7008 if (iterate)
7009 /* Go and process all blocks, iterating as necessary. */
7010 do
7011 {
7012 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7013
7014 /* If the block has incoming backedges remember unwind state. This
7015 is required even for non-executable blocks since in irreducible
7016 regions we might reach them via the backedge and re-start iterating
7017 from there.
7018 Note we can individually mark blocks with incoming backedges to
7019 not iterate where we then handle PHIs conservatively. We do that
7020 heuristically to reduce compile-time for degenerate cases. */
7021 if (rpo_state[idx].iterate)
7022 {
7023 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7024 rpo_state[idx].ref_top = last_inserted_ref;
7025 rpo_state[idx].phi_top = last_inserted_phi;
7026 rpo_state[idx].nary_top = last_inserted_nary;
7027 }
7028
7029 if (!(bb->flags & BB_EXECUTABLE))
7030 {
7031 if (dump_file && (dump_flags & TDF_DETAILS))
7032 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7033 idx, bb->index);
7034 idx++;
7035 continue;
7036 }
7037
7038 if (dump_file && (dump_flags & TDF_DETAILS))
7039 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7040 nblk++;
7041 todo |= process_bb (avail, bb,
7042 rpo_state[idx].visited != 0,
7043 rpo_state[idx].iterate,
7044 iterate, eliminate, do_region, exit_bbs, false);
7045 rpo_state[idx].visited++;
7046
7047 /* Verify if changed values flow over executable outgoing backedges
7048 and those change destination PHI values (that's the thing we
7049 can easily verify). Reduce over all such edges to the farthest
7050 away PHI. */
7051 int iterate_to = -1;
7052 edge_iterator ei;
7053 edge e;
7054 FOR_EACH_EDGE (e, ei, bb->succs)
7055 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7056 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7057 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7058 {
7059 int destidx = bb_to_rpo[e->dest->index];
7060 if (!rpo_state[destidx].visited)
7061 {
7062 if (dump_file && (dump_flags & TDF_DETAILS))
7063 fprintf (dump_file, "Unvisited destination %d\n",
7064 e->dest->index);
7065 if (iterate_to == -1 || destidx < iterate_to)
7066 iterate_to = destidx;
7067 continue;
7068 }
7069 if (dump_file && (dump_flags & TDF_DETAILS))
7070 fprintf (dump_file, "Looking for changed values of backedge"
7071 " %d->%d destination PHIs\n",
7072 e->src->index, e->dest->index);
7073 vn_context_bb = e->dest;
7074 gphi_iterator gsi;
7075 for (gsi = gsi_start_phis (e->dest);
7076 !gsi_end_p (gsi); gsi_next (&gsi))
7077 {
7078 bool inserted = false;
7079 /* While we'd ideally just iterate on value changes
7080 we CSE PHIs and do that even across basic-block
7081 boundaries. So even hashtable state changes can
7082 be important (which is roughly equivalent to
7083 PHI argument value changes). To not excessively
7084 iterate because of that we track whether a PHI
7085 was CSEd to with GF_PLF_1. */
7086 bool phival_changed;
7087 if ((phival_changed = visit_phi (gsi.phi (),
7088 &inserted, false))
7089 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7090 {
7091 if (!phival_changed
7092 && dump_file && (dump_flags & TDF_DETAILS))
7093 fprintf (dump_file, "PHI was CSEd and hashtable "
7094 "state (changed)\n");
7095 if (iterate_to == -1 || destidx < iterate_to)
7096 iterate_to = destidx;
7097 break;
7098 }
7099 }
7100 vn_context_bb = NULL;
7101 }
7102 if (iterate_to != -1)
7103 {
7104 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7105 idx = iterate_to;
7106 if (dump_file && (dump_flags & TDF_DETAILS))
7107 fprintf (dump_file, "Iterating to %d BB%d\n",
7108 iterate_to, rpo[iterate_to]);
7109 continue;
7110 }
7111
7112 idx++;
7113 }
7114 while (idx < n);
7115
7116 else /* !iterate */
7117 {
7118 /* Process all blocks greedily with a worklist that enforces RPO
7119 processing of reachable blocks. */
7120 auto_bitmap worklist;
7121 bitmap_set_bit (worklist, 0);
7122 while (!bitmap_empty_p (worklist))
7123 {
7124 int idx = bitmap_first_set_bit (worklist);
7125 bitmap_clear_bit (worklist, idx);
7126 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7127 gcc_assert ((bb->flags & BB_EXECUTABLE)
7128 && !rpo_state[idx].visited);
7129
7130 if (dump_file && (dump_flags & TDF_DETAILS))
7131 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7132
7133 /* When we run into predecessor edges where we cannot trust its
7134 executable state mark them executable so PHI processing will
7135 be conservative.
7136 ??? Do we need to force arguments flowing over that edge
7137 to be varying or will they even always be? */
7138 edge_iterator ei;
7139 edge e;
7140 FOR_EACH_EDGE (e, ei, bb->preds)
7141 if (!(e->flags & EDGE_EXECUTABLE)
7142 && (bb == entry->dest
7143 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7144 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7145 >= (int)idx))))
7146 {
7147 if (dump_file && (dump_flags & TDF_DETAILS))
7148 fprintf (dump_file, "Cannot trust state of predecessor "
7149 "edge %d -> %d, marking executable\n",
7150 e->src->index, e->dest->index);
7151 e->flags |= EDGE_EXECUTABLE;
7152 }
7153
7154 nblk++;
7155 todo |= process_bb (avail, bb, false, false, false, eliminate,
7156 do_region, exit_bbs,
7157 skip_entry_phis && bb == entry->dest);
7158 rpo_state[idx].visited++;
7159
7160 FOR_EACH_EDGE (e, ei, bb->succs)
7161 if ((e->flags & EDGE_EXECUTABLE)
7162 && e->dest->index != EXIT_BLOCK
7163 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7164 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7165 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7166 }
7167 }
7168
7169 /* If statistics or dump file active. */
7170 int nex = 0;
7171 unsigned max_visited = 1;
7172 for (int i = 0; i < n; ++i)
7173 {
7174 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7175 if (bb->flags & BB_EXECUTABLE)
7176 nex++;
7177 statistics_histogram_event (cfun, "RPO block visited times",
7178 rpo_state[i].visited);
7179 if (rpo_state[i].visited > max_visited)
7180 max_visited = rpo_state[i].visited;
7181 }
7182 unsigned nvalues = 0, navail = 0;
7183 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7184 i != vn_ssa_aux_hash->end (); ++i)
7185 {
7186 nvalues++;
7187 vn_avail *av = (*i)->avail;
7188 while (av)
7189 {
7190 navail++;
7191 av = av->next;
7192 }
7193 }
7194 statistics_counter_event (cfun, "RPO blocks", n);
7195 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7196 statistics_counter_event (cfun, "RPO blocks executable", nex);
7197 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7198 statistics_histogram_event (cfun, "RPO num values", nvalues);
7199 statistics_histogram_event (cfun, "RPO num avail", navail);
7200 statistics_histogram_event (cfun, "RPO num lattice",
7201 vn_ssa_aux_hash->elements ());
7202 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7203 {
7204 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7205 " blocks in total discovering %d executable blocks iterating "
7206 "%d.%d times, a block was visited max. %u times\n",
7207 n, nblk, nex,
7208 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7209 max_visited);
7210 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7211 "and %" PRIu64 " lattice elements\n",
7212 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7213 }
7214
7215 if (eliminate)
7216 {
7217 /* When !iterate we already performed elimination during the RPO
7218 walk. */
7219 if (iterate)
7220 {
7221 /* Elimination for region-based VN needs to be done within the
7222 RPO walk. */
7223 gcc_assert (! do_region);
7224 /* Note we can't use avail.walk here because that gets confused
7225 by the existing availability and it will be less efficient
7226 as well. */
7227 todo |= eliminate_with_rpo_vn (NULL);
7228 }
7229 else
7230 todo |= avail.eliminate_cleanup (do_region);
7231 }
7232
7233 vn_valueize = NULL;
7234 rpo_avail = NULL;
7235
7236 XDELETEVEC (bb_to_rpo);
7237 XDELETEVEC (rpo);
7238 XDELETEVEC (rpo_state);
7239
7240 return todo;
7241 }
7242
7243 /* Region-based entry for RPO VN. Performs value-numbering and elimination
7244 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
7245 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
7246 are not considered. */
7247
7248 unsigned
7249 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7250 {
7251 default_vn_walk_kind = VN_WALKREWRITE;
7252 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7253 free_rpo_vn ();
7254 return todo;
7255 }
7256
7257
7258 namespace {
7259
7260 const pass_data pass_data_fre =
7261 {
7262 GIMPLE_PASS, /* type */
7263 "fre", /* name */
7264 OPTGROUP_NONE, /* optinfo_flags */
7265 TV_TREE_FRE, /* tv_id */
7266 ( PROP_cfg | PROP_ssa ), /* properties_required */
7267 0, /* properties_provided */
7268 0, /* properties_destroyed */
7269 0, /* todo_flags_start */
7270 0, /* todo_flags_finish */
7271 };
7272
7273 class pass_fre : public gimple_opt_pass
7274 {
7275 public:
7276 pass_fre (gcc::context *ctxt)
7277 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7278 {}
7279
7280 /* opt_pass methods: */
7281 opt_pass * clone () { return new pass_fre (m_ctxt); }
7282 void set_pass_param (unsigned int n, bool param)
7283 {
7284 gcc_assert (n == 0);
7285 may_iterate = param;
7286 }
7287 virtual bool gate (function *)
7288 {
7289 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7290 }
7291 virtual unsigned int execute (function *);
7292
7293 private:
7294 bool may_iterate;
7295 }; // class pass_fre
7296
7297 unsigned int
7298 pass_fre::execute (function *fun)
7299 {
7300 unsigned todo = 0;
7301
7302 /* At -O[1g] use the cheap non-iterating mode. */
7303 bool iterate_p = may_iterate && (optimize > 1);
7304 calculate_dominance_info (CDI_DOMINATORS);
7305 if (iterate_p)
7306 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
7307
7308 default_vn_walk_kind = VN_WALKREWRITE;
7309 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
7310 free_rpo_vn ();
7311
7312 if (iterate_p)
7313 loop_optimizer_finalize ();
7314
7315 return todo;
7316 }
7317
7318 } // anon namespace
7319
7320 gimple_opt_pass *
7321 make_pass_fre (gcc::context *ctxt)
7322 {
7323 return new pass_fre (ctxt);
7324 }
7325
7326 #undef BB_EXECUTABLE