/* Scoped hash tables for SSA dominator optimizations.
   Copyright (C) 2013-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}
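
/* Editorial illustration (not part of GCC): a typical client brackets
   each dominator-tree block with a marker, assuming a push_marker
   method that records the <NULL, NULL> sentinel pair the loop above
   stops on:

     avail_exprs_stack->push_marker ();
     ... record expressions made available in this block ...
     avail_exprs_stack->pop_to_marker ();  */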

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands' relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
                                              class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
        {
          enum tree_code code = expr->ops.binary.op;

          switch (code)
            {
            /* For these cases, if we know the operands
               are equal, then we know the result.  */
            case MIN_EXPR:
            case MAX_EXPR:
            case BIT_IOR_EXPR:
            case BIT_AND_EXPR:
            case BIT_XOR_EXPR:
            case MINUS_EXPR:
            case TRUNC_DIV_EXPR:
            case CEIL_DIV_EXPR:
            case FLOOR_DIV_EXPR:
            case ROUND_DIV_EXPR:
            case EXACT_DIV_EXPR:
            case TRUNC_MOD_EXPR:
            case CEIL_MOD_EXPR:
            case FLOOR_MOD_EXPR:
            case ROUND_MOD_EXPR:
              {
                /* Build a simple equality expr and query the hash table
                   for it.  */
                struct hashable_expr expr;
                expr.type = boolean_type_node;
                expr.kind = EXPR_BINARY;
                expr.ops.binary.op = EQ_EXPR;
                expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
                expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
                class expr_hash_elt element2 (&expr, NULL_TREE);
                expr_hash_elt **slot
                  = m_avail_exprs->find_slot (&element2, NO_INSERT);
                tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

                /* If the query was successful and returned a nonzero
                   result, then we know that the operands of the binary
                   expression are the same.  In many cases this allows
                   us to compute a constant result of the expression
                   at compile time, even if we do not know the exact
                   values of the operands.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  {
                    switch (code)
                      {
                      case MIN_EXPR:
                      case MAX_EXPR:
                      case BIT_IOR_EXPR:
                      case BIT_AND_EXPR:
                        return gimple_assign_rhs1 (stmt);

                      case MINUS_EXPR:
                        /* This is unsafe for certain floats even in
                           non-IEEE formats.  In IEEE, it is unsafe
                           because it gives the wrong result for
                           NaNs.  */
                        if (FLOAT_TYPE_P (result_type)
                            && HONOR_NANS (result_type))
                          break;
                        /* FALLTHRU */
                      case BIT_XOR_EXPR:
                      case TRUNC_MOD_EXPR:
                      case CEIL_MOD_EXPR:
                      case FLOOR_MOD_EXPR:
                      case ROUND_MOD_EXPR:
                        return build_zero_cst (result_type);

                      case TRUNC_DIV_EXPR:
                      case CEIL_DIV_EXPR:
                      case FLOOR_DIV_EXPR:
                      case ROUND_DIV_EXPR:
                      case EXACT_DIV_EXPR:
                        /* Avoid _Fract types where we can't build 1.  */
                        if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
                          break;
                        return build_one_cst (result_type);

                      default:
                        gcc_unreachable ();
                      }
                  }
                break;
              }

            default:
              break;
            }
        }
    }
  return NULL_TREE;
}
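
/* Editorial illustration (not part of GCC), with hypothetical SSA
   names: if the table already records that a_1 == b_2 is true, then
   for

     x_3 = a_1 - b_2;  we can return 0
     x_4 = a_1 | b_2;  we can return a_1
     x_5 = a_1 / b_2;  we can return 1

   even though the runtime values of a_1 and b_2 are unknown.  */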

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy
     propagator in optimize_stmt.  */
  if (element.expr ()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr ()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr ()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
         be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
                                                                  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
         We must initialize *SLOT to a real entry, even if we found a
         way to prove ELEMENT was a constant after not finding ELEMENT
         in the hash table.

         An uninitialized or empty slot is an indication that no prior
         objects entered into the hash table had a hash collision with
         ELEMENT.

         If we failed to do so and such entries existed in the table,
         they would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation, do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1 then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
                                       limit, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
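
/* Editorial illustration (not part of GCC), with hypothetical SSA
   names: given two loads

     t_1 = *p_2;   <-- lookup with INSERT returns NULL_TREE, records t_1
     ...
     t_3 = *p_2;   <-- lookup finds the entry and returns t_1

   the caller can then replace the redundant second load with t_1,
   provided no intervening store changed the virtual operand (or the
   alias walk above proves the memory states equivalent).  */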

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}
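
/* Editorial note: hashing each operand into its own state and then
   combining them with hstate.add_commutative makes the result
   order-insensitive, so for example a_1 + b_2 and b_2 + a_1 hash
   identically, matching the commutative case in hashable_expr_equal_p
   below.  */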

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type itself, since that can lead to nodes
         which compare equal according to operand_equal_p but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          poly_int64 offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (known_size_p (max_size)
              && known_eq (size, max_size))
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate,
                                 TREE_CODE (base) == MEM_REF
                                 ? OEP_ADDRESS_OF : 0);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
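
/* Editorial illustration (not part of GCC): for a hypothetical struct s
   with an int field f at byte offset 4, the loads

     ... = s.f;
     ... = MEM[(int *)&s + 4B];

   decompose to the same <base, offset, size> triple above and so hash
   together, even though their tree forms differ.  */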

/* Compare trees T0 and T1 to see if they are MEM_REFs or ARRAY_REFs
   equivalent to each other (that is, they access the same bits of
   memory).

   Return TRUE if the two are so equivalent; FALSE if not (which could
   still mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;
  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1,
                          (TREE_CODE (base0) == MEM_REF
                           || TREE_CODE (base0) == TARGET_MEM_REF)
                          && (TREE_CODE (base1) == MEM_REF
                              || TREE_CODE (base1) == TARGET_MEM_REF)
                          ? OEP_ADDRESS_OF : 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If one type is NULL and the other is not, the expressions
     cannot be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
          && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
             != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
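
/* Editorial note: the commutative cases above mirror the commutative
   hashing in add_hashable_expr, so for example the EXPR_BINARY entries
   for a_1 + b_2 and b_2 + a_1 both hash and compare equal, letting one
   make the other's lookup succeed.  */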

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;

        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;

        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;

        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we need a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fprintf (stream, ".%s",
                   internal_fn_name (gimple_call_internal_fn (fn_from)));
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}
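
/* Editorial example of the resulting dump line, with hypothetical SSA
   names:

     STMT x_1 = a_2 + b_3 with .MEM_7

   lookup_avail_expr, record_expr and pop_to_marker prefix such lines
   with "LKUP ", "N>>> " (N being the type character) and "<<<< "
   respectively in detailed dumps.  */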

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding; otherwise
         pop off the next entry, as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
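
/* Editorial illustration (not part of GCC) of the unwind protocol,
   assuming a push_marker method that pushes a single NULL sentinel:

     const_and_copies->push_marker ();
     const_and_copies->record_const_or_copy (x_1, integer_zero_node);
     ...
     const_and_copies->pop_to_marker ();   <-- restores x_1's old value

   Each record pushes <prev value, name>, so the name sits on top and
   is popped first above.  */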

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const class expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const class expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
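
/* For example (editorial, hypothetical operands): the tree a_1 < b_2
   becomes an EXPR_BINARY hashable_expr with op LT_EXPR and boolean
   type, while !x_3 becomes an EXPR_UNARY with op TRUTH_NOT_EXPR.  */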

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it onto *P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}
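
/* For example (editorial): build_and_record_new_cond (LE_EXPR, a, b, p)
   pushes the equivalence "a <= b is true", and passing VAL = false
   records the comparison as known false instead.  */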

/* Record onto *P that COND is true and INVERTED is false.  Also
   record that any conditions implied by COND are true as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now push the original true and false conditions onto *P.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}
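
/* Editorial illustration (not part of GCC), with hypothetical operands:
   for an integral comparison a_1 < b_2 known to be true, the vector
   ends up holding

     a_1 <= b_2  == true
     a_1 != b_2  == true
     a_1 == b_2  == false
     a_1 <  b_2  == true     (the original condition)
     a_1 >= b_2  == false    (the inverted condition)

   The floating-point cases additionally record ORDERED and LTGT
   equivalences as appropriate.  */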