/* Support for the scoped hash tables used by the SSA dominator optimizations.
   Copyright (C) 2013-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}
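
/* For illustration, a minimal sketch of the intended usage pattern
   during a dominator walk (this assumes the push_marker member
   declared in tree-ssa-scopedtables.h):

     avail_exprs->push_marker ();
     ... record expressions made available in block BB ...
     avail_exprs->pop_to_marker ();

   Each block pushes a marker on entry; popping back to it when the
   walk leaves BB restores the hash table to the state it had in BB's
   immediate dominator.  */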

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands' relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
                                              class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
        {
          enum tree_code code = expr->ops.binary.op;

          switch (code)
            {
            /* For these cases, if we know the operands
               are equal, then we know the result.  */
            case MIN_EXPR:
            case MAX_EXPR:
            case BIT_IOR_EXPR:
            case BIT_AND_EXPR:
            case BIT_XOR_EXPR:
            case MINUS_EXPR:
            case TRUNC_DIV_EXPR:
            case CEIL_DIV_EXPR:
            case FLOOR_DIV_EXPR:
            case ROUND_DIV_EXPR:
            case EXACT_DIV_EXPR:
            case TRUNC_MOD_EXPR:
            case CEIL_MOD_EXPR:
            case FLOOR_MOD_EXPR:
            case ROUND_MOD_EXPR:
              {
                /* Build a simple equality expr and query the hash table
                   for it.  */
                struct hashable_expr expr;
                expr.type = boolean_type_node;
                expr.kind = EXPR_BINARY;
                expr.ops.binary.op = EQ_EXPR;
                expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
                expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
                class expr_hash_elt element2 (&expr, NULL_TREE);
                expr_hash_elt **slot
                  = m_avail_exprs->find_slot (&element2, NO_INSERT);
                tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

                /* If the query was successful and returned a nonzero
                   result, then we know that the operands of the binary
                   expression are the same.  In many cases this allows
                   us to compute a constant result of the expression
                   at compile time, even if we do not know the exact
                   values of the operands.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  {
                    switch (code)
                      {
                      case MIN_EXPR:
                      case MAX_EXPR:
                      case BIT_IOR_EXPR:
                      case BIT_AND_EXPR:
                        return gimple_assign_rhs1 (stmt);

                      case MINUS_EXPR:
                        /* This is unsafe for certain floats even in non-IEEE
                           formats.  In IEEE, it is unsafe because it gives
                           the wrong result for NaNs.  */
                        if (FLOAT_TYPE_P (result_type)
                            && HONOR_NANS (result_type))
                          break;
                        /* FALLTHRU */
                      case BIT_XOR_EXPR:
                      case TRUNC_MOD_EXPR:
                      case CEIL_MOD_EXPR:
                      case FLOOR_MOD_EXPR:
                      case ROUND_MOD_EXPR:
                        return build_zero_cst (result_type);

                      case TRUNC_DIV_EXPR:
                      case CEIL_DIV_EXPR:
                      case FLOOR_DIV_EXPR:
                      case ROUND_DIV_EXPR:
                      case EXACT_DIV_EXPR:
                        /* Avoid _Fract types where we can't build 1.  */
                        if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
                          break;
                        return build_one_cst (result_type);

                      default:
                        gcc_unreachable ();
                      }
                  }
                break;
              }

            default:
              break;
            }
        }
    }
  return NULL_TREE;
}
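
/* As a concrete example of the simplification above, given

     if (a_1 == b_2)
       c_3 = a_1 - b_2;

   the recorded equivalence a_1 == b_2 lets the MINUS_EXPR fold to
   build_zero_cst, so c_3 becomes 0 even though the values of a_1 and
   b_2 are unknown at compile time (unless the type is a NaN-honoring
   float, which the check above rejects).  */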

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when we
   finish processing this block and its children.  */

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
         be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
                                                                  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
         We must initialize *SLOT to a real entry, even if we found a
         way to prove ELEMENT was a constant after not finding ELEMENT
         in the hash table.

         An uninitialized or empty slot indicates that no prior object
         entered into the hash table had a hash collision with ELEMENT.

         If we fail to do so and had such entries in the table, they
         would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1 then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
                                       limit, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
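
/* To illustrate lookup_avail_expr, in

     x_1 = *p_2;
     ...
     y_3 = *p_2;

   the first load inserts <*p_2, x_1> into the table.  The lookup for
   the second load returns x_1 when both loads see the same VUSE, or
   when the alias walk above can prove that no intervening store
   clobbers *p_2; DOM can then replace y_3 by x_1.  */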

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR given a
   previously accumulated hash state HSTATE.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical initial state.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          poly_int64 offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (known_size_p (max_size)
              && known_eq (size, max_size))
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate,
                                 TREE_CODE (base) == MEM_REF
                                 ? OEP_ADDRESS_OF : 0);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
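
/* For instance, with

     int a[4];
     x_1 = a[1];
     y_2 = MEM[(int *)&a + 4B];

   both references decompose via get_ref_base_and_extent to base &a,
   bit offset 32 and size 32, so the ARRAY_REF and the MEM_REF receive
   the same hash value and can then be matched by equal_mem_array_ref_p
   below.  */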

/* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
   to each other.  (That is, they return the value of the same bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1,
                          (TREE_CODE (base0) == MEM_REF
                           || TREE_CODE (base0) == TARGET_MEM_REF)
                          && (TREE_CODE (base1) == MEM_REF
                              || TREE_CODE (base1) == TARGET_MEM_REF)
                          ? OEP_ADDRESS_OF : 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If exactly one of the types is NULL, the expressions can't be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
          && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
             != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
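
/* The commutativity handling above means that, for example,

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   produce hashable_exprs that compare equal (PLUS_EXPR is a
   commutative_tree_code), and the commutative hashing in
   add_hashable_expr guarantees they also land in the same bucket, so
   the second addition is recognized as redundant.  */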

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fprintf (stream, ".%s",
                   internal_fn_name (gimple_call_internal_fn (fn_from)));
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding; otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
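
/* A minimal sketch of the usage pattern, again assuming the
   push_marker member declared in tree-ssa-scopedtables.h:

     const_and_copies->push_marker ();
     const_and_copies->record_const_or_copy (x, y);
     ... optimize statements dominated by the x == y equivalence ...
     const_and_copies->pop_to_marker ();

   record_const_or_copy saves X's previous SSA_NAME_VALUE on the
   stack, so pop_to_marker can restore it when the walk leaves the
   region in which the equivalence holds.  */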

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}
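
/* Following the value chain means that if y_2 already has the known
   value 5, then record_const_or_copy (x_1, y_2) records x_1 = 5
   directly rather than x_1 = y_2, so later lookups of x_1 valueize
   to the constant in a single step.  */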

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const class expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const class expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it onto *P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now record the original true and false conditions.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}
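
/* For example, for the integer condition a_1 < b_2 (with inverted
   condition a_1 >= b_2), the calls above push the equivalences

     a_1 <= b_2  ->  true
     a_1 != b_2  ->  true
     a_1 == b_2  ->  false
     a_1 <  b_2  ->  true   (the original condition)
     a_1 >= b_2  ->  false  (the inverted condition)

   so statements dominated by the true edge may consult any of them in
   the available expression table.  */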
1199 }