/* Scoped tables used by SSA dominator optimizations.
   Copyright (C) 2013-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

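/* A minimal usage sketch (illustrative only, not part of this file):
   callers bracket a block with a marker and later unwind to it, e.g.

     avail_exprs_stack->push_marker ();    // push the <NULL,NULL> sentinel
     ... record expressions while optimizing the block ...
     avail_exprs_stack->pop_to_marker ();  // undo them in LIFO order

   push_marker is assumed here to push the <NULL,NULL> pair that the
   victim.first == NULL test below recognizes as the marker.  */
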
void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores leading to a candidate.  We re-use the SCCVN param
     for this as it is basically the same complexity.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  return NULL;
}

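/* Descriptive note: vuse_eq follows the usual walk_non_aliased_vuses
   callback convention -- any non-NULL return value stops the walk and is
   handed back to the caller.  Returning DATA signals that the candidate's
   virtual operand was reached; (void *)-1 gives up after too many steps;
   NULL continues walking up the virtual use-def chain.  */
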
/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */

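/* Illustrative sketch with hypothetical SSA names: when the dominator
   optimizer first sees

     x_3 = a_1 + b_2;

   lookup_avail_expr (stmt, true, true) inserts the expression and returns
   NULL_TREE.  A dominated statement y_4 = a_1 + b_2 then finds the entry,
   gets x_3 back, and can be turned into a copy from x_3.  */
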
tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return NULL_TREE;
    }

  /* If we found a redundant memory operation, do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1 then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2,
                                       vuse_eq, NULL, NULL, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

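/* For example (hypothetical operands), a_1 + b_2 and b_2 + a_1 receive
   the same hash via add_commutative above, matching the order-insensitive
   equality that hashable_expr_equal_p applies to commutative codes.  */
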
/* Compute a hash value for a hashable_expr value EXPR, accumulating
   into the previously computed hash state HSTATE.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must hash to
   the same value, given an identical starting state.  The logic is
   intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          HOST_WIDE_INT offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (size != -1
              && size == max_size)
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}

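/* Illustrative example with hypothetical trees: given "int a[4];", the
   reads a[1] (an ARRAY_REF) and MEM[&a + 4B] (a MEM_REF) both decompose
   via get_ref_base_and_extent to base &a with bit offset 32 and size 32,
   so both hash as a normalized MEM_REF and can match in the table.  */
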
/* Compare trees T0 and T1 to see if they are equivalent MEM_REFs or
   ARRAY_REFs.  (That is, whether they return the value of the same bit
   of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could
   still mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;
  bool rev0;
  HOST_WIDE_INT off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (sz0 == -1
      || sz0 != max0)
    return false;

  bool rev1;
  HOST_WIDE_INT off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (sz1 == -1
      || sz1 != max1)
    return false;

  if (rev0 != rev1)
    return false;

  /* Types were compatible, so this is a sanity check.  */
  gcc_assert (sz0 == sz1);

  return (off0 == off1) && operand_equal_p (base0, base1, 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If only one type is NULL, the expressions can't be equal;
     if both are NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

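/* Summary of the mapping implemented above (descriptive only):
     GIMPLE_ASSIGN  -> EXPR_SINGLE/UNARY/BINARY/TERNARY, by RHS class
     GIMPLE_COND    -> EXPR_BINARY of boolean_type_node
     GIMPLE_CALL    -> EXPR_CALL (an LHS is required)
     GIMPLE_SWITCH  -> EXPR_SINGLE on the switch index
     GIMPLE_GOTO    -> EXPR_SINGLE on the goto destination
     GIMPLE_PHI     -> EXPR_PHI
   Anything else trips the gcc_unreachable.  */
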
/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each <PREV_VALUE, DEST> pair popped, restore DEST to
   its prior value PREV_VALUE.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

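/* Note the push order above: PREV_X is pushed beneath X, so
   const_and_copies::pop_to_marker pops each DEST first and then the
   PREV_VALUE recorded for it, restoring entries in LIFO order.  */
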
/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

d3139801
JL
899bool
900expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
901{
902 const struct hashable_expr *expr1 = p1->expr ();
903 const struct expr_hash_elt *stamp1 = p1->stamp ();
904 const struct hashable_expr *expr2 = p2->expr ();
905 const struct expr_hash_elt *stamp2 = p2->stamp ();
906
907 /* This case should apply only when removing entries from the table. */
908 if (stamp1 == stamp2)
909 return true;
910
911 if (p1->hash () != p2->hash ())
912 return false;
913
914 /* In case of a collision, both RHS have to be identical and have the
915 same VUSE operands. */
916 if (hashable_expr_equal_p (expr1, expr2)
917 && types_compatible_p (expr1->type, expr2->type))
918 return true;
919
920 return false;
921}
922
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

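/* For instance (hypothetical trees), COND "a_1 < b_2" initializes an
   EXPR_BINARY with op LT_EXPR, while "!x_3" initializes an EXPR_UNARY
   with op TRUTH_NOT_EXPR; a bare constant or variable trips the
   gcc_unreachable above.  */
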
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it onto *P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now push the original true and false conditions themselves.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}
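
/* Illustrative example with hypothetical integer operands: for COND
   "a_1 < b_2" the code above pushes the derived records a_1 <= b_2 (true),
   a_1 != b_2 (true) and a_1 == b_2 (false), then the original condition
   (true) and INVERTED (false) -- five cond_equivalence entries in all.  */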