1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "insn-config.h"
48 #include "emit-rtl.h"
49 #include "gimple-pretty-print.h"
50 #include "alias.h"
51 #include "fold-const.h"
52 #include "internal-fn.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "stor-layout.h"
56 #include "flags.h"
57 #include "dojump.h"
58 #include "explow.h"
59 #include "calls.h"
60 #include "varasm.h"
61 #include "stmt.h"
62 #include "expr.h"
63 #include "cfgloop.h"
64 #include "tree-cfg.h"
65 #include "domwalk.h"
66 #include "params.h"
67 #include "tree-ssa-address.h"
68 #include "tree-affine.h"
69 #include "builtins.h"
70 \f
71 /* Information about a strength reduction candidate. Each statement
72 in the candidate table represents an expression of one of the
73 following forms (the special case of CAND_REF will be described
74 later):
75
76 (CAND_MULT) S1: X = (B + i) * S
77 (CAND_ADD) S1: X = B + (i * S)
78
79 Here X and B are SSA names, i is an integer constant, and S is
80 either an SSA name or a constant. We call B the "base," i the
81 "index", and S the "stride."
82
83 Any statement S0 that dominates S1 and is of the form:
84
85 (CAND_MULT) S0: Y = (B + i') * S
86 (CAND_ADD) S0: Y = B + (i' * S)
87
88 is called a "basis" for S1. In both cases, S1 may be replaced by
89
90 S1': X = Y + (i - i') * S,
91
92 where (i - i') * S is folded to the extent possible.
93
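For example (an illustrative sequence, not drawn from any particular
test case):

S0: y = (b + 2) * s
S1: x = (b + 7) * s

Here S0 is a basis for S1, and S1 may be replaced by

S1': x = y + 5 * s

which trades the original multiply for an add plus a cheaper
multiply by the constant 5.
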
94 All gimple statements are visited in dominator order, and each
95 statement that may contribute to one of the forms of S1 above is
96 given at least one entry in the candidate table. Such statements
97 include addition, pointer addition, subtraction, multiplication,
98 negation, copies, and nontrivial type casts. If a statement may
99 represent more than one expression of the forms of S1 above,
100 multiple "interpretations" are stored in the table and chained
101 together. Examples:
102
103 * An add of two SSA names may treat either operand as the base.
104 * A multiply of two SSA names, likewise.
105 * A copy or cast may be thought of as either a CAND_MULT with
106 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
107
108 Candidate records are allocated from an obstack. They are addressed
109 both from a hash table keyed on S1, and from a vector of candidate
110 pointers arranged in predominator order.
111
112 Opportunity note
113 ----------------
114 Currently we don't recognize:
115
116 S0: Y = (S * i') - B
117 S1: X = (S * i) - B
118
119 as a strength reduction opportunity, even though this S1 would
120 also be replaceable by the S1' above. This can be added if it
121 comes up in practice.
122
123 Strength reduction in addressing
124 --------------------------------
125 There is another kind of candidate known as CAND_REF. A CAND_REF
126 describes a statement containing a memory reference having
127 complex addressing that might benefit from strength reduction.
128 Specifically, we are interested in references for which
129 get_inner_reference returns a base address, offset, and bitpos as
130 follows:
131
132 base: MEM_REF (T1, C1)
133 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
134 bitpos: C4 * BITS_PER_UNIT
135
136 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
137 arbitrary integer constants. Note that C2 may be zero, in which
138 case the offset will be MULT_EXPR (T2, C3).
139
140 When this pattern is recognized, the original memory reference
141 can be replaced with:
142
143 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
144 C1 + (C2 * C3) + C4)
145
146 which distributes the multiply to allow constant folding. When
147 two or more addressing expressions can be represented by MEM_REFs
148 of this form, differing only in the constants C1, C2, and C4,
149 making this substitution produces more efficient addressing during
150 the RTL phases. When there are not at least two expressions with
151 the same values of T1, T2, and C3, there is nothing to be gained
152 by the replacement.
153
154 Strength reduction of CAND_REFs uses the same infrastructure as
155 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
156 field, MULT_EXPR (T2, C3) in the stride (S) field, and
157 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
158 is thus another CAND_REF with the same B and S values. When at
159 least two CAND_REFs are chained together using the basis relation,
160 each of them is replaced as above, resulting in improved code
161 generation for addressing.
162
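As a rough, hypothetical illustration, two stores such as

q->a[i] = 1;
q->a[i + 5] = 2;

decompose to the same T1 (the pointer q), T2 (the index i), and C3
(the element size), and differ only in the constants C1, C2, and C4.
Both addresses are therefore rewritten around the shared product
T2 * C3, with the remaining constants folded into the MEM_REF offsets.
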
163 Conditional candidates
164 ======================
165
166 Conditional candidates are best illustrated with an example.
167 Consider the code sequence:
168
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
171 if (...)
172 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
173 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
174 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
175 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
176
177 Here strength reduction is complicated by the uncertain value of x_2.
178 A legitimate transformation is:
179
180 (1) x_0 = ...;
181 (2) a_0 = x_0 * 5;
182 if (...)
183 {
184 (3) [x_1 = x_0 + 1;]
185 (3a) t_1 = a_0 + 5;
186 }
187 (4) [x_2 = PHI <x_0, x_1>;]
188 (4a) t_2 = PHI <a_0, t_1>;
189 (5) [x_3 = x_2 + 1;]
190 (6r) a_1 = t_2 + 5;
191
192 where the bracketed instructions may go dead.
193
194 To recognize this opportunity, we have to observe that statement (6)
195 has a "hidden basis" (2). The hidden basis is unlike a normal basis
196 in that the statement and the hidden basis have different base SSA
197 names (x_2 and x_0, respectively). The relationship is established
198 when a statement's base name (x_2) is defined by a phi statement (4),
199 each argument of which (x_0, x_1) has an identical "derived base name."
200 If the argument is defined by a candidate (as x_1 is by (3)) that is a
201 CAND_ADD having a stride of 1, the derived base name of the argument is
202 the base name of the candidate (x_0). Otherwise, the argument itself
203 is its derived base name (as is the case with argument x_0).
204
205 The hidden basis for statement (6) is the nearest dominating candidate
206 whose base name is the derived base name (x_0) of the feeding phi (4),
207 and whose stride is identical to that of the statement. We can then
208 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
209 allowing the final replacement of (6) by the strength-reduced (6r).
210
211 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
212 A CAND_PHI is not a candidate for replacement, but is maintained in the
213 candidate table to ease discovery of hidden bases. Any phi statement
214 whose arguments share a common derived base name is entered into the
215 table with the derived base name, an (arbitrary) index of zero, and a
216 stride of 1. A statement with a hidden basis can then be detected by
217 simply looking up its feeding phi definition in the candidate table,
218 extracting the derived base name, and searching for a basis in the
219 usual manner after substituting the derived base name.
220
221 Note that the transformation is only valid when the original phi and
222 the statements that define the phi's arguments are all at the same
223 position in the loop hierarchy. */
224
225
226 /* Index into the candidate vector, offset by 1. VECs are zero-based,
227 while cand_idx's are one-based, with zero indicating null. */
228 typedef unsigned cand_idx;
229
230 /* The kind of candidate. */
231 enum cand_kind
232 {
233 CAND_MULT,
234 CAND_ADD,
235 CAND_REF,
236 CAND_PHI
237 };
238
239 struct slsr_cand_d
240 {
241 /* The candidate statement S1. */
242 gimple *cand_stmt;
243
244 /* The base expression B: often an SSA name, but not always. */
245 tree base_expr;
246
247 /* The stride S. */
248 tree stride;
249
250 /* The index constant i. */
251 widest_int index;
252
253 /* The type of the candidate. This is normally the type of base_expr,
254 but casts may have occurred when combining feeding instructions.
255 A candidate can only be a basis for candidates of the same final type.
256 (For CAND_REFs, this is the type to be used for operand 1 of the
257 replacement MEM_REF.) */
258 tree cand_type;
259
260 /* The kind of candidate (CAND_MULT, etc.). */
261 enum cand_kind kind;
262
263 /* Index of this candidate in the candidate vector. */
264 cand_idx cand_num;
265
266 /* Index of the next candidate record for the same statement.
267 A statement may be useful in more than one way (e.g., due to
268 commutativity). So we can have multiple "interpretations"
269 of a statement. */
270 cand_idx next_interp;
271
272 /* Index of the basis statement S0, if any, in the candidate vector. */
273 cand_idx basis;
274
275 /* First candidate for which this candidate is a basis, if one exists. */
276 cand_idx dependent;
277
278 /* Next candidate having the same basis as this one. */
279 cand_idx sibling;
280
281 /* If this is a conditional candidate, the CAND_PHI candidate
282 that defines the base SSA name B. */
283 cand_idx def_phi;
284
285 /* Savings that can be expected from eliminating dead code if this
286 candidate is replaced. */
287 int dead_savings;
288 };
289
290 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
291 typedef const struct slsr_cand_d *const_slsr_cand_t;
292
293 /* Pointers to candidates are chained together as part of a mapping
294 from base expressions to the candidates that use them. */
295
296 struct cand_chain_d
297 {
298 /* Base expression for the chain of candidates: often, but not
299 always, an SSA name. */
300 tree base_expr;
301
302 /* Pointer to a candidate. */
303 slsr_cand_t cand;
304
305 /* Chain pointer. */
306 struct cand_chain_d *next;
307
308 };
309
310 typedef struct cand_chain_d cand_chain, *cand_chain_t;
311 typedef const struct cand_chain_d *const_cand_chain_t;
312
313 /* Information about a unique "increment" associated with candidates
314 having an SSA name for a stride. An increment is the difference
315 between the index of the candidate and the index of its basis,
316 i.e., (i - i') as discussed in the module commentary.
317
318 When we are not going to generate address arithmetic we treat
319 increments that differ only in sign as the same, allowing sharing
320 of the cost of initializers. The absolute value of the increment
321 is stored in the incr_info. */
322
323 struct incr_info_d
324 {
325 /* The increment that relates a candidate to its basis. */
326 widest_int incr;
327
328 /* How many times the increment occurs in the candidate tree. */
329 unsigned count;
330
331 /* Cost of replacing candidates using this increment. Negative and
332 zero costs indicate replacement should be performed. */
333 int cost;
334
335 /* If this increment is profitable but is not -1, 0, or 1, it requires
336 an initializer T_0 = stride * incr to be found or introduced in the
337 nearest common dominator of all candidates. This field holds T_0
338 for subsequent use. */
339 tree initializer;
340
341 /* If the initializer was found to already exist, this is the block
342 where it was found. */
343 basic_block init_bb;
344 };
345
346 typedef struct incr_info_d incr_info, *incr_info_t;
347
348 /* Candidates are maintained in a vector. If candidate X dominates
349 candidate Y, then X appears before Y in the vector; but the
350 converse does not necessarily hold. */
351 static vec<slsr_cand_t> cand_vec;
352
353 enum cost_consts
354 {
355 COST_NEUTRAL = 0,
356 COST_INFINITE = 1000
357 };
358
359 enum stride_status
360 {
361 UNKNOWN_STRIDE = 0,
362 KNOWN_STRIDE = 1
363 };
364
365 enum phi_adjust_status
366 {
367 NOT_PHI_ADJUST = 0,
368 PHI_ADJUST = 1
369 };
370
371 enum count_phis_status
372 {
373 DONT_COUNT_PHIS = 0,
374 COUNT_PHIS = 1
375 };
376
377 /* Pointer map embodying a mapping from statements to candidates. */
378 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
379
380 /* Obstack for candidates. */
381 static struct obstack cand_obstack;
382
383 /* Obstack for candidate chains. */
384 static struct obstack chain_obstack;
385
386 /* An array INCR_VEC of incr_infos is used during analysis of related
387 candidates having an SSA name for a stride. INCR_VEC_LEN describes
388 its current length. MAX_INCR_VEC_LEN is used to avoid costly
389 pathological cases. */
390 static incr_info_t incr_vec;
391 static unsigned incr_vec_len;
392 const int MAX_INCR_VEC_LEN = 16;
393
394 /* For a chain of candidates with unknown stride, indicates whether or not
395 we must generate pointer arithmetic when replacing statements. */
396 static bool address_arithmetic_p;
397
398 /* Forward function declarations. */
399 static slsr_cand_t base_cand_from_table (tree);
400 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
401 static bool legal_cast_p_1 (tree, tree);
402 \f
403 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
404
405 static slsr_cand_t
406 lookup_cand (cand_idx idx)
407 {
408 return cand_vec[idx - 1];
409 }
410
411 /* Helper for hashing a candidate chain header. */
412
413 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
414 {
415 static inline hashval_t hash (const cand_chain *);
416 static inline bool equal (const cand_chain *, const cand_chain *);
417 };
418
419 inline hashval_t
420 cand_chain_hasher::hash (const cand_chain *p)
421 {
422 tree base_expr = p->base_expr;
423 return iterative_hash_expr (base_expr, 0);
424 }
425
426 inline bool
427 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
428 {
429 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
430 }
431
432 /* Hash table embodying a mapping from base exprs to chains of candidates. */
433 static hash_table<cand_chain_hasher> *base_cand_map;
434 \f
435 /* Pointer map used by tree_to_aff_combination_expand. */
436 static hash_map<tree, name_expansion *> *name_expansions;
437 /* Pointer map embodying a mapping from bases to alternative bases. */
438 static hash_map<tree, tree> *alt_base_map;
439
440 /* Given BASE, use the tree affine combination facilities to
441 find the underlying tree expression for BASE, with any
442 immediate offset excluded.
443
444 N.B. we should eliminate this backtracking with better forward
445 analysis in a future release. */
446
447 static tree
448 get_alternative_base (tree base)
449 {
450 tree *result = alt_base_map->get (base);
451
452 if (result == NULL)
453 {
454 tree expr;
455 aff_tree aff;
456
457 tree_to_aff_combination_expand (base, TREE_TYPE (base),
458 &aff, &name_expansions);
459 aff.offset = 0;
460 expr = aff_combination_to_tree (&aff);
461
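/* Cache the expansion, storing NULL when it is just BASE itself so
   the fruitless expansion is not repeated on later queries.  */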
462 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
463
464 return expr == base ? NULL : expr;
465 }
466
467 return *result;
468 }
469
470 /* Look in the candidate table for a CAND_PHI that defines BASE and
471 return it if found; otherwise return NULL. */
472
473 static cand_idx
474 find_phi_def (tree base)
475 {
476 slsr_cand_t c;
477
478 if (TREE_CODE (base) != SSA_NAME)
479 return 0;
480
481 c = base_cand_from_table (base);
482
483 if (!c || c->kind != CAND_PHI)
484 return 0;
485
486 return c->cand_num;
487 }
488
489 /* Helper routine for find_basis_for_candidate. May be called twice:
490 once for the candidate's base expr, and optionally again either for
491 the candidate's phi definition or for a CAND_REF's alternative base
492 expression. */
493
494 static slsr_cand_t
495 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
496 {
497 cand_chain mapping_key;
498 cand_chain_t chain;
499 slsr_cand_t basis = NULL;
500
501 // Limit potential of N^2 behavior for long candidate chains.
502 int iters = 0;
503 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
504
505 mapping_key.base_expr = base_expr;
506 chain = base_cand_map->find (&mapping_key);
507
508 for (; chain && iters < max_iters; chain = chain->next, ++iters)
509 {
510 slsr_cand_t one_basis = chain->cand;
511
512 if (one_basis->kind != c->kind
513 || one_basis->cand_stmt == c->cand_stmt
514 || !operand_equal_p (one_basis->stride, c->stride, 0)
515 || !types_compatible_p (one_basis->cand_type, c->cand_type)
516 || !dominated_by_p (CDI_DOMINATORS,
517 gimple_bb (c->cand_stmt),
518 gimple_bb (one_basis->cand_stmt)))
519 continue;
520
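/* Candidates are entered in dominator order, so among the valid
   bases the one with the highest candidate number is the most
   immediately dominating; prefer it.  */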
521 if (!basis || basis->cand_num < one_basis->cand_num)
522 basis = one_basis;
523 }
524
525 return basis;
526 }
527
528 /* Use the base expr from candidate C to look for possible candidates
529 that can serve as a basis for C. Each potential basis must also
530 appear in a block that dominates the candidate statement and have
531 the same stride and type. If more than one possible basis exists,
532 the one with highest index in the vector is chosen; this will be
533 the most immediately dominating basis. */
534
535 static int
536 find_basis_for_candidate (slsr_cand_t c)
537 {
538 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
539
540 /* If a candidate doesn't have a basis using its base expression,
541 it may have a basis hidden by one or more intervening phis. */
542 if (!basis && c->def_phi)
543 {
544 basic_block basis_bb, phi_bb;
545 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
546 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
547
548 if (basis)
549 {
550 /* A hidden basis must dominate the phi-definition of the
551 candidate's base name. */
552 phi_bb = gimple_bb (phi_cand->cand_stmt);
553 basis_bb = gimple_bb (basis->cand_stmt);
554
555 if (phi_bb == basis_bb
556 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
557 {
558 basis = NULL;
559 c->basis = 0;
560 }
561
562 /* If we found a hidden basis, estimate additional dead-code
563 savings if the phi and its feeding statements can be removed. */
564 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
565 c->dead_savings += phi_cand->dead_savings;
566 }
567 }
568
569 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
570 {
571 tree alt_base_expr = get_alternative_base (c->base_expr);
572 if (alt_base_expr)
573 basis = find_basis_for_base_expr (c, alt_base_expr);
574 }
575
576 if (basis)
577 {
578 c->sibling = basis->dependent;
579 basis->dependent = c->cand_num;
580 return basis->cand_num;
581 }
582
583 return 0;
584 }
585
586 /* Record a mapping from BASE to C, indicating that C may potentially serve
587 as a basis using that base expression. BASE may be the same as
588    C->BASE_EXPR; alternatively BASE can be a different tree that shares the
589    underlying expression of C->BASE_EXPR.  */
590
591 static void
592 record_potential_basis (slsr_cand_t c, tree base)
593 {
594 cand_chain_t node;
595 cand_chain **slot;
596
597 gcc_assert (base);
598
599 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
600 node->base_expr = base;
601 node->cand = c;
602 node->next = NULL;
603 slot = base_cand_map->find_slot (node, INSERT);
604
605 if (*slot)
606 {
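/* A chain already exists for this base; splice the new node in
   directly after the head so the pointer recorded in the hash
   table slot itself is left unchanged.  */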
607 cand_chain_t head = (cand_chain_t) (*slot);
608 node->next = head->next;
609 head->next = node;
610 }
611 else
612 *slot = node;
613 }
614
615 /* Allocate storage for a new candidate and initialize its fields.
616 Attempt to find a basis for the candidate.
617
618 For CAND_REF, an alternative base may also be recorded and used
619 to find a basis. This helps cases where the expression hidden
620 behind BASE (which is usually an SSA_NAME) has immediate offset,
621 e.g.
622
623 a2[i][j] = 1;
624 a2[i + 20][j] = 2; */
625
626 static slsr_cand_t
627 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
628 const widest_int &index, tree stride, tree ctype,
629 unsigned savings)
630 {
631 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
632 sizeof (slsr_cand));
633 c->cand_stmt = gs;
634 c->base_expr = base;
635 c->stride = stride;
636 c->index = index;
637 c->cand_type = ctype;
638 c->kind = kind;
639 c->cand_num = cand_vec.length () + 1;
640 c->next_interp = 0;
641 c->dependent = 0;
642 c->sibling = 0;
643 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
644 c->dead_savings = savings;
645
646 cand_vec.safe_push (c);
647
648 if (kind == CAND_PHI)
649 c->basis = 0;
650 else
651 c->basis = find_basis_for_candidate (c);
652
653 record_potential_basis (c, base);
654 if (flag_expensive_optimizations && kind == CAND_REF)
655 {
656 tree alt_base = get_alternative_base (base);
657 if (alt_base)
658 record_potential_basis (c, alt_base);
659 }
660
661 return c;
662 }
663
664 /* Determine the target cost of statement GS when compiling according
665 to SPEED. */
666
667 static int
668 stmt_cost (gimple *gs, bool speed)
669 {
670 tree lhs, rhs1, rhs2;
671 machine_mode lhs_mode;
672
673 gcc_assert (is_gimple_assign (gs));
674 lhs = gimple_assign_lhs (gs);
675 rhs1 = gimple_assign_rhs1 (gs);
676 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
677
678 switch (gimple_assign_rhs_code (gs))
679 {
680 case MULT_EXPR:
681 rhs2 = gimple_assign_rhs2 (gs);
682
683 if (tree_fits_shwi_p (rhs2))
684 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
685
686 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
687 return mul_cost (speed, lhs_mode);
688
689 case PLUS_EXPR:
690 case POINTER_PLUS_EXPR:
691 case MINUS_EXPR:
692 return add_cost (speed, lhs_mode);
693
694 case NEGATE_EXPR:
695 return neg_cost (speed, lhs_mode);
696
697 CASE_CONVERT:
698 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
699
700 /* Note that we don't assign costs to copies that in most cases
701 will go away. */
702 default:
703 ;
704 }
705
706 gcc_unreachable ();
707 return 0;
708 }
709
710 /* Look up the defining statement for BASE_IN and return a pointer
711 to its candidate in the candidate table, if any; otherwise NULL.
712 Only CAND_ADD and CAND_MULT candidates are returned. */
713
714 static slsr_cand_t
715 base_cand_from_table (tree base_in)
716 {
717 slsr_cand_t *result;
718
719 gimple *def = SSA_NAME_DEF_STMT (base_in);
720 if (!def)
721 return (slsr_cand_t) NULL;
722
723 result = stmt_cand_map->get (def);
724
725 if (result && (*result)->kind != CAND_REF)
726 return *result;
727
728 return (slsr_cand_t) NULL;
729 }
730
731 /* Add an entry to the statement-to-candidate mapping. */
732
733 static void
734 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
735 {
736 gcc_assert (!stmt_cand_map->put (gs, c));
737 }
738 \f
739 /* Given PHI which contains a phi statement, determine whether it
740 satisfies all the requirements of a phi candidate. If so, create
741 a candidate. Note that a CAND_PHI never has a basis itself, but
742 is used to help find a basis for subsequent candidates. */
743
744 static void
745 slsr_process_phi (gphi *phi, bool speed)
746 {
747 unsigned i;
748 tree arg0_base = NULL_TREE, base_type;
749 slsr_cand_t c;
750 struct loop *cand_loop = gimple_bb (phi)->loop_father;
751 unsigned savings = 0;
752
753 /* A CAND_PHI requires each of its arguments to have the same
754 derived base name. (See the module header commentary for a
755 definition of derived base names.) Furthermore, all feeding
756 definitions must be in the same position in the loop hierarchy
757 as PHI. */
758
759 for (i = 0; i < gimple_phi_num_args (phi); i++)
760 {
761 slsr_cand_t arg_cand;
762 tree arg = gimple_phi_arg_def (phi, i);
763 tree derived_base_name = NULL_TREE;
764 gimple *arg_stmt = NULL;
765 basic_block arg_bb = NULL;
766
767 if (TREE_CODE (arg) != SSA_NAME)
768 return;
769
770 arg_cand = base_cand_from_table (arg);
771
772 if (arg_cand)
773 {
774 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
775 {
776 if (!arg_cand->next_interp)
777 return;
778
779 arg_cand = lookup_cand (arg_cand->next_interp);
780 }
781
782 if (!integer_onep (arg_cand->stride))
783 return;
784
785 derived_base_name = arg_cand->base_expr;
786 arg_stmt = arg_cand->cand_stmt;
787 arg_bb = gimple_bb (arg_stmt);
788
789 /* Gather potential dead code savings if the phi statement
790 can be removed later on. */
791 if (has_single_use (arg))
792 {
793 if (gimple_code (arg_stmt) == GIMPLE_PHI)
794 savings += arg_cand->dead_savings;
795 else
796 savings += stmt_cost (arg_stmt, speed);
797 }
798 }
799 else
800 {
801 derived_base_name = arg;
802
803 if (SSA_NAME_IS_DEFAULT_DEF (arg))
804 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
805 else
806             arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
807 }
808
809 if (!arg_bb || arg_bb->loop_father != cand_loop)
810 return;
811
812 if (i == 0)
813 arg0_base = derived_base_name;
814 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
815 return;
816 }
817
818 /* Create the candidate. "alloc_cand_and_find_basis" is named
819 misleadingly for this case, as no basis will be sought for a
820 CAND_PHI. */
821 base_type = TREE_TYPE (arg0_base);
822
823 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
824 0, integer_one_node, base_type, savings);
825
826 /* Add the candidate to the statement-candidate mapping. */
827 add_cand_for_stmt (phi, c);
828 }
829
830 /* Given PBASE which is a pointer to tree, look up the defining
831 statement for it and check whether the candidate is in the
832 form of:
833
834 X = B + (1 * S), S is integer constant
835 X = B + (i * S), S is integer one
836
837    If so, set PBASE to the candidate's base_expr and return the
838    widest_int value (i * S).
839    Otherwise, just return a widest_int of zero.  */
840
841 static widest_int
842 backtrace_base_for_ref (tree *pbase)
843 {
844 tree base_in = *pbase;
845 slsr_cand_t base_cand;
846
847 STRIP_NOPS (base_in);
848
849 /* Strip off widening conversion(s) to handle cases where
850 e.g. 'B' is widened from an 'int' in order to calculate
851 a 64-bit address. */
852 if (CONVERT_EXPR_P (base_in)
853 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
854 base_in = get_unwidened (base_in, NULL_TREE);
855
856 if (TREE_CODE (base_in) != SSA_NAME)
857 return 0;
858
859 base_cand = base_cand_from_table (base_in);
860
861 while (base_cand && base_cand->kind != CAND_PHI)
862 {
863 if (base_cand->kind == CAND_ADD
864 && base_cand->index == 1
865 && TREE_CODE (base_cand->stride) == INTEGER_CST)
866 {
867 /* X = B + (1 * S), S is integer constant. */
868 *pbase = base_cand->base_expr;
869 return wi::to_widest (base_cand->stride);
870 }
871 else if (base_cand->kind == CAND_ADD
872 && TREE_CODE (base_cand->stride) == INTEGER_CST
873 && integer_onep (base_cand->stride))
874 {
875 /* X = B + (i * S), S is integer one. */
876 *pbase = base_cand->base_expr;
877 return base_cand->index;
878 }
879
880 if (base_cand->next_interp)
881 base_cand = lookup_cand (base_cand->next_interp);
882 else
883 base_cand = NULL;
884 }
885
886 return 0;
887 }
888
889 /* Look for the following pattern:
890
891 *PBASE: MEM_REF (T1, C1)
892
893 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
894 or
895 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
896 or
897 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
898
899 *PINDEX: C4 * BITS_PER_UNIT
900
901 If not present, leave the input values unchanged and return FALSE.
902 Otherwise, modify the input values as follows and return TRUE:
903
904 *PBASE: T1
905 *POFFSET: MULT_EXPR (T2, C3)
906 *PINDEX: C1 + (C2 * C3) + C4
907
908 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
909 will be further restructured to:
910
911 *PBASE: T1
912 *POFFSET: MULT_EXPR (T2', C3)
913 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
914
915 static bool
916 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
917 tree *ptype)
918 {
919 tree base = *pbase, offset = *poffset;
920 widest_int index = *pindex;
921 tree mult_op0, t1, t2, type;
922 widest_int c1, c2, c3, c4, c5;
923
924 if (!base
925 || !offset
926 || TREE_CODE (base) != MEM_REF
927 || TREE_CODE (offset) != MULT_EXPR
928 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
929 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
930 return false;
931
932 t1 = TREE_OPERAND (base, 0);
933 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
934 type = TREE_TYPE (TREE_OPERAND (base, 1));
935
936 mult_op0 = TREE_OPERAND (offset, 0);
937 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
938
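/* Decompose the multiplier operand into T2 and C2: it may be
   (T2 + C2), (T2 - |C2|), or a bare T2 with C2 = 0.  */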
939 if (TREE_CODE (mult_op0) == PLUS_EXPR)
940
941 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
942 {
943 t2 = TREE_OPERAND (mult_op0, 0);
944 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
945 }
946 else
947 return false;
948
949 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
950
951 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
952 {
953 t2 = TREE_OPERAND (mult_op0, 0);
954 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
955 }
956 else
957 return false;
958
959 else
960 {
961 t2 = mult_op0;
962 c2 = 0;
963 }
964
965 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
966 c5 = backtrace_base_for_ref (&t2);
967
968 *pbase = t1;
969 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
970 wide_int_to_tree (sizetype, c3));
971 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
972 *ptype = type;
973
974 return true;
975 }
976
977 /* Given GS which contains a data reference, create a CAND_REF entry in
978 the candidate table and attempt to find a basis. */
979
980 static void
981 slsr_process_ref (gimple *gs)
982 {
983 tree ref_expr, base, offset, type;
984 HOST_WIDE_INT bitsize, bitpos;
985 machine_mode mode;
986 int unsignedp, volatilep;
987 slsr_cand_t c;
988
989 if (gimple_vdef (gs))
990 ref_expr = gimple_assign_lhs (gs);
991 else
992 ref_expr = gimple_assign_rhs1 (gs);
993
994 if (!handled_component_p (ref_expr)
995 || TREE_CODE (ref_expr) == BIT_FIELD_REF
996 || (TREE_CODE (ref_expr) == COMPONENT_REF
997 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
998 return;
999
1000 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1001 &unsignedp, &volatilep, false);
1002 widest_int index = bitpos;
1003
1004 if (!restructure_reference (&base, &offset, &index, &type))
1005 return;
1006
1007 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1008 type, 0);
1009
1010 /* Add the candidate to the statement-candidate mapping. */
1011 add_cand_for_stmt (gs, c);
1012 }
1013
1014 /* Create a candidate entry for a statement GS, where GS multiplies
1015 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1016 about the two SSA names into the new candidate. Return the new
1017 candidate. */
1018
1019 static slsr_cand_t
1020 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1021 {
1022 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1023 widest_int index;
1024 unsigned savings = 0;
1025 slsr_cand_t c;
1026 slsr_cand_t base_cand = base_cand_from_table (base_in);
1027
1028 /* Look at all interpretations of the base candidate, if necessary,
1029 to find information to propagate into this candidate. */
1030 while (base_cand && !base && base_cand->kind != CAND_PHI)
1031 {
1032
1033 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1034 {
1035 /* Y = (B + i') * 1
1036 X = Y * Z
1037 ================
1038 X = (B + i') * Z */
1039 base = base_cand->base_expr;
1040 index = base_cand->index;
1041 stride = stride_in;
1042 ctype = base_cand->cand_type;
1043 if (has_single_use (base_in))
1044 savings = (base_cand->dead_savings
1045 + stmt_cost (base_cand->cand_stmt, speed));
1046 }
1047 else if (base_cand->kind == CAND_ADD
1048 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1049 {
1050 /* Y = B + (i' * S), S constant
1051 X = Y * Z
1052 ============================
1053 X = B + ((i' * S) * Z) */
1054 base = base_cand->base_expr;
1055 index = base_cand->index * wi::to_widest (base_cand->stride);
1056 stride = stride_in;
1057 ctype = base_cand->cand_type;
1058 if (has_single_use (base_in))
1059 savings = (base_cand->dead_savings
1060 + stmt_cost (base_cand->cand_stmt, speed));
1061 }
1062
1063 if (base_cand->next_interp)
1064 base_cand = lookup_cand (base_cand->next_interp);
1065 else
1066 base_cand = NULL;
1067 }
1068
1069 if (!base)
1070 {
1071 /* No interpretations had anything useful to propagate, so
1072 produce X = (Y + 0) * Z. */
1073 base = base_in;
1074 index = 0;
1075 stride = stride_in;
1076 ctype = TREE_TYPE (base_in);
1077 }
1078
1079 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1080 ctype, savings);
1081 return c;
1082 }
1083
1084 /* Create a candidate entry for a statement GS, where GS multiplies
1085 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1086 information about BASE_IN into the new candidate. Return the new
1087 candidate. */
1088
1089 static slsr_cand_t
1090 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1091 {
1092 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1093 widest_int index, temp;
1094 unsigned savings = 0;
1095 slsr_cand_t c;
1096 slsr_cand_t base_cand = base_cand_from_table (base_in);
1097
1098 /* Look at all interpretations of the base candidate, if necessary,
1099 to find information to propagate into this candidate. */
1100 while (base_cand && !base && base_cand->kind != CAND_PHI)
1101 {
1102 if (base_cand->kind == CAND_MULT
1103 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1104 {
1105 /* Y = (B + i') * S, S constant
1106 X = Y * c
1107 ============================
1108 X = (B + i') * (S * c) */
1109 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1110 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1111 {
1112 base = base_cand->base_expr;
1113 index = base_cand->index;
1114 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1115 ctype = base_cand->cand_type;
1116 if (has_single_use (base_in))
1117 savings = (base_cand->dead_savings
1118 + stmt_cost (base_cand->cand_stmt, speed));
1119 }
1120 }
1121 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1122 {
1123 /* Y = B + (i' * 1)
1124 X = Y * c
1125 ===========================
1126 X = (B + i') * c */
1127 base = base_cand->base_expr;
1128 index = base_cand->index;
1129 stride = stride_in;
1130 ctype = base_cand->cand_type;
1131 if (has_single_use (base_in))
1132 savings = (base_cand->dead_savings
1133 + stmt_cost (base_cand->cand_stmt, speed));
1134 }
1135 else if (base_cand->kind == CAND_ADD
1136 && base_cand->index == 1
1137 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1138 {
1139 /* Y = B + (1 * S), S constant
1140 X = Y * c
1141 ===========================
1142 X = (B + S) * c */
1143 base = base_cand->base_expr;
1144 index = wi::to_widest (base_cand->stride);
1145 stride = stride_in;
1146 ctype = base_cand->cand_type;
1147 if (has_single_use (base_in))
1148 savings = (base_cand->dead_savings
1149 + stmt_cost (base_cand->cand_stmt, speed));
1150 }
1151
1152 if (base_cand->next_interp)
1153 base_cand = lookup_cand (base_cand->next_interp);
1154 else
1155 base_cand = NULL;
1156 }
1157
1158 if (!base)
1159 {
1160 /* No interpretations had anything useful to propagate, so
1161 produce X = (Y + 0) * c. */
1162 base = base_in;
1163 index = 0;
1164 stride = stride_in;
1165 ctype = TREE_TYPE (base_in);
1166 }
1167
1168 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1169 ctype, savings);
1170 return c;
1171 }
1172
1173 /* Given GS which is a multiply of scalar integers, make an appropriate
1174 entry in the candidate table. If this is a multiply of two SSA names,
1175 create two CAND_MULT interpretations and attempt to find a basis for
1176 each of them. Otherwise, create a single CAND_MULT and attempt to
1177 find a basis. */
1178
1179 static void
1180 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1181 {
1182 slsr_cand_t c, c2;
1183
1184 /* If this is a multiply of an SSA name with itself, it is highly
1185 unlikely that we will get a strength reduction opportunity, so
1186 don't record it as a candidate. This simplifies the logic for
1187 finding a basis, so if this is removed that must be considered. */
1188 if (rhs1 == rhs2)
1189 return;
1190
1191 if (TREE_CODE (rhs2) == SSA_NAME)
1192 {
1193 /* Record an interpretation of this statement in the candidate table
1194 assuming RHS1 is the base expression and RHS2 is the stride. */
1195 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1196
1197 /* Add the first interpretation to the statement-candidate mapping. */
1198 add_cand_for_stmt (gs, c);
1199
1200 /* Record another interpretation of this statement assuming RHS1
1201 is the stride and RHS2 is the base expression. */
1202 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1203 c->next_interp = c2->cand_num;
1204 }
1205 else
1206 {
1207 /* Record an interpretation for the multiply-immediate. */
1208 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1209
1210 /* Add the interpretation to the statement-candidate mapping. */
1211 add_cand_for_stmt (gs, c);
1212 }
1213 }
1214
1215 /* Create a candidate entry for a statement GS, where GS adds two
1216 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1217 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1218 information about the two SSA names into the new candidate.
1219 Return the new candidate. */
1220
1221 static slsr_cand_t
1222 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1223 bool subtract_p, bool speed)
1224 {
1225 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1226 widest_int index;
1227 unsigned savings = 0;
1228 slsr_cand_t c;
1229 slsr_cand_t base_cand = base_cand_from_table (base_in);
1230 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1231
1232 /* The most useful transformation is a multiply-immediate feeding
1233 an add or subtract. Look for that first. */
1234 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1235 {
1236 if (addend_cand->kind == CAND_MULT
1237 && addend_cand->index == 0
1238 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1239 {
1240 /* Z = (B + 0) * S, S constant
1241 X = Y +/- Z
1242 ===========================
1243 X = Y + ((+/-1 * S) * B) */
1244 base = base_in;
1245 index = wi::to_widest (addend_cand->stride);
1246 if (subtract_p)
1247 index = -index;
1248 stride = addend_cand->base_expr;
1249 ctype = TREE_TYPE (base_in);
1250 if (has_single_use (addend_in))
1251 savings = (addend_cand->dead_savings
1252 + stmt_cost (addend_cand->cand_stmt, speed));
1253 }
1254
1255 if (addend_cand->next_interp)
1256 addend_cand = lookup_cand (addend_cand->next_interp);
1257 else
1258 addend_cand = NULL;
1259 }
1260
1261 while (base_cand && !base && base_cand->kind != CAND_PHI)
1262 {
1263 if (base_cand->kind == CAND_ADD
1264 && (base_cand->index == 0
1265 || operand_equal_p (base_cand->stride,
1266 integer_zero_node, 0)))
1267 {
1268 /* Y = B + (i' * S), i' * S = 0
1269 X = Y +/- Z
1270 ============================
1271 X = B + (+/-1 * Z) */
1272 base = base_cand->base_expr;
1273 index = subtract_p ? -1 : 1;
1274 stride = addend_in;
1275 ctype = base_cand->cand_type;
1276 if (has_single_use (base_in))
1277 savings = (base_cand->dead_savings
1278 + stmt_cost (base_cand->cand_stmt, speed));
1279 }
1280 else if (subtract_p)
1281 {
1282 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1283
1284 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1285 {
1286 if (subtrahend_cand->kind == CAND_MULT
1287 && subtrahend_cand->index == 0
1288 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1289 {
1290 /* Z = (B + 0) * S, S constant
1291 X = Y - Z
1292 ===========================
1293 Value: X = Y + ((-1 * S) * B) */
1294 base = base_in;
1295 index = wi::to_widest (subtrahend_cand->stride);
1296 index = -index;
1297 stride = subtrahend_cand->base_expr;
1298 ctype = TREE_TYPE (base_in);
1299 if (has_single_use (addend_in))
1300 savings = (subtrahend_cand->dead_savings
1301 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1302 }
1303
1304 if (subtrahend_cand->next_interp)
1305 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1306 else
1307 subtrahend_cand = NULL;
1308 }
1309 }
1310
1311 if (base_cand->next_interp)
1312 base_cand = lookup_cand (base_cand->next_interp);
1313 else
1314 base_cand = NULL;
1315 }
1316
1317 if (!base)
1318 {
1319 /* No interpretations had anything useful to propagate, so
1320 produce X = Y + (1 * Z). */
1321 base = base_in;
1322 index = subtract_p ? -1 : 1;
1323 stride = addend_in;
1324 ctype = TREE_TYPE (base_in);
1325 }
1326
1327 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1328 ctype, savings);
1329 return c;
1330 }
1331
1332 /* Create a candidate entry for a statement GS, where GS adds SSA
1333 name BASE_IN to constant INDEX_IN. Propagate any known information
1334 about BASE_IN into the new candidate. Return the new candidate. */
1335
1336 static slsr_cand_t
1337 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1338 bool speed)
1339 {
1340 enum cand_kind kind = CAND_ADD;
1341 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1342 widest_int index, multiple;
1343 unsigned savings = 0;
1344 slsr_cand_t c;
1345 slsr_cand_t base_cand = base_cand_from_table (base_in);
1346
1347 while (base_cand && !base && base_cand->kind != CAND_PHI)
1348 {
1349 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1350
1351 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1352 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1353 sign, &multiple))
1354 {
1355 /* Y = (B + i') * S, S constant, c = kS for some integer k
1356 X = Y + c
1357 ============================
1358 X = (B + (i'+ k)) * S
1359 OR
1360 Y = B + (i' * S), S constant, c = kS for some integer k
1361 X = Y + c
1362 ============================
1363 X = (B + (i'+ k)) * S */
1364 kind = base_cand->kind;
1365 base = base_cand->base_expr;
1366 index = base_cand->index + multiple;
1367 stride = base_cand->stride;
1368 ctype = base_cand->cand_type;
1369 if (has_single_use (base_in))
1370 savings = (base_cand->dead_savings
1371 + stmt_cost (base_cand->cand_stmt, speed));
1372 }
1373
1374 if (base_cand->next_interp)
1375 base_cand = lookup_cand (base_cand->next_interp);
1376 else
1377 base_cand = NULL;
1378 }
1379
1380 if (!base)
1381 {
1382 /* No interpretations had anything useful to propagate, so
1383 produce X = Y + (c * 1). */
1384 kind = CAND_ADD;
1385 base = base_in;
1386 index = index_in;
1387 stride = integer_one_node;
1388 ctype = TREE_TYPE (base_in);
1389 }
1390
1391 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1392 ctype, savings);
1393 return c;
1394 }
1395
1396 /* Given GS which is an add or subtract of scalar integers or pointers,
1397 make at least one appropriate entry in the candidate table. */
1398
1399 static void
1400 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1401 {
1402 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1403 slsr_cand_t c = NULL, c2;
1404
1405 if (TREE_CODE (rhs2) == SSA_NAME)
1406 {
1407 /* First record an interpretation assuming RHS1 is the base expression
1408 and RHS2 is the stride. But it doesn't make sense for the
1409 stride to be a pointer, so don't record a candidate in that case. */
1410 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1411 {
1412 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1413
1414 /* Add the first interpretation to the statement-candidate
1415 mapping. */
1416 add_cand_for_stmt (gs, c);
1417 }
1418
1419 /* If the two RHS operands are identical, or this is a subtract,
1420 we're done. */
1421 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1422 return;
1423
1424 /* Otherwise, record another interpretation assuming RHS2 is the
1425 base expression and RHS1 is the stride, again provided that the
1426 stride is not a pointer. */
1427 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1428 {
1429 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1430 if (c)
1431 c->next_interp = c2->cand_num;
1432 else
1433 add_cand_for_stmt (gs, c2);
1434 }
1435 }
1436 else
1437 {
1438 /* Record an interpretation for the add-immediate. */
1439 widest_int index = wi::to_widest (rhs2);
1440 if (subtract_p)
1441 index = -index;
1442
1443 c = create_add_imm_cand (gs, rhs1, index, speed);
1444
1445 /* Add the interpretation to the statement-candidate mapping. */
1446 add_cand_for_stmt (gs, c);
1447 }
1448 }
1449
1450 /* Given GS which is a negate of a scalar integer, make an appropriate
1451 entry in the candidate table. A negate is equivalent to a multiply
1452 by -1. */
1453
1454 static void
1455 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1456 {
1457 /* Record a CAND_MULT interpretation for the multiply by -1. */
1458 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1459
1460 /* Add the interpretation to the statement-candidate mapping. */
1461 add_cand_for_stmt (gs, c);
1462 }
1463
1464 /* Helper function for legal_cast_p, operating on two trees.  Checks
1465 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1466 for more details. */
1467
1468 static bool
1469 legal_cast_p_1 (tree lhs, tree rhs)
1470 {
1471 tree lhs_type, rhs_type;
1472 unsigned lhs_size, rhs_size;
1473 bool lhs_wraps, rhs_wraps;
1474
1475 lhs_type = TREE_TYPE (lhs);
1476 rhs_type = TREE_TYPE (rhs);
1477 lhs_size = TYPE_PRECISION (lhs_type);
1478 rhs_size = TYPE_PRECISION (rhs_type);
1479 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1480 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1481
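/* Reject narrowing casts, casts from a wrapping type to a
   non-wrapping type, and casts between wrapping types of different
   sizes; see the commentary ahead of legal_cast_p.  */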
1482 if (lhs_size < rhs_size
1483 || (rhs_wraps && !lhs_wraps)
1484 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1485 return false;
1486
1487 return true;
1488 }
1489
1490 /* Return TRUE if GS is a statement that defines an SSA name from
1491 a conversion and is legal for us to combine with an add and multiply
1492 in the candidate table. For example, suppose we have:
1493
1494 A = B + i;
1495 C = (type) A;
1496 D = C * S;
1497
1498 Without the type-cast, we would create a CAND_MULT for D with base B,
1499 index i, and stride S. We want to record this candidate only if it
1500    is equivalent to applying the type cast following the multiply:
1501
1502 A = B + i;
1503 E = A * S;
1504 D = (type) E;
1505
1506 We will record the type with the candidate for D. This allows us
1507 to use a similar previous candidate as a basis. If we have earlier seen
1508
1509 A' = B + i';
1510 C' = (type) A';
1511 D' = C' * S;
1512
1513 we can replace D with
1514
1515 D = D' + (i - i') * S;
1516
1517 But if moving the type-cast would change semantics, we mustn't do this.
1518
1519 This is legitimate for casts from a non-wrapping integral type to
1520 any integral type of the same or larger size. It is not legitimate
1521 to convert a wrapping type to a non-wrapping type, or to a wrapping
1522 type of a different size. I.e., with a wrapping type, we must
1523 assume that the addition B + i could wrap, in which case performing
1524 the multiply before or after one of the "illegal" type casts will
1525 have different semantics. */
1526
1527 static bool
1528 legal_cast_p (gimple *gs, tree rhs)
1529 {
1530 if (!is_gimple_assign (gs)
1531 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1532 return false;
1533
1534 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1535 }
1536
1537 /* Given GS which is a cast to a scalar integer type, determine whether
1538 the cast is legal for strength reduction. If so, make at least one
1539 appropriate entry in the candidate table. */
1540
1541 static void
1542 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1543 {
1544 tree lhs, ctype;
1545 slsr_cand_t base_cand, c, c2;
1546 unsigned savings = 0;
1547
1548 if (!legal_cast_p (gs, rhs1))
1549 return;
1550
1551 lhs = gimple_assign_lhs (gs);
1552 base_cand = base_cand_from_table (rhs1);
1553 ctype = TREE_TYPE (lhs);
1554
1555 if (base_cand && base_cand->kind != CAND_PHI)
1556 {
1557 while (base_cand)
1558 {
1559 /* Propagate all data from the base candidate except the type,
1560 which comes from the cast, and the base candidate's cast,
1561 which is no longer applicable. */
1562 if (has_single_use (rhs1))
1563 savings = (base_cand->dead_savings
1564 + stmt_cost (base_cand->cand_stmt, speed));
1565
1566 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1567 base_cand->base_expr,
1568 base_cand->index, base_cand->stride,
1569 ctype, savings);
1570 if (base_cand->next_interp)
1571 base_cand = lookup_cand (base_cand->next_interp);
1572 else
1573 base_cand = NULL;
1574 }
1575 }
1576 else
1577 {
1578 /* If nothing is known about the RHS, create fresh CAND_ADD and
1579 CAND_MULT interpretations:
1580
1581 X = Y + (0 * 1)
1582 X = (Y + 0) * 1
1583
1584 The first of these is somewhat arbitrary, but the choice of
1585 1 for the stride simplifies the logic for propagating casts
1586 into their uses. */
1587 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1588 0, integer_one_node, ctype, 0);
1589 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1590 0, integer_one_node, ctype, 0);
1591 c->next_interp = c2->cand_num;
1592 }
1593
1594 /* Add the first (or only) interpretation to the statement-candidate
1595 mapping. */
1596 add_cand_for_stmt (gs, c);
1597 }
1598
1599 /* Given GS which is a copy of a scalar integer type, make at least one
1600 appropriate entry in the candidate table.
1601
1602 This interface is included for completeness, but is unnecessary
1603 if this pass immediately follows a pass that performs copy
1604 propagation, such as DOM. */
1605
1606 static void
1607 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1608 {
1609 slsr_cand_t base_cand, c, c2;
1610 unsigned savings = 0;
1611
1612 base_cand = base_cand_from_table (rhs1);
1613
1614 if (base_cand && base_cand->kind != CAND_PHI)
1615 {
1616 while (base_cand)
1617 {
1618 /* Propagate all data from the base candidate. */
1619 if (has_single_use (rhs1))
1620 savings = (base_cand->dead_savings
1621 + stmt_cost (base_cand->cand_stmt, speed));
1622
1623 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1624 base_cand->base_expr,
1625 base_cand->index, base_cand->stride,
1626 base_cand->cand_type, savings);
1627 if (base_cand->next_interp)
1628 base_cand = lookup_cand (base_cand->next_interp);
1629 else
1630 base_cand = NULL;
1631 }
1632 }
1633 else
1634 {
1635 /* If nothing is known about the RHS, create fresh CAND_ADD and
1636 CAND_MULT interpretations:
1637
1638 X = Y + (0 * 1)
1639 X = (Y + 0) * 1
1640
1641 The first of these is somewhat arbitrary, but the choice of
1642 1 for the stride simplifies the logic for propagating casts
1643 into their uses. */
1644 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1645 0, integer_one_node, TREE_TYPE (rhs1), 0);
1646 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1647 0, integer_one_node, TREE_TYPE (rhs1), 0);
1648 c->next_interp = c2->cand_num;
1649 }
1650
1651 /* Add the first (or only) interpretation to the statement-candidate
1652 mapping. */
1653 add_cand_for_stmt (gs, c);
1654 }
1655 \f
1656 class find_candidates_dom_walker : public dom_walker
1657 {
1658 public:
1659 find_candidates_dom_walker (cdi_direction direction)
1660 : dom_walker (direction) {}
1661 virtual void before_dom_children (basic_block);
1662 };
1663
1664 /* Find strength-reduction candidates in block BB. */
1665
1666 void
1667 find_candidates_dom_walker::before_dom_children (basic_block bb)
1668 {
1669 bool speed = optimize_bb_for_speed_p (bb);
1670
1671 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1672 gsi_next (&gsi))
1673 slsr_process_phi (gsi.phi (), speed);
1674
1675 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1676 gsi_next (&gsi))
1677 {
1678 gimple *gs = gsi_stmt (gsi);
1679
1680 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1681 slsr_process_ref (gs);
1682
1683 else if (is_gimple_assign (gs)
1684 && SCALAR_INT_MODE_P
1685 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1686 {
1687 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1688
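/* The first switch extracts and sanity-checks the operands of
   interest; the second dispatches to the matching slsr_process_*
   routine.  */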
1689 switch (gimple_assign_rhs_code (gs))
1690 {
1691 case MULT_EXPR:
1692 case PLUS_EXPR:
1693 rhs1 = gimple_assign_rhs1 (gs);
1694 rhs2 = gimple_assign_rhs2 (gs);
1695 /* Should never happen, but currently some buggy situations
1696 in earlier phases put constants in rhs1. */
1697 if (TREE_CODE (rhs1) != SSA_NAME)
1698 continue;
1699 break;
1700
1701 /* Possible future opportunity: rhs1 of a ptr+ can be
1702 an ADDR_EXPR. */
1703 case POINTER_PLUS_EXPR:
1704 case MINUS_EXPR:
1705 rhs2 = gimple_assign_rhs2 (gs);
1706 /* Fall-through. */
1707
1708 CASE_CONVERT:
1709 case MODIFY_EXPR:
1710 case NEGATE_EXPR:
1711 rhs1 = gimple_assign_rhs1 (gs);
1712 if (TREE_CODE (rhs1) != SSA_NAME)
1713 continue;
1714 break;
1715
1716 default:
1717 ;
1718 }
1719
1720 switch (gimple_assign_rhs_code (gs))
1721 {
1722 case MULT_EXPR:
1723 slsr_process_mul (gs, rhs1, rhs2, speed);
1724 break;
1725
1726 case PLUS_EXPR:
1727 case POINTER_PLUS_EXPR:
1728 case MINUS_EXPR:
1729 slsr_process_add (gs, rhs1, rhs2, speed);
1730 break;
1731
1732 case NEGATE_EXPR:
1733 slsr_process_neg (gs, rhs1, speed);
1734 break;
1735
1736 CASE_CONVERT:
1737 slsr_process_cast (gs, rhs1, speed);
1738 break;
1739
1740 case MODIFY_EXPR:
1741 slsr_process_copy (gs, rhs1, speed);
1742 break;
1743
1744 default:
1745 ;
1746 }
1747 }
1748 }
1749 }
1750 \f
1751 /* Dump a candidate for debug. */
1752
1753 static void
1754 dump_candidate (slsr_cand_t c)
1755 {
1756 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1757 gimple_bb (c->cand_stmt)->index);
1758 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1759 switch (c->kind)
1760 {
1761 case CAND_MULT:
1762 fputs (" MULT : (", dump_file);
1763 print_generic_expr (dump_file, c->base_expr, 0);
1764 fputs (" + ", dump_file);
1765 print_decs (c->index, dump_file);
1766 fputs (") * ", dump_file);
1767 print_generic_expr (dump_file, c->stride, 0);
1768 fputs (" : ", dump_file);
1769 break;
1770 case CAND_ADD:
1771 fputs (" ADD : ", dump_file);
1772 print_generic_expr (dump_file, c->base_expr, 0);
1773 fputs (" + (", dump_file);
1774 print_decs (c->index, dump_file);
1775 fputs (" * ", dump_file);
1776 print_generic_expr (dump_file, c->stride, 0);
1777 fputs (") : ", dump_file);
1778 break;
1779 case CAND_REF:
1780 fputs (" REF : ", dump_file);
1781 print_generic_expr (dump_file, c->base_expr, 0);
1782 fputs (" + (", dump_file);
1783 print_generic_expr (dump_file, c->stride, 0);
1784 fputs (") + ", dump_file);
1785 print_decs (c->index, dump_file);
1786 fputs (" : ", dump_file);
1787 break;
1788 case CAND_PHI:
1789 fputs (" PHI : ", dump_file);
1790 print_generic_expr (dump_file, c->base_expr, 0);
1791 fputs (" + (unknown * ", dump_file);
1792 print_generic_expr (dump_file, c->stride, 0);
1793 fputs (") : ", dump_file);
1794 break;
1795 default:
1796 gcc_unreachable ();
1797 }
1798 print_generic_expr (dump_file, c->cand_type, 0);
1799 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1800 c->basis, c->dependent, c->sibling);
1801 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1802 c->next_interp, c->dead_savings);
1803 if (c->def_phi)
1804 fprintf (dump_file, " phi: %d\n", c->def_phi);
1805 fputs ("\n", dump_file);
1806 }
1807
1808 /* Dump the candidate vector for debug. */
1809
1810 static void
1811 dump_cand_vec (void)
1812 {
1813 unsigned i;
1814 slsr_cand_t c;
1815
1816 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1817
1818 FOR_EACH_VEC_ELT (cand_vec, i, c)
1819 dump_candidate (c);
1820 }
1821
1822 /* Callback used to dump the candidate chains hash table. */
1823
1824 int
1825 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1826 {
1827 const_cand_chain_t chain = *slot;
1828 cand_chain_t p;
1829
1830 print_generic_expr (dump_file, chain->base_expr, 0);
1831 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1832
1833 for (p = chain->next; p; p = p->next)
1834 fprintf (dump_file, " -> %d", p->cand->cand_num);
1835
1836 fputs ("\n", dump_file);
1837 return 1;
1838 }
1839
1840 /* Dump the candidate chains. */
1841
1842 static void
1843 dump_cand_chains (void)
1844 {
1845 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1846 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1847 (NULL);
1848 fputs ("\n", dump_file);
1849 }
1850
1851 /* Dump the increment vector for debug. */
1852
1853 static void
1854 dump_incr_vec (void)
1855 {
1856 if (dump_file && (dump_flags & TDF_DETAILS))
1857 {
1858 unsigned i;
1859
1860 fprintf (dump_file, "\nIncrement vector:\n\n");
1861
1862 for (i = 0; i < incr_vec_len; i++)
1863 {
1864 fprintf (dump_file, "%3d increment: ", i);
1865 print_decs (incr_vec[i].incr, dump_file);
1866 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1867 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1868 fputs ("\n initializer: ", dump_file);
1869 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1870 fputs ("\n\n", dump_file);
1871 }
1872 }
1873 }
1874 \f
1875 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1876 data reference. */
1877
1878 static void
1879 replace_ref (tree *expr, slsr_cand_t c)
1880 {
1881 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1882 unsigned HOST_WIDE_INT misalign;
1883 unsigned align;
1884
1885 /* Ensure the memory reference carries the minimum alignment
1886 requirement for the data type. See PR58041. */
1887 get_object_alignment_1 (*expr, &align, &misalign);
1888 if (misalign != 0)
1889 align = (misalign & -misalign);
1890 if (align < TYPE_ALIGN (acc_type))
1891 acc_type = build_aligned_type (acc_type, align);
1892
1893 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1894 c->base_expr, c->stride);
1895 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1896 wide_int_to_tree (c->cand_type, c->index));
1897
1898 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1899 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1900 TREE_OPERAND (mem_ref, 0)
1901 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1902 /*simple_p=*/true, NULL,
1903 /*before=*/true, GSI_SAME_STMT);
1904 copy_ref_info (mem_ref, *expr);
1905 *expr = mem_ref;
1906 update_stmt (c->cand_stmt);
1907 }
1908
1909 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1910 dependent of candidate C with an equivalent strength-reduced data
1911 reference. */
1912
1913 static void
1914 replace_refs (slsr_cand_t c)
1915 {
1916 if (dump_file && (dump_flags & TDF_DETAILS))
1917 {
1918 fputs ("Replacing reference: ", dump_file);
1919 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1920 }
1921
1922 if (gimple_vdef (c->cand_stmt))
1923 {
1924 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1925 replace_ref (lhs, c);
1926 }
1927 else
1928 {
1929 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1930 replace_ref (rhs, c);
1931 }
1932
1933 if (dump_file && (dump_flags & TDF_DETAILS))
1934 {
1935 fputs ("With: ", dump_file);
1936 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1937 fputs ("\n", dump_file);
1938 }
1939
1940 if (c->sibling)
1941 replace_refs (lookup_cand (c->sibling));
1942
1943 if (c->dependent)
1944 replace_refs (lookup_cand (c->dependent));
1945 }
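/* Editorial example, hypothetical and not from the upstream sources:
   for a chain of CAND_REFs arising from source like

       a[i].f = 1;
       a[i].g = 2;

   where each access lowers to something of the form
   *(a + i * S + c) for the element size S and a constant field
   offset c, the routine above rewrites every access as a MEM_REF
   whose address is the shared expression a + i * S and whose
   constant offset is its own c, so the variable part of the address
   is computed once rather than folded into each reference.  */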
1946
1947 /* Return TRUE if candidate C is dependent upon a PHI. */
1948
1949 static bool
1950 phi_dependent_cand_p (slsr_cand_t c)
1951 {
1952 /* A candidate is not necessarily dependent upon a PHI just because
1953 it has a phi definition for its base name. It may have a basis
1954 that relies upon the same phi definition, in which case the PHI
1955 is irrelevant to this candidate. */
1956 return (c->def_phi
1957 && c->basis
1958 && lookup_cand (c->basis)->def_phi != c->def_phi);
1959 }
1960
1961 /* Calculate the increment required for candidate C relative to
1962 its basis. */
1963
1964 static widest_int
1965 cand_increment (slsr_cand_t c)
1966 {
1967 slsr_cand_t basis;
1968
1969 /* If the candidate doesn't have a basis, just return its own
1970 index. This is useful in record_increments to help us find
1971 an existing initializer. Also, if the candidate's basis is
1972 hidden by a phi, then its own index will be the increment
1973 from the newly introduced phi basis. */
1974 if (!c->basis || phi_dependent_cand_p (c))
1975 return c->index;
1976
1977 basis = lookup_cand (c->basis);
1978 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1979 return c->index - basis->index;
1980 }
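/* Editorial example (hypothetical): if candidate C has index 11 and
   its basis has index 3, with the same base and stride, the increment
   computed above is 11 - 3 = 8.  For a candidate without a basis, or
   whose basis is hidden by a phi, the candidate's own index is
   returned instead.  */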
1981
1982 /* Calculate the increment required for candidate C relative to
1983 its basis. If we aren't going to generate pointer arithmetic
1984 for this candidate, return the absolute value of that increment
1985 instead. */
1986
1987 static inline widest_int
1988 cand_abs_increment (slsr_cand_t c)
1989 {
1990 widest_int increment = cand_increment (c);
1991
1992 if (!address_arithmetic_p && wi::neg_p (increment))
1993 increment = -increment;
1994
1995 return increment;
1996 }
1997
1998 /* Return TRUE iff candidate C has already been replaced under
1999 another interpretation. */
2000
2001 static inline bool
2002 cand_already_replaced (slsr_cand_t c)
2003 {
2004 return (gimple_bb (c->cand_stmt) == 0);
2005 }
2006
2007 /* Common logic used by replace_unconditional_candidate and
2008 replace_conditional_candidate. */
2009
2010 static void
2011 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2012 {
2013 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2014 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2015
2016 /* It is highly unlikely, but possible, that the resulting
2017 bump doesn't fit in a HWI. Abandon the replacement
2018 in this case. This does not affect siblings or dependents
2019 of C. Restriction to signed HWI is conservative for unsigned
2020 types but allows for safe negation without twisted logic. */
2021 if (wi::fits_shwi_p (bump)
2022 && bump.to_shwi () != HOST_WIDE_INT_MIN
2023 /* It is not useful to replace casts, copies, or adds of
2024 an SSA name and a constant. */
2025 && cand_code != MODIFY_EXPR
2026 && !CONVERT_EXPR_CODE_P (cand_code)
2027 && cand_code != PLUS_EXPR
2028 && cand_code != POINTER_PLUS_EXPR
2029 && cand_code != MINUS_EXPR)
2030 {
2031 enum tree_code code = PLUS_EXPR;
2032 tree bump_tree;
2033 gimple *stmt_to_print = NULL;
2034
2035 /* If the basis name and the candidate's LHS have incompatible
2036 types, introduce a cast. */
2037 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2038 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2039 if (wi::neg_p (bump))
2040 {
2041 code = MINUS_EXPR;
2042 bump = -bump;
2043 }
2044
2045 bump_tree = wide_int_to_tree (target_type, bump);
2046
2047 if (dump_file && (dump_flags & TDF_DETAILS))
2048 {
2049 fputs ("Replacing: ", dump_file);
2050 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2051 }
2052
2053 if (bump == 0)
2054 {
2055 tree lhs = gimple_assign_lhs (c->cand_stmt);
2056 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2057 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2058 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2059 gsi_replace (&gsi, copy_stmt, false);
2060 c->cand_stmt = copy_stmt;
2061 if (dump_file && (dump_flags & TDF_DETAILS))
2062 stmt_to_print = copy_stmt;
2063 }
2064 else
2065 {
2066 tree rhs1, rhs2;
2067 if (cand_code != NEGATE_EXPR) {
2068 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2069 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2070 }
2071 if (cand_code != NEGATE_EXPR
2072 && ((operand_equal_p (rhs1, basis_name, 0)
2073 && operand_equal_p (rhs2, bump_tree, 0))
2074 || (operand_equal_p (rhs1, bump_tree, 0)
2075 && operand_equal_p (rhs2, basis_name, 0))))
2076 {
2077 if (dump_file && (dump_flags & TDF_DETAILS))
2078 {
2079 fputs ("(duplicate, not actually replacing)", dump_file);
2080 stmt_to_print = c->cand_stmt;
2081 }
2082 }
2083 else
2084 {
2085 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2086 gimple_assign_set_rhs_with_ops (&gsi, code,
2087 basis_name, bump_tree);
2088 update_stmt (gsi_stmt (gsi));
2089 c->cand_stmt = gsi_stmt (gsi);
2090 if (dump_file && (dump_flags & TDF_DETAILS))
2091 stmt_to_print = gsi_stmt (gsi);
2092 }
2093 }
2094
2095 if (dump_file && (dump_flags & TDF_DETAILS))
2096 {
2097 fputs ("With: ", dump_file);
2098 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2099 fputs ("\n", dump_file);
2100 }
2101 }
2102 }
2103
2104 /* Replace candidate C with an add or subtract. Note that we only
2105 operate on CAND_MULTs with known strides, so we will never generate
2106 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2107 X = Y + ((i - i') * S), as described in the module commentary. The
2108 folded value ((i - i') * S) is referred to here as the "bump." */
2109
2110 static void
2111 replace_unconditional_candidate (slsr_cand_t c)
2112 {
2113 slsr_cand_t basis;
2114
2115 if (cand_already_replaced (c))
2116 return;
2117
2118 basis = lookup_cand (c->basis);
2119 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2120
2121 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2122 }
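/* Editorial example (hypothetical, constant stride): given

       y_6 = (b_3 + 2) * 4;     basis,     index 2, stride 4
       x_8 = (b_3 + 7) * 4;     candidate, index 7, stride 4

   the routine above computes the bump (7 - 2) * 4 = 20 and rewrites
   the candidate as x_8 = y_6 + 20, replacing a multiply with an
   add.  */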
2123 \f
2124 /* Return the index in the increment vector of the given INCREMENT,
2125 or -1 if not found. The latter can occur if more than
2126 MAX_INCR_VEC_LEN increments have been found. */
2127
2128 static inline int
2129 incr_vec_index (const widest_int &increment)
2130 {
2131 unsigned i;
2132
2133 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2134 ;
2135
2136 if (i < incr_vec_len)
2137 return i;
2138 else
2139 return -1;
2140 }
2141
2142 /* Create a new statement along edge E to add BASIS_NAME to the product
2143 of INCREMENT and the stride of candidate C. Create and return a new
2144 SSA name to be used as the LHS of the new statement. LOC is the location to attach to it.
2145 KNOWN_STRIDE is true iff C's stride is a constant. */
2146
2147 static tree
2148 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2149 widest_int increment, edge e, location_t loc,
2150 bool known_stride)
2151 {
2152 basic_block insert_bb;
2153 gimple_stmt_iterator gsi;
2154 tree lhs, basis_type;
2155 gassign *new_stmt;
2156
2157 /* If the add candidate along this incoming edge has the same
2158 index as C's hidden basis, the hidden basis represents this
2159 edge correctly. */
2160 if (increment == 0)
2161 return basis_name;
2162
2163 basis_type = TREE_TYPE (basis_name);
2164 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2165
2166 if (known_stride)
2167 {
2168 tree bump_tree;
2169 enum tree_code code = PLUS_EXPR;
2170 widest_int bump = increment * wi::to_widest (c->stride);
2171 if (wi::neg_p (bump))
2172 {
2173 code = MINUS_EXPR;
2174 bump = -bump;
2175 }
2176
2177 bump_tree = wide_int_to_tree (basis_type, bump);
2178 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2179 }
2180 else
2181 {
2182 int i;
2183 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2184 i = incr_vec_index (negate_incr ? -increment : increment);
2185 gcc_assert (i >= 0);
2186
2187 if (incr_vec[i].initializer)
2188 {
2189 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2190 new_stmt = gimple_build_assign (lhs, code, basis_name,
2191 incr_vec[i].initializer);
2192 }
2193 else if (increment == 1)
2194 new_stmt = gimple_build_assign (lhs, PLUS_EXPR, basis_name, c->stride);
2195 else if (increment == -1)
2196 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
2197 c->stride);
2198 else
2199 gcc_unreachable ();
2200 }
2201
2202 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2203 gsi = gsi_last_bb (insert_bb);
2204
2205 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2206 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2207 else
2208 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2209
2210 gimple_set_location (new_stmt, loc);
2211
2212 if (dump_file && (dump_flags & TDF_DETAILS))
2213 {
2214 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2215 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2216 }
2217
2218 return lhs;
2219 }
2220
2221 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2222 is hidden by the phi node FROM_PHI, create a new phi node in the same
2223 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2224 with its phi arguments representing conditional adjustments to the
2225 hidden basis along conditional incoming paths. Those adjustments are
2226 made by creating add statements (and sometimes recursively creating
2227 phis) along those incoming paths. LOC is the location to attach to
2228 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2229 constant. */
2230
2231 static tree
2232 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2233 location_t loc, bool known_stride)
2234 {
2235 int i;
2236 tree name, phi_arg;
2237 gphi *phi;
2238 vec<tree> phi_args;
2239 slsr_cand_t basis = lookup_cand (c->basis);
2240 int nargs = gimple_phi_num_args (from_phi);
2241 basic_block phi_bb = gimple_bb (from_phi);
2242 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2243 phi_args.create (nargs);
2244
2245 /* Process each argument of the existing phi that represents
2246 conditionally-executed add candidates. */
2247 for (i = 0; i < nargs; i++)
2248 {
2249 edge e = (*phi_bb->preds)[i];
2250 tree arg = gimple_phi_arg_def (from_phi, i);
2251 tree feeding_def;
2252
2253 /* If the phi argument is the base name of the CAND_PHI, then
2254 this incoming arc should use the hidden basis. */
2255 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2256 if (basis->index == 0)
2257 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2258 else
2259 {
2260 widest_int incr = -basis->index;
2261 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2262 e, loc, known_stride);
2263 }
2264 else
2265 {
2266 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2267
2268 /* If there is another phi along this incoming edge, we must
2269 process it in the same fashion to ensure that all basis
2270 adjustments are made along its incoming edges. */
2271 if (gimple_code (arg_def) == GIMPLE_PHI)
2272 feeding_def = create_phi_basis (c, arg_def, basis_name,
2273 loc, known_stride);
2274 else
2275 {
2276 slsr_cand_t arg_cand = base_cand_from_table (arg);
2277 widest_int diff = arg_cand->index - basis->index;
2278 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2279 e, loc, known_stride);
2280 }
2281 }
2282
2283 /* Because of recursion, we need to save the arguments in a vector
2284 so we can create the PHI statement all at once. Otherwise the
2285 storage for the half-created PHI can be reclaimed. */
2286 phi_args.safe_push (feeding_def);
2287 }
2288
2289 /* Create the new phi basis. */
2290 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2291 phi = create_phi_node (name, phi_bb);
2292 SSA_NAME_DEF_STMT (name) = phi;
2293
2294 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2295 {
2296 edge e = (*phi_bb->preds)[i];
2297 add_phi_arg (phi, phi_arg, e, loc);
2298 }
2299
2300 update_stmt (phi);
2301
2302 if (dump_file && (dump_flags & TDF_DETAILS))
2303 {
2304 fputs ("Introducing new phi basis: ", dump_file);
2305 print_gimple_stmt (dump_file, phi, 0, 0);
2306 }
2307
2308 return name;
2309 }
2310
2311 /* Given a candidate C whose basis is hidden by at least one intervening
2312 phi, introduce a matching number of new phis to represent its basis
2313 adjusted by conditional increments along possible incoming paths. Then
2314 replace C as though it were an unconditional candidate, using the new
2315 basis. */
2316
2317 static void
2318 replace_conditional_candidate (slsr_cand_t c)
2319 {
2320 tree basis_name, name;
2321 slsr_cand_t basis;
2322 location_t loc;
2323
2324 /* Look up the LHS SSA name from C's basis. This will be the
2325 RHS1 of the adds we will introduce to create new phi arguments. */
2326 basis = lookup_cand (c->basis);
2327 basis_name = gimple_assign_lhs (basis->cand_stmt);
2328
2329 /* Create a new phi statement which will represent C's true basis
2330 after the transformation is complete. */
2331 loc = gimple_location (c->cand_stmt);
2332 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2333 basis_name, loc, KNOWN_STRIDE);
2334 /* Replace C with an add of the new basis phi and a constant. */
2335 widest_int bump = c->index * wi::to_widest (c->stride);
2336
2337 replace_mult_candidate (c, name, bump);
2338 }
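/* Editorial sketch, hypothetical and not from the upstream sources,
   of the conditional replacement above:

       y_6 = b_3 * 16;                  basis (dominates the phi)
       if (cond_1)
         a_4 = b_3 + 4;
       else
         a_5 = b_3 + 8;
       # a_7 = PHI <a_4, a_5>
       x_9 = a_7 * 16;                  conditional candidate

   A new phi t_10 = PHI <t_11, t_12> is created in the phi's block,
   with t_11 = y_6 + 64 and t_12 = y_6 + 128 (increment times stride)
   inserted on the two incoming edges; x_9 is then replaced using
   t_10 as its basis, and since the remaining bump is 0 * 16 it
   simply becomes a copy of t_10.  */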
2339
2340 /* Compute the expected costs of inserting basis adjustments for
2341 candidate C with phi-definition PHI. The cost of inserting
2342 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2343 which are themselves phi results, recursively calculate costs
2344 for those phis as well. */
2345
2346 static int
2347 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2348 {
2349 unsigned i;
2350 int cost = 0;
2351 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2352
2353 /* If we work our way back to a phi that isn't dominated by the hidden
2354 basis, this isn't a candidate for replacement. Indicate this by
2355 returning an unreasonably high cost. It's not easy to detect
2356 these situations when determining the basis, so we defer the
2357 decision until now. */
2358 basic_block phi_bb = gimple_bb (phi);
2359 slsr_cand_t basis = lookup_cand (c->basis);
2360 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2361
2362 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2363 return COST_INFINITE;
2364
2365 for (i = 0; i < gimple_phi_num_args (phi); i++)
2366 {
2367 tree arg = gimple_phi_arg_def (phi, i);
2368
2369 if (arg != phi_cand->base_expr)
2370 {
2371 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2372
2373 if (gimple_code (arg_def) == GIMPLE_PHI)
2374 cost += phi_add_costs (arg_def, c, one_add_cost);
2375 else
2376 {
2377 slsr_cand_t arg_cand = base_cand_from_table (arg);
2378
2379 if (arg_cand->index != c->index)
2380 cost += one_add_cost;
2381 }
2382 }
2383 }
2384
2385 return cost;
2386 }
2387
2388 /* For candidate C, each sibling of candidate C, and each dependent of
2389 candidate C, determine whether the candidate is dependent upon a
2390 phi that hides its basis. If not, replace the candidate unconditionally.
2391 Otherwise, determine whether the cost of introducing compensation code
2392 for the candidate is offset by the gains from strength reduction. If
2393 so, replace the candidate and introduce the compensation code. */
2394
2395 static void
2396 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2397 {
2398 if (phi_dependent_cand_p (c))
2399 {
2400 if (c->kind == CAND_MULT)
2401 {
2402 /* A candidate dependent upon a phi will replace a multiply by
2403 a constant with an add, and will insert at most one add for
2404 each phi argument. Add these costs with the potential dead-code
2405 savings to determine profitability. */
2406 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2407 int mult_savings = stmt_cost (c->cand_stmt, speed);
2408 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2409 tree phi_result = gimple_phi_result (phi);
2410 int one_add_cost = add_cost (speed,
2411 TYPE_MODE (TREE_TYPE (phi_result)));
2412 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2413 int cost = add_costs - mult_savings - c->dead_savings;
2414
2415 if (dump_file && (dump_flags & TDF_DETAILS))
2416 {
2417 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2418 fprintf (dump_file, " add_costs = %d\n", add_costs);
2419 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2420 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2421 fprintf (dump_file, " cost = %d\n", cost);
2422 if (cost <= COST_NEUTRAL)
2423 fputs (" Replacing...\n", dump_file);
2424 else
2425 fputs (" Not replaced.\n", dump_file);
2426 }
2427
2428 if (cost <= COST_NEUTRAL)
2429 replace_conditional_candidate (c);
2430 }
2431 }
2432 else
2433 replace_unconditional_candidate (c);
2434
2435 if (c->sibling)
2436 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2437
2438 if (c->dependent)
2439 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2440 }
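/* Editorial example of the cost test above, with hypothetical target
   costs: if one_add_cost is 4 and both phi arguments need an edge
   adjustment, add_costs = 4 + 4 + 4 = 12; if the replaced multiply
   has cost 16 (mult_savings) and nothing else goes dead, then
   cost = 12 - 16 - 0 = -4 <= COST_NEUTRAL and the conditional
   candidate is replaced.  On a target where multiplies are as cheap
   as adds, the same candidate would be left alone.  */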
2441 \f
2442 /* Count the number of candidates in the tree rooted at C that have
2443 not already been replaced under other interpretations. */
2444
2445 static int
2446 count_candidates (slsr_cand_t c)
2447 {
2448 unsigned count = cand_already_replaced (c) ? 0 : 1;
2449
2450 if (c->sibling)
2451 count += count_candidates (lookup_cand (c->sibling));
2452
2453 if (c->dependent)
2454 count += count_candidates (lookup_cand (c->dependent));
2455
2456 return count;
2457 }
2458
2459 /* Increase the count of INCREMENT by one in the increment vector.
2460 INCREMENT is associated with candidate C. If INCREMENT is to be
2461 conditionally executed as part of a conditional candidate replacement,
2462 IS_PHI_ADJUST is true, otherwise false. If an initializer
2463 T_0 = stride * I is provided by a candidate that dominates all
2464 candidates with the same increment, also record T_0 for subsequent use. */
2465
2466 static void
2467 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2468 {
2469 bool found = false;
2470 unsigned i;
2471
2472 /* Treat increments that differ only in sign as identical so as to
2473 share initializers, unless we are generating pointer arithmetic. */
2474 if (!address_arithmetic_p && wi::neg_p (increment))
2475 increment = -increment;
2476
2477 for (i = 0; i < incr_vec_len; i++)
2478 {
2479 if (incr_vec[i].incr == increment)
2480 {
2481 incr_vec[i].count++;
2482 found = true;
2483
2484 /* If we previously recorded an initializer that doesn't
2485 dominate this candidate, it's not going to be useful to
2486 us after all. */
2487 if (incr_vec[i].initializer
2488 && !dominated_by_p (CDI_DOMINATORS,
2489 gimple_bb (c->cand_stmt),
2490 incr_vec[i].init_bb))
2491 {
2492 incr_vec[i].initializer = NULL_TREE;
2493 incr_vec[i].init_bb = NULL;
2494 }
2495
2496 break;
2497 }
2498 }
2499
2500 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2501 {
2502 /* The first time we see an increment, create the entry for it.
2503 If this is the root candidate (which has no basis and is not a
2504 phi adjustment), set the count to zero. We're only processing it
2505 so it can possibly provide an initializer for other candidates. */
2506 incr_vec[incr_vec_len].incr = increment;
2507 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2508 incr_vec[incr_vec_len].cost = COST_INFINITE;
2509
2510 /* Optimistically record the first occurrence of this increment
2511 as providing an initializer (if it does); we will revise this
2512 opinion later if it doesn't dominate all other occurrences.
2513 Exception: increments of -1, 0, 1 never need initializers;
2514 and phi adjustments don't ever provide initializers. */
2515 if (c->kind == CAND_ADD
2516 && !is_phi_adjust
2517 && c->index == increment
2518 && (wi::gts_p (increment, 1)
2519 || wi::lts_p (increment, -1))
2520 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2521 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2522 {
2523 tree t0 = NULL_TREE;
2524 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2525 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2526 if (operand_equal_p (rhs1, c->base_expr, 0))
2527 t0 = rhs2;
2528 else if (operand_equal_p (rhs2, c->base_expr, 0))
2529 t0 = rhs1;
2530 if (t0
2531 && SSA_NAME_DEF_STMT (t0)
2532 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2533 {
2534 incr_vec[incr_vec_len].initializer = t0;
2535 incr_vec[incr_vec_len++].init_bb
2536 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2537 }
2538 else
2539 {
2540 incr_vec[incr_vec_len].initializer = NULL_TREE;
2541 incr_vec[incr_vec_len++].init_bb = NULL;
2542 }
2543 }
2544 else
2545 {
2546 incr_vec[incr_vec_len].initializer = NULL_TREE;
2547 incr_vec[incr_vec_len++].init_bb = NULL;
2548 }
2549 }
2550 }
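/* Editorial example (hypothetical): if the candidates in one tree
   require increments 5, 5, -5 and 3, and we are not generating
   pointer arithmetic, the calls above leave incr_vec with two
   entries, {incr 5, count 3} and {incr 3, count 1}; -5 is folded
   into 5 so that both signs can share a single initializer
   T_0 = stride * 5.  */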
2551
2552 /* Given phi statement PHI that hides a candidate from its BASIS, find
2553 the increments along each incoming arc (recursively handling additional
2554 phis that may be present) and record them. These increments are the
2555 difference in index between the index-adjusting statements and the
2556 index of the basis. */
2557
2558 static void
2559 record_phi_increments (slsr_cand_t basis, gimple *phi)
2560 {
2561 unsigned i;
2562 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2563
2564 for (i = 0; i < gimple_phi_num_args (phi); i++)
2565 {
2566 tree arg = gimple_phi_arg_def (phi, i);
2567
2568 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2569 {
2570 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2571
2572 if (gimple_code (arg_def) == GIMPLE_PHI)
2573 record_phi_increments (basis, arg_def);
2574 else
2575 {
2576 slsr_cand_t arg_cand = base_cand_from_table (arg);
2577 widest_int diff = arg_cand->index - basis->index;
2578 record_increment (arg_cand, diff, PHI_ADJUST);
2579 }
2580 }
2581 }
2582 }
2583
2584 /* Determine how many times each unique increment occurs in the set
2585 of candidates rooted at C's parent, recording the data in the
2586 increment vector. For each unique increment I, if an initializer
2587 T_0 = stride * I is provided by a candidate that dominates all
2588 candidates with the same increment, also record T_0 for subsequent
2589 use. */
2590
2591 static void
2592 record_increments (slsr_cand_t c)
2593 {
2594 if (!cand_already_replaced (c))
2595 {
2596 if (!phi_dependent_cand_p (c))
2597 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2598 else
2599 {
2600 /* A candidate with a basis hidden by a phi will have one
2601 increment for its relationship to the index represented by
2602 the phi, and potentially additional increments along each
2603 incoming edge. For the root of the dependency tree (which
2604 has no basis), process just the initial index in case it has
2605 an initializer that can be used by subsequent candidates. */
2606 record_increment (c, c->index, NOT_PHI_ADJUST);
2607
2608 if (c->basis)
2609 record_phi_increments (lookup_cand (c->basis),
2610 lookup_cand (c->def_phi)->cand_stmt);
2611 }
2612 }
2613
2614 if (c->sibling)
2615 record_increments (lookup_cand (c->sibling));
2616
2617 if (c->dependent)
2618 record_increments (lookup_cand (c->dependent));
2619 }
2620
2621 /* Add up and return the costs of introducing add statements that
2622 require the increment INCR on behalf of candidate C and phi
2623 statement PHI. Accumulate into *SAVINGS the potential savings
2624 from removing existing statements that feed PHI and have no other
2625 uses. */
2626
2627 static int
2628 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2629 int *savings)
2630 {
2631 unsigned i;
2632 int cost = 0;
2633 slsr_cand_t basis = lookup_cand (c->basis);
2634 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2635
2636 for (i = 0; i < gimple_phi_num_args (phi); i++)
2637 {
2638 tree arg = gimple_phi_arg_def (phi, i);
2639
2640 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2641 {
2642 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2643
2644 if (gimple_code (arg_def) == GIMPLE_PHI)
2645 {
2646 int feeding_savings = 0;
2647 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2648 if (has_single_use (gimple_phi_result (arg_def)))
2649 *savings += feeding_savings;
2650 }
2651 else
2652 {
2653 slsr_cand_t arg_cand = base_cand_from_table (arg);
2654 widest_int diff = arg_cand->index - basis->index;
2655
2656 if (incr == diff)
2657 {
2658 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2659 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2660 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2661 if (has_single_use (lhs))
2662 *savings += stmt_cost (arg_cand->cand_stmt, true);
2663 }
2664 }
2665 }
2666 }
2667
2668 return cost;
2669 }
2670
2671 /* Return the first candidate in the tree rooted at C that has not
2672 already been replaced, favoring siblings over dependents. */
2673
2674 static slsr_cand_t
2675 unreplaced_cand_in_tree (slsr_cand_t c)
2676 {
2677 if (!cand_already_replaced (c))
2678 return c;
2679
2680 if (c->sibling)
2681 {
2682 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2683 if (sib)
2684 return sib;
2685 }
2686
2687 if (c->dependent)
2688 {
2689 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2690 if (dep)
2691 return dep;
2692 }
2693
2694 return NULL;
2695 }
2696
2697 /* Return TRUE if the candidates in the tree rooted at C should be
2698 optimized for speed, else FALSE. We estimate this based on the block
2699 containing the most dominant candidate in the tree that has not yet
2700 been replaced. */
2701
2702 static bool
2703 optimize_cands_for_speed_p (slsr_cand_t c)
2704 {
2705 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2706 gcc_assert (c2);
2707 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2708 }
2709
2710 /* Add COST_IN to the lowest cost of any dependent path starting at
2711 candidate C or any of its siblings, counting only candidates along
2712 such paths with increment INCR. Assume that replacing a candidate
2713 reduces cost by REPL_SAVINGS. Also account for savings from any
2714 statements that would go dead. If COUNT_PHIS is true, include
2715 costs of introducing feeding statements for conditional candidates. */
2716
2717 static int
2718 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2719 const widest_int &incr, bool count_phis)
2720 {
2721 int local_cost, sib_cost, savings = 0;
2722 widest_int cand_incr = cand_abs_increment (c);
2723
2724 if (cand_already_replaced (c))
2725 local_cost = cost_in;
2726 else if (incr == cand_incr)
2727 local_cost = cost_in - repl_savings - c->dead_savings;
2728 else
2729 local_cost = cost_in - c->dead_savings;
2730
2731 if (count_phis
2732 && phi_dependent_cand_p (c)
2733 && !cand_already_replaced (c))
2734 {
2735 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2736 local_cost += phi_incr_cost (c, incr, phi, &savings);
2737
2738 if (has_single_use (gimple_phi_result (phi)))
2739 local_cost -= savings;
2740 }
2741
2742 if (c->dependent)
2743 local_cost = lowest_cost_path (local_cost, repl_savings,
2744 lookup_cand (c->dependent), incr,
2745 count_phis);
2746
2747 if (c->sibling)
2748 {
2749 sib_cost = lowest_cost_path (cost_in, repl_savings,
2750 lookup_cand (c->sibling), incr,
2751 count_phis);
2752 local_cost = MIN (local_cost, sib_cost);
2753 }
2754
2755 return local_cost;
2756 }
2757
2758 /* Compute the total savings that would accrue from all replacements
2759 in the candidate tree rooted at C, counting only candidates with
2760 increment INCR. Assume that replacing a candidate reduces cost
2761 by REPL_SAVINGS. Also account for savings from statements that
2762 would go dead. */
2763
2764 static int
2765 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2766 bool count_phis)
2767 {
2768 int savings = 0;
2769 widest_int cand_incr = cand_abs_increment (c);
2770
2771 if (incr == cand_incr && !cand_already_replaced (c))
2772 savings += repl_savings + c->dead_savings;
2773
2774 if (count_phis
2775 && phi_dependent_cand_p (c)
2776 && !cand_already_replaced (c))
2777 {
2778 int phi_savings = 0;
2779 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2780 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2781
2782 if (has_single_use (gimple_phi_result (phi)))
2783 savings += phi_savings;
2784 }
2785
2786 if (c->dependent)
2787 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2788 count_phis);
2789
2790 if (c->sibling)
2791 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2792 count_phis);
2793
2794 return savings;
2795 }
2796
2797 /* Use target-specific costs to determine and record which increments
2798 in the current candidate tree are profitable to replace, assuming
2799 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2800 the candidate tree.
2801
2802 One slight limitation here is that we don't account for the possible
2803 introduction of casts in some cases. See replace_one_candidate for
2804 the cases where these are introduced. This should probably be cleaned
2805 up sometime. */
2806
2807 static void
2808 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2809 {
2810 unsigned i;
2811
2812 for (i = 0; i < incr_vec_len; i++)
2813 {
2814 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2815
2816 /* If somehow this increment is bigger than a HWI, we won't
2817 be optimizing candidates that use it. And if the increment
2818 has a count of zero, nothing will be done with it. */
2819 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2820 incr_vec[i].cost = COST_INFINITE;
2821
2822 /* Increments of 0, 1, and -1 are always profitable to replace,
2823 because they always replace a multiply or add with an add or
2824 copy, and may cause one or more existing instructions to go
2825 dead. Exception: -1 can't be assumed to be profitable for
2826 pointer addition. */
2827 else if (incr == 0
2828 || incr == 1
2829 || (incr == -1
2830 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2831 != POINTER_PLUS_EXPR)))
2832 incr_vec[i].cost = COST_NEUTRAL;
2833
2834 /* FORNOW: If we need to add an initializer, give up if a cast from
2835 the candidate's type to its stride's type can lose precision.
2836 This could eventually be handled better by expressly retaining the
2837 result of a cast to a wider type in the stride. Example:
2838
2839 short int _1;
2840 _2 = (int) _1;
2841 _3 = _2 * 10;
2842 _4 = x + _3; ADD: x + (10 * _1) : int
2843 _5 = _2 * 15;
2844 _6 = x + _5; ADD: x + (15 * _1) : int
2845
2846 Right now replacing _6 would cause insertion of an initializer
2847 of the form "short int T = _1 * 5;" followed by a cast to
2848 int, which could overflow incorrectly. Had we recorded _2 or
2849 (int)_1 as the stride, this wouldn't happen. However, doing
2850 this breaks other opportunities, so this will require some
2851 care. */
2852 else if (!incr_vec[i].initializer
2853 && TREE_CODE (first_dep->stride) != INTEGER_CST
2854 && !legal_cast_p_1 (first_dep->stride,
2855 gimple_assign_lhs (first_dep->cand_stmt)))
2856
2857 incr_vec[i].cost = COST_INFINITE;
2858
2859 /* If we need to add an initializer, make sure we don't introduce
2860 a multiply by a pointer type, which can happen in certain cast
2861 scenarios. FIXME: When cleaning up these cast issues, we can
2862 afford to introduce the multiply provided we cast out to an
2863 unsigned int of appropriate size. */
2864 else if (!incr_vec[i].initializer
2865 && TREE_CODE (first_dep->stride) != INTEGER_CST
2866 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2867
2868 incr_vec[i].cost = COST_INFINITE;
2869
2870 /* For any other increment, if this is a multiply candidate, we
2871 must introduce a temporary T and initialize it with
2872 T_0 = stride * increment. When optimizing for speed, walk the
2873 candidate tree to calculate the best cost reduction along any
2874 path; if it offsets the fixed cost of inserting the initializer,
2875 replacing the increment is profitable. When optimizing for
2876 size, instead calculate the total cost reduction from replacing
2877 all candidates with this increment. */
2878 else if (first_dep->kind == CAND_MULT)
2879 {
2880 int cost = mult_by_coeff_cost (incr, mode, speed);
2881 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2882 if (speed)
2883 cost = lowest_cost_path (cost, repl_savings, first_dep,
2884 incr_vec[i].incr, COUNT_PHIS);
2885 else
2886 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2887 COUNT_PHIS);
2888
2889 incr_vec[i].cost = cost;
2890 }
2891
2892 /* If this is an add candidate, the initializer may already
2893 exist, so only calculate the cost of the initializer if it
2894 doesn't. We are replacing one add with another here, so the
2895 known replacement savings is zero. We will account for removal
2896 of dead instructions in lowest_cost_path or total_savings. */
2897 else
2898 {
2899 int cost = 0;
2900 if (!incr_vec[i].initializer)
2901 cost = mult_by_coeff_cost (incr, mode, speed);
2902
2903 if (speed)
2904 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2905 DONT_COUNT_PHIS);
2906 else
2907 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2908 DONT_COUNT_PHIS);
2909
2910 incr_vec[i].cost = cost;
2911 }
2912 }
2913 }
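/* Editorial example of the CAND_MULT costing above, with hypothetical
   target costs: with mul_cost 16 and add_cost 4, repl_savings is 12.
   If materializing T_0 = stride * 5 costs mult_by_coeff_cost = 8 and,
   when optimizing for speed, the cheapest dependent path contains two
   candidates using increment 5, lowest_cost_path returns roughly
   8 - 12 - 12 = -16 (ignoring dead-code savings), so the increment's
   cost is negative and it is considered profitable to replace.  */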
2914
2915 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2916 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2917 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2918 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2919 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2920
2921 static basic_block
2922 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2923 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2924 {
2925 basic_block ncd;
2926
2927 if (!bb1)
2928 {
2929 *where = c2;
2930 return bb2;
2931 }
2932
2933 if (!bb2)
2934 {
2935 *where = c1;
2936 return bb1;
2937 }
2938
2939 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2940
2941 /* If both candidates are in the same block, the earlier
2942 candidate wins. */
2943 if (bb1 == ncd && bb2 == ncd)
2944 {
2945 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2946 *where = c2;
2947 else
2948 *where = c1;
2949 }
2950
2951 /* Otherwise, if one of them produced a candidate in the
2952 dominator, that one wins. */
2953 else if (bb1 == ncd)
2954 *where = c1;
2955
2956 else if (bb2 == ncd)
2957 *where = c2;
2958
2959 /* If neither matches the dominator, neither wins. */
2960 else
2961 *where = NULL;
2962
2963 return ncd;
2964 }
2965
2966 /* Consider all candidates that feed PHI. Find the nearest common
2967 dominator of those candidates requiring the given increment INCR.
2968 Further find and return the nearest common dominator of this result
2969 with block NCD. If the returned block contains one or more of the
2970 candidates, return the earliest candidate in the block in *WHERE. */
2971
2972 static basic_block
2973 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
2974 basic_block ncd, slsr_cand_t *where)
2975 {
2976 unsigned i;
2977 slsr_cand_t basis = lookup_cand (c->basis);
2978 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2979
2980 for (i = 0; i < gimple_phi_num_args (phi); i++)
2981 {
2982 tree arg = gimple_phi_arg_def (phi, i);
2983
2984 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2985 {
2986 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2987
2988 if (gimple_code (arg_def) == GIMPLE_PHI)
2989 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
2990 where);
2991 else
2992 {
2993 slsr_cand_t arg_cand = base_cand_from_table (arg);
2994 widest_int diff = arg_cand->index - basis->index;
2995 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
2996
2997 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2998 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
2999 }
3000 }
3001 }
3002
3003 return ncd;
3004 }
3005
3006 /* Consider the candidate C together with any candidates that feed
3007 C's phi dependence (if any). Find and return the nearest common
3008 dominator of those candidates requiring the given increment INCR.
3009 If the returned block contains one or more of the candidates,
3010 return the earliest candidate in the block in *WHERE. */
3011
3012 static basic_block
3013 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3014 {
3015 basic_block ncd = NULL;
3016
3017 if (cand_abs_increment (c) == incr)
3018 {
3019 ncd = gimple_bb (c->cand_stmt);
3020 *where = c;
3021 }
3022
3023 if (phi_dependent_cand_p (c))
3024 ncd = ncd_with_phi (c, incr,
3025 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3026 ncd, where);
3027
3028 return ncd;
3029 }
3030
3031 /* Consider all candidates in the tree rooted at C for which INCR
3032 represents the required increment of C relative to its basis.
3033 Find and return the basic block that most nearly dominates all
3034 such candidates. If the returned block contains one or more of
3035 the candidates, return the earliest candidate in the block in
3036 *WHERE. */
3037
3038 static basic_block
3039 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3040 slsr_cand_t *where)
3041 {
3042 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3043 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3044
3045 /* First find the NCD of all siblings and dependents. */
3046 if (c->sibling)
3047 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3048 incr, &sib_where);
3049 if (c->dependent)
3050 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3051 incr, &dep_where);
3052 if (!sib_ncd && !dep_ncd)
3053 {
3054 new_where = NULL;
3055 ncd = NULL;
3056 }
3057 else if (sib_ncd && !dep_ncd)
3058 {
3059 new_where = sib_where;
3060 ncd = sib_ncd;
3061 }
3062 else if (dep_ncd && !sib_ncd)
3063 {
3064 new_where = dep_where;
3065 ncd = dep_ncd;
3066 }
3067 else
3068 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3069 dep_where, &new_where);
3070
3071 /* If the candidate's increment doesn't match the one we're interested
3072 in (and neither do any increments for feeding defs of a phi-dependence),
3073 then the result depends only on siblings and dependents. */
3074 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3075
3076 if (!this_ncd || cand_already_replaced (c))
3077 {
3078 *where = new_where;
3079 return ncd;
3080 }
3081
3082 /* Otherwise, compare this candidate with the result from all siblings
3083 and dependents. */
3084 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3085
3086 return ncd;
3087 }
3088
3089 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3090
3091 static inline bool
3092 profitable_increment_p (unsigned index)
3093 {
3094 return (incr_vec[index].cost <= COST_NEUTRAL);
3095 }
3096
3097 /* For each profitable increment in the increment vector not equal to
3098 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3099 dominator of all statements in the candidate chain rooted at C
3100 that require that increment, and insert an initializer
3101 T_0 = stride * increment at that location. Record T_0 with the
3102 increment record. */
3103
3104 static void
3105 insert_initializers (slsr_cand_t c)
3106 {
3107 unsigned i;
3108
3109 for (i = 0; i < incr_vec_len; i++)
3110 {
3111 basic_block bb;
3112 slsr_cand_t where = NULL;
3113 gassign *init_stmt;
3114 tree stride_type, new_name, incr_tree;
3115 widest_int incr = incr_vec[i].incr;
3116
3117 if (!profitable_increment_p (i)
3118 || incr == 1
3119 || (incr == -1
3120 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3121 || incr == 0)
3122 continue;
3123
3124 /* We may have already identified an existing initializer that
3125 will suffice. */
3126 if (incr_vec[i].initializer)
3127 {
3128 if (dump_file && (dump_flags & TDF_DETAILS))
3129 {
3130 fputs ("Using existing initializer: ", dump_file);
3131 print_gimple_stmt (dump_file,
3132 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3133 0, 0);
3134 }
3135 continue;
3136 }
3137
3138 /* Find the block that most closely dominates all candidates
3139 with this increment. If there is at least one candidate in
3140 that block, the earliest one will be returned in WHERE. */
3141 bb = nearest_common_dominator_for_cands (c, incr, &where);
3142
3143 /* Create a new SSA name to hold the initializer's value. */
3144 stride_type = TREE_TYPE (c->stride);
3145 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3146 incr_vec[i].initializer = new_name;
3147
3148 /* Create the initializer and insert it in the latest possible
3149 dominating position. */
3150 incr_tree = wide_int_to_tree (stride_type, incr);
3151 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3152 c->stride, incr_tree);
3153 if (where)
3154 {
3155 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3156 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3157 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3158 }
3159 else
3160 {
3161 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3162 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3163
3164 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3165 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3166 else
3167 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3168
3169 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3170 }
3171
3172 if (dump_file && (dump_flags & TDF_DETAILS))
3173 {
3174 fputs ("Inserting initializer: ", dump_file);
3175 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3176 }
3177 }
3178 }
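/* Editorial example (hypothetical): if two candidates in blocks B4
   and B7 both require increment 5 with SSA stride s_2, and no
   existing statement already provides s_2 * 5, the loop above
   creates slsr_13 = s_2 * 5 and inserts it in the nearest common
   dominator of B4 and B7 (directly before the earliest such
   candidate when that block contains one), recording slsr_13 as the
   increment's initializer for later use by replace_one_candidate.  */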
3179
3180 /* Return TRUE iff all required increments for candidates feeding PHI
3181 are profitable to replace on behalf of candidate C. */
3182
3183 static bool
3184 all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
3185 {
3186 unsigned i;
3187 slsr_cand_t basis = lookup_cand (c->basis);
3188 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3189
3190 for (i = 0; i < gimple_phi_num_args (phi); i++)
3191 {
3192 tree arg = gimple_phi_arg_def (phi, i);
3193
3194 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3195 {
3196 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3197
3198 if (gimple_code (arg_def) == GIMPLE_PHI)
3199 {
3200 if (!all_phi_incrs_profitable (c, arg_def))
3201 return false;
3202 }
3203 else
3204 {
3205 int j;
3206 slsr_cand_t arg_cand = base_cand_from_table (arg);
3207 widest_int increment = arg_cand->index - basis->index;
3208
3209 if (!address_arithmetic_p && wi::neg_p (increment))
3210 increment = -increment;
3211
3212 j = incr_vec_index (increment);
3213
3214 if (dump_file && (dump_flags & TDF_DETAILS))
3215 {
3216 fprintf (dump_file, " Conditional candidate %d, phi: ",
3217 c->cand_num);
3218 print_gimple_stmt (dump_file, phi, 0, 0);
3219 fputs (" increment: ", dump_file);
3220 print_decs (increment, dump_file);
3221 if (j < 0)
3222 fprintf (dump_file,
3223 "\n Not replaced; incr_vec overflow.\n");
3224 else {
3225 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3226 if (profitable_increment_p (j))
3227 fputs (" Replacing...\n", dump_file);
3228 else
3229 fputs (" Not replaced.\n", dump_file);
3230 }
3231 }
3232
3233 if (j < 0 || !profitable_increment_p (j))
3234 return false;
3235 }
3236 }
3237 }
3238
3239 return true;
3240 }
3241
3242 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3243 type TO_TYPE, and insert it in front of the statement represented
3244 by candidate C. Return the new SSA name holding the result of
3245 the cast. */
3246
3247 static tree
3248 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3249 {
3250 tree cast_lhs;
3251 gassign *cast_stmt;
3252 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3253
3254 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3255 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3256 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3257 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3258
3259 if (dump_file && (dump_flags & TDF_DETAILS))
3260 {
3261 fputs (" Inserting: ", dump_file);
3262 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3263 }
3264
3265 return cast_lhs;
3266 }
3267
3268 /* Replace the RHS of the statement represented by candidate C with
3269 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that doing so doesn't
3270 leave C unchanged or merely interchange its operands. The original
3271 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3272 If the replacement was made and we are doing a details dump,
3273 return the revised statement, else NULL. */
3274
3275 static gimple *
3276 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3277 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3278 slsr_cand_t c)
3279 {
3280 if (new_code != old_code
3281 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3282 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3283 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3284 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3285 {
3286 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3287 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3288 update_stmt (gsi_stmt (gsi));
3289 c->cand_stmt = gsi_stmt (gsi);
3290
3291 if (dump_file && (dump_flags & TDF_DETAILS))
3292 return gsi_stmt (gsi);
3293 }
3294
3295 else if (dump_file && (dump_flags & TDF_DETAILS))
3296 fputs (" (duplicate, not actually replacing)\n", dump_file);
3297
3298 return NULL;
3299 }
3300
3301 /* Strength-reduce the statement represented by candidate C by replacing
3302 it with an equivalent addition or subtraction. I is the index into
3303 the increment vector identifying C's increment. BASIS_NAME is the
3304 rhs1 to use in creating the add/subtract. Casts are introduced as
3305 needed when the operand types involved are not compatible. */
3306
3307 static void
3308 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3309 {
3310 gimple *stmt_to_print = NULL;
3311 tree orig_rhs1, orig_rhs2;
3312 tree rhs2;
3313 enum tree_code orig_code, repl_code;
3314 widest_int cand_incr;
3315
3316 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3317 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3318 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3319 cand_incr = cand_increment (c);
3320
3321 if (dump_file && (dump_flags & TDF_DETAILS))
3322 {
3323 fputs ("Replacing: ", dump_file);
3324 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3325 stmt_to_print = c->cand_stmt;
3326 }
3327
3328 if (address_arithmetic_p)
3329 repl_code = POINTER_PLUS_EXPR;
3330 else
3331 repl_code = PLUS_EXPR;
3332
3333 /* If the increment has an initializer T_0, replace the candidate
3334 statement with an add of the basis name and the initializer. */
3335 if (incr_vec[i].initializer)
3336 {
3337 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3338 tree orig_type = TREE_TYPE (orig_rhs2);
3339
3340 if (types_compatible_p (orig_type, init_type))
3341 rhs2 = incr_vec[i].initializer;
3342 else
3343 rhs2 = introduce_cast_before_cand (c, orig_type,
3344 incr_vec[i].initializer);
3345
3346 if (incr_vec[i].incr != cand_incr)
3347 {
3348 gcc_assert (repl_code == PLUS_EXPR);
3349 repl_code = MINUS_EXPR;
3350 }
3351
3352 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3353 orig_code, orig_rhs1, orig_rhs2,
3354 c);
3355 }
3356
3357 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3358 with a subtract of the stride from the basis name, a copy
3359 from the basis name, or an add of the stride to the basis
3360 name, respectively. It may be necessary to introduce a
3361 cast (or reuse an existing cast). */
3362 else if (cand_incr == 1)
3363 {
3364 tree stride_type = TREE_TYPE (c->stride);
3365 tree orig_type = TREE_TYPE (orig_rhs2);
3366
3367 if (types_compatible_p (orig_type, stride_type))
3368 rhs2 = c->stride;
3369 else
3370 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3371
3372 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3373 orig_code, orig_rhs1, orig_rhs2,
3374 c);
3375 }
3376
3377 else if (cand_incr == -1)
3378 {
3379 tree stride_type = TREE_TYPE (c->stride);
3380 tree orig_type = TREE_TYPE (orig_rhs2);
3381 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3382
3383 if (types_compatible_p (orig_type, stride_type))
3384 rhs2 = c->stride;
3385 else
3386 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3387
3388 if (orig_code != MINUS_EXPR
3389 || !operand_equal_p (basis_name, orig_rhs1, 0)
3390 || !operand_equal_p (rhs2, orig_rhs2, 0))
3391 {
3392 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3393 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3394 update_stmt (gsi_stmt (gsi));
3395 c->cand_stmt = gsi_stmt (gsi);
3396
3397 if (dump_file && (dump_flags & TDF_DETAILS))
3398 stmt_to_print = gsi_stmt (gsi);
3399 }
3400 else if (dump_file && (dump_flags & TDF_DETAILS))
3401 fputs (" (duplicate, not actually replacing)\n", dump_file);
3402 }
3403
3404 else if (cand_incr == 0)
3405 {
3406 tree lhs = gimple_assign_lhs (c->cand_stmt);
3407 tree lhs_type = TREE_TYPE (lhs);
3408 tree basis_type = TREE_TYPE (basis_name);
3409
3410 if (types_compatible_p (lhs_type, basis_type))
3411 {
3412 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3413 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3414 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3415 gsi_replace (&gsi, copy_stmt, false);
3416 c->cand_stmt = copy_stmt;
3417
3418 if (dump_file && (dump_flags & TDF_DETAILS))
3419 stmt_to_print = copy_stmt;
3420 }
3421 else
3422 {
3423 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3424 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3425 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3426 gsi_replace (&gsi, cast_stmt, false);
3427 c->cand_stmt = cast_stmt;
3428
3429 if (dump_file && (dump_flags & TDF_DETAILS))
3430 stmt_to_print = cast_stmt;
3431 }
3432 }
3433 else
3434 gcc_unreachable ();
3435
3436 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3437 {
3438 fputs ("With: ", dump_file);
3439 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3440 fputs ("\n", dump_file);
3441 }
3442 }
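/* Editorial recap of the cases handled above, with hypothetical
   names: for a candidate with basis name y_6 and SSA stride s_2,

       increment 5, initializer slsr_13 = s_2 * 5:
                        x_9 = y_6 + slsr_13  (MINUS_EXPR if the
                        recorded increment and the candidate's
                        increment differ in sign)
       increment  1:    x_9 = y_6 + s_2
       increment -1:    x_9 = y_6 - s_2
       increment  0:    x_9 = y_6  (or a cast of y_6)

   with casts of the stride or initializer inserted first whenever
   the types involved are not compatible.  */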
3443
3444 /* For each candidate in the tree rooted at C, replace it with
3445 an increment if such has been shown to be profitable. */
3446
3447 static void
3448 replace_profitable_candidates (slsr_cand_t c)
3449 {
3450 if (!cand_already_replaced (c))
3451 {
3452 widest_int increment = cand_abs_increment (c);
3453 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3454 int i;
3455
3456 i = incr_vec_index (increment);
3457
3458 /* Only process profitable increments. Nothing useful can be done
3459 to a cast or copy. */
3460 if (i >= 0
3461 && profitable_increment_p (i)
3462 && orig_code != MODIFY_EXPR
3463 && !CONVERT_EXPR_CODE_P (orig_code))
3464 {
3465 if (phi_dependent_cand_p (c))
3466 {
3467 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3468
3469 if (all_phi_incrs_profitable (c, phi))
3470 {
3471 /* Look up the LHS SSA name from C's basis. This will be
3472 the RHS1 of the adds we will introduce to create new
3473 phi arguments. */
3474 slsr_cand_t basis = lookup_cand (c->basis);
3475 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3476
3477 /* Create a new phi statement that will represent C's true
3478 basis after the transformation is complete. */
3479 location_t loc = gimple_location (c->cand_stmt);
3480 tree name = create_phi_basis (c, phi, basis_name,
3481 loc, UNKNOWN_STRIDE);
3482
3483 /* Replace C with an add of the new basis phi and the
3484 increment. */
3485 replace_one_candidate (c, i, name);
3486 }
3487 }
3488 else
3489 {
3490 slsr_cand_t basis = lookup_cand (c->basis);
3491 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3492 replace_one_candidate (c, i, basis_name);
3493 }
3494 }
3495 }
3496
3497 if (c->sibling)
3498 replace_profitable_candidates (lookup_cand (c->sibling));
3499
3500 if (c->dependent)
3501 replace_profitable_candidates (lookup_cand (c->dependent));
3502 }
3503 \f
3504 /* Analyze costs of related candidates in the candidate vector,
3505 and make beneficial replacements. */

static void
analyze_candidates_and_replace (void)
{
  unsigned i;
  slsr_cand_t c;

  /* Each candidate that has a null basis and a non-null
     dependent is the root of a tree of related statements.
     Analyze each tree to determine a subset of those
     statements that can be replaced with maximum benefit.  */
  FOR_EACH_VEC_ELT (cand_vec, i, c)
    {
      slsr_cand_t first_dep;

      if (c->basis != 0 || c->dependent == 0)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
                 c->cand_num);

      first_dep = lookup_cand (c->dependent);

      /* If this is a chain of CAND_REFs, unconditionally replace
         each of them with a strength-reduced data reference.  */
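      /* Illustrative sketch (hypothetical source, not from a specific
         test case): for a chain of references such as

           a[i].x = 1;
           a[i].y = 2;

         the common address arithmetic &a + i * sizeof (a[0]) need only
         be computed once, with each reference becoming a memory access
         at a small constant offset from that address.  */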
      if (c->kind == CAND_REF)
        replace_refs (c);

      /* If the common stride of all related candidates is a known
         constant, each candidate without a phi-dependence can be
         profitably replaced.  Each replaces a multiply by a single
         add, with the possibility that a feeding add also goes dead.
         A candidate with a phi-dependence is replaced only if the
         compensation code it requires is offset by the strength
         reduction savings.  */
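      /* Illustrative example with a known stride of 8 (hypothetical
         GIMPLE): given the basis and candidate

           y_1 = (b_2 + 2) * 8;
           x_3 = (b_2 + 5) * 8;

         the candidate becomes x_3 = y_1 + 24, trading the multiply for
         a single add; if the add feeding the multiply has no other
         uses, it goes dead as well.  */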
      else if (TREE_CODE (c->stride) == INTEGER_CST)
        replace_uncond_cands_and_profitable_phis (first_dep);

      /* When the stride is an SSA name, it may still be profitable
         to replace some or all of the dependent candidates, depending
         on whether the introduced increments can be reused, or are
         less expensive to calculate than the replaced statements.  */
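      /* Illustrative example with an unknown stride s_1 (hypothetical
         GIMPLE): given

           y_2 = (b_3 + 1) * s_1;
           x_4 = (b_3 + 4) * s_1;

         replacing the candidate requires an initializer t_5 = 3 * s_1
         so that x_4 = y_2 + t_5; this pays off only when t_5 is cheaper
         than the replaced multiply or can be shared among several
         replacements, which is what the cost analysis below decides.  */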
      else
        {
          machine_mode mode;
          bool speed;

          /* Determine whether we'll be generating pointer arithmetic
             when replacing candidates.  */
          address_arithmetic_p = (c->kind == CAND_ADD
                                  && POINTER_TYPE_P (c->cand_type));

          /* If all candidates have already been replaced under other
             interpretations, nothing remains to be done.  */
          if (!count_candidates (c))
            continue;

          /* Construct an array of increments for this candidate chain.  */
          incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
          incr_vec_len = 0;
          record_increments (c);

          /* Determine which increments are profitable to replace.  */
          mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
          speed = optimize_cands_for_speed_p (c);
          analyze_increments (first_dep, mode, speed);

          /* Insert initializers of the form T_0 = stride * increment
             for use in profitable replacements.  */
          insert_initializers (first_dep);
          dump_incr_vec ();

          /* Perform the replacements.  */
          replace_profitable_candidates (first_dep);
          free (incr_vec);
        }
    }
}

namespace {

const pass_data pass_data_strength_reduction =
{
  GIMPLE_PASS, /* type */
  "slsr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GIMPLE_SLSR, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_strength_reduction : public gimple_opt_pass
{
public:
  pass_strength_reduction (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_strength_reduction, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_slsr; }
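  /* flag_tree_slsr corresponds to the -ftree-slsr option, which is
     enabled by default when optimizing.  */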
  virtual unsigned int execute (function *);

}; // class pass_strength_reduction

unsigned
pass_strength_reduction::execute (function *fun)
{
  /* Create the obstack where candidates will reside.  */
  gcc_obstack_init (&cand_obstack);

  /* Allocate the candidate vector.  */
  cand_vec.create (128);

  /* Allocate the mapping from statements to candidate indices.  */
  stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;

  /* Create the obstack where candidate chains will reside.  */
  gcc_obstack_init (&chain_obstack);

  /* Allocate the mapping from base expressions to candidate chains.  */
  base_cand_map = new hash_table<cand_chain_hasher> (500);

  /* Allocate the mapping from bases to alternative bases.  */
  alt_base_map = new hash_map<tree, tree>;

  /* Initialize the loop optimizer.  We need to detect flow across
     back edges, and this gives us dominator information as well.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  /* Walk the CFG in predominator order looking for strength reduction
     candidates.  */
  find_candidates_dom_walker (CDI_DOMINATORS)
    .walk (fun->cfg->x_entry_block_ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_cand_vec ();
      dump_cand_chains ();
    }

  delete alt_base_map;
  free_affine_expand_cache (&name_expansions);

  /* Analyze costs and make appropriate replacements.  */
  analyze_candidates_and_replace ();

  loop_optimizer_finalize ();
  delete base_cand_map;
  base_cand_map = NULL;
  obstack_free (&chain_obstack, NULL);
  delete stmt_cand_map;
  cand_vec.release ();
  obstack_free (&cand_obstack, NULL);

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_strength_reduction (gcc::context *ctxt)
{
  return new pass_strength_reduction (ctxt);
}