gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "hash-set.h"
40 #include "machmode.h"
41 #include "vec.h"
42 #include "double-int.h"
43 #include "input.h"
44 #include "alias.h"
45 #include "symtab.h"
46 #include "options.h"
47 #include "wide-int.h"
48 #include "inchash.h"
49 #include "tree.h"
50 #include "fold-const.h"
51 #include "predict.h"
52 #include "tm.h"
53 #include "hard-reg-set.h"
54 #include "function.h"
55 #include "dominance.h"
56 #include "cfg.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-expr.h"
61 #include "is-a.h"
62 #include "gimple.h"
63 #include "gimple-iterator.h"
64 #include "gimplify-me.h"
65 #include "stor-layout.h"
66 #include "hashtab.h"
67 #include "rtl.h"
68 #include "flags.h"
69 #include "statistics.h"
70 #include "real.h"
71 #include "fixed-value.h"
72 #include "insn-config.h"
73 #include "expmed.h"
74 #include "dojump.h"
75 #include "explow.h"
76 #include "calls.h"
77 #include "emit-rtl.h"
78 #include "varasm.h"
79 #include "stmt.h"
80 #include "expr.h"
81 #include "tree-pass.h"
82 #include "cfgloop.h"
83 #include "gimple-pretty-print.h"
84 #include "gimple-ssa.h"
85 #include "tree-cfg.h"
86 #include "tree-phinodes.h"
87 #include "ssa-iterators.h"
88 #include "stringpool.h"
89 #include "tree-ssanames.h"
90 #include "domwalk.h"
91 #include "params.h"
92 #include "tree-ssa-address.h"
93 #include "tree-affine.h"
94 #include "wide-int-print.h"
95 #include "builtins.h"
96 \f
97 /* Information about a strength reduction candidate. Each statement
98 in the candidate table represents an expression of one of the
99 following forms (the special case of CAND_REF will be described
100 later):
101
102 (CAND_MULT) S1: X = (B + i) * S
103 (CAND_ADD) S1: X = B + (i * S)
104
105 Here X and B are SSA names, i is an integer constant, and S is
106 either an SSA name or a constant. We call B the "base," i the
107 "index", and S the "stride."
108
109 Any statement S0 that dominates S1 and is of the form:
110
111 (CAND_MULT) S0: Y = (B + i') * S
112 (CAND_ADD) S0: Y = B + (i' * S)
113
114 is called a "basis" for S1. In both cases, S1 may be replaced by
115
116 S1': X = Y + (i - i') * S,
117
118 where (i - i') * S is folded to the extent possible.
119
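   For illustration (a hypothetical example; the names are invented),
   suppose the following statements appear, with S0 dominating S1:

     S0: Y = (B + 2) * S
     S1: X = (B + 7) * S

   Then S0 is a basis for S1, and S1 may be replaced by

     S1': X = Y + 5 * S

   which is profitable whenever 5 * S folds to a constant or is cheaper
   to compute than the original multiply.
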
120 All gimple statements are visited in dominator order, and each
121 statement that may contribute to one of the forms of S1 above is
122 given at least one entry in the candidate table. Such statements
123 include addition, pointer addition, subtraction, multiplication,
124 negation, copies, and nontrivial type casts. If a statement may
125 represent more than one expression of the forms of S1 above,
126 multiple "interpretations" are stored in the table and chained
127 together. Examples:
128
129 * An add of two SSA names may treat either operand as the base.
130 * A multiply of two SSA names, likewise.
131 * A copy or cast may be thought of as either a CAND_MULT with
132 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
133
134 Candidate records are allocated from an obstack. They are addressed
135 both from a hash table keyed on S1, and from a vector of candidate
136 pointers arranged in predominator order.
137
138 Opportunity note
139 ----------------
140 Currently we don't recognize:
141
142 S0: Y = (S * i') - B
143 S1: X = (S * i) - B
144
145 as a strength reduction opportunity, even though this S1 would
146 also be replaceable by the S1' above. This can be added if it
147 comes up in practice.
148
149 Strength reduction in addressing
150 --------------------------------
151 There is another kind of candidate known as CAND_REF. A CAND_REF
152 describes a statement containing a memory reference having
153 complex addressing that might benefit from strength reduction.
154 Specifically, we are interested in references for which
155 get_inner_reference returns a base address, offset, and bitpos as
156 follows:
157
158 base: MEM_REF (T1, C1)
159 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
160 bitpos: C4 * BITS_PER_UNIT
161
162 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
163 arbitrary integer constants. Note that C2 may be zero, in which
164 case the offset will be MULT_EXPR (T2, C3).
165
166 When this pattern is recognized, the original memory reference
167 can be replaced with:
168
169 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
170 C1 + (C2 * C3) + C4)
171
172 which distributes the multiply to allow constant folding. When
173 two or more addressing expressions can be represented by MEM_REFs
174 of this form, differing only in the constants C1, C2, and C4,
175 making this substitution produces more efficient addressing during
176 the RTL phases. When there are not at least two expressions with
177 the same values of T1, T2, and C3, there is nothing to be gained
178 by the replacement.
179
180 Strength reduction of CAND_REFs uses the same infrastructure as
181 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
182 field, MULT_EXPR (T2, C3) in the stride (S) field, and
183 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
184 is thus another CAND_REF with the same B and S values. When at
185 least two CAND_REFs are chained together using the basis relation,
186 each of them is replaced as above, resulting in improved code
187 generation for addressing.
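
   For illustration (hypothetical SSA names and constants), suppose two
   references are recognized as

     r1: base MEM_REF (p_1, 8), offset MULT_EXPR (j_2, 4), bitpos 0
     r2: base MEM_REF (p_1, 8), offset MULT_EXPR (PLUS_EXPR (j_2, 5), 4),
         bitpos 0

   Both share T1 = p_1, T2 = j_2, and C3 = 4, so they are rewritten as

     r1: MEM_REF (POINTER_PLUS_EXPR (p_1, MULT_EXPR (j_2, 4)), 8)
     r2: MEM_REF (POINTER_PLUS_EXPR (p_1, MULT_EXPR (j_2, 4)), 28)

   and the common address computation can be shared.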
188
189 Conditional candidates
190 ======================
191
192 Conditional candidates are best illustrated with an example.
193 Consider the code sequence:
194
195 (1) x_0 = ...;
196 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
197 if (...)
198 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
199 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
200 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
201 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
202
203 Here strength reduction is complicated by the uncertain value of x_2.
204 A legitimate transformation is:
205
206 (1) x_0 = ...;
207 (2) a_0 = x_0 * 5;
208 if (...)
209 {
210 (3) [x_1 = x_0 + 1;]
211 (3a) t_1 = a_0 + 5;
212 }
213 (4) [x_2 = PHI <x_0, x_1>;]
214 (4a) t_2 = PHI <a_0, t_1>;
215 (5) [x_3 = x_2 + 1;]
216 (6r) a_1 = t_2 + 5;
217
218 where the bracketed instructions may go dead.
219
220 To recognize this opportunity, we have to observe that statement (6)
221 has a "hidden basis" (2). The hidden basis is unlike a normal basis
222 in that the statement and the hidden basis have different base SSA
223 names (x_2 and x_0, respectively). The relationship is established
224 when a statement's base name (x_2) is defined by a phi statement (4),
225 each argument of which (x_0, x_1) has an identical "derived base name."
226 If the argument is defined by a candidate (as x_1 is by (3)) that is a
227 CAND_ADD having a stride of 1, the derived base name of the argument is
228 the base name of the candidate (x_0). Otherwise, the argument itself
229 is its derived base name (as is the case with argument x_0).
230
231 The hidden basis for statement (6) is the nearest dominating candidate
232 whose base name is the derived base name (x_0) of the feeding phi (4),
233 and whose stride is identical to that of the statement. We can then
234 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
235 allowing the final replacement of (6) by the strength-reduced (6r).
236
237 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
238 A CAND_PHI is not a candidate for replacement, but is maintained in the
239 candidate table to ease discovery of hidden bases. Any phi statement
240 whose arguments share a common derived base name is entered into the
241 table with the derived base name, an (arbitrary) index of zero, and a
242 stride of 1. A statement with a hidden basis can then be detected by
243 simply looking up its feeding phi definition in the candidate table,
244 extracting the derived base name, and searching for a basis in the
245 usual manner after substituting the derived base name.
246
247 Note that the transformation is only valid when the original phi and
248 the statements that define the phi's arguments are all at the same
249 position in the loop hierarchy. */
250
251
252 /* Index into the candidate vector, offset by 1. VECs are zero-based,
253 while cand_idx's are one-based, with zero indicating null. */
254 typedef unsigned cand_idx;
255
256 /* The kind of candidate. */
257 enum cand_kind
258 {
259 CAND_MULT,
260 CAND_ADD,
261 CAND_REF,
262 CAND_PHI
263 };
264
265 struct slsr_cand_d
266 {
267 /* The candidate statement S1. */
268 gimple cand_stmt;
269
270 /* The base expression B: often an SSA name, but not always. */
271 tree base_expr;
272
273 /* The stride S. */
274 tree stride;
275
276 /* The index constant i. */
277 widest_int index;
278
279 /* The type of the candidate. This is normally the type of base_expr,
280 but casts may have occurred when combining feeding instructions.
281 A candidate can only be a basis for candidates of the same final type.
282 (For CAND_REFs, this is the type to be used for operand 1 of the
283 replacement MEM_REF.) */
284 tree cand_type;
285
286 /* The kind of candidate (CAND_MULT, etc.). */
287 enum cand_kind kind;
288
289 /* Index of this candidate in the candidate vector. */
290 cand_idx cand_num;
291
292 /* Index of the next candidate record for the same statement.
293 A statement may be useful in more than one way (e.g., due to
294 commutativity). So we can have multiple "interpretations"
295 of a statement. */
296 cand_idx next_interp;
297
298 /* Index of the basis statement S0, if any, in the candidate vector. */
299 cand_idx basis;
300
301 /* First candidate for which this candidate is a basis, if one exists. */
302 cand_idx dependent;
303
304 /* Next candidate having the same basis as this one. */
305 cand_idx sibling;
306
307 /* If this is a conditional candidate, the CAND_PHI candidate
308 that defines the base SSA name B. */
309 cand_idx def_phi;
310
311 /* Savings that can be expected from eliminating dead code if this
312 candidate is replaced. */
313 int dead_savings;
314 };
315
316 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
317 typedef const struct slsr_cand_d *const_slsr_cand_t;
318
319 /* Pointers to candidates are chained together as part of a mapping
320 from base expressions to the candidates that use them. */
321
322 struct cand_chain_d
323 {
324 /* Base expression for the chain of candidates: often, but not
325 always, an SSA name. */
326 tree base_expr;
327
328 /* Pointer to a candidate. */
329 slsr_cand_t cand;
330
331 /* Chain pointer. */
332 struct cand_chain_d *next;
333
334 };
335
336 typedef struct cand_chain_d cand_chain, *cand_chain_t;
337 typedef const struct cand_chain_d *const_cand_chain_t;
338
339 /* Information about a unique "increment" associated with candidates
340 having an SSA name for a stride. An increment is the difference
341 between the index of the candidate and the index of its basis,
342 i.e., (i - i') as discussed in the module commentary.
343
344 When we are not going to generate address arithmetic we treat
345 increments that differ only in sign as the same, allowing sharing
346 of the cost of initializers. The absolute value of the increment
347 is stored in the incr_info. */
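
/* For illustration (hypothetical values): if one candidate's increment is
   +3 and another's is -3, both are recorded under the absolute value 3,
   and a single initializer T_0 = 3 * S can feed both replacements, one as
   an add of T_0 and the other as a subtract of T_0.  */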
348
349 struct incr_info_d
350 {
351 /* The increment that relates a candidate to its basis. */
352 widest_int incr;
353
354 /* How many times the increment occurs in the candidate tree. */
355 unsigned count;
356
357 /* Cost of replacing candidates using this increment. Negative and
358 zero costs indicate replacement should be performed. */
359 int cost;
360
361 /* If this increment is profitable but is not -1, 0, or 1, it requires
362 an initializer T_0 = stride * incr to be found or introduced in the
363 nearest common dominator of all candidates. This field holds T_0
364 for subsequent use. */
365 tree initializer;
366
367 /* If the initializer was found to already exist, this is the block
368 where it was found. */
369 basic_block init_bb;
370 };
371
372 typedef struct incr_info_d incr_info, *incr_info_t;
373
374 /* Candidates are maintained in a vector. If candidate X dominates
375 candidate Y, then X appears before Y in the vector; but the
376 converse does not necessarily hold. */
377 static vec<slsr_cand_t> cand_vec;
378
379 enum cost_consts
380 {
381 COST_NEUTRAL = 0,
382 COST_INFINITE = 1000
383 };
384
385 enum stride_status
386 {
387 UNKNOWN_STRIDE = 0,
388 KNOWN_STRIDE = 1
389 };
390
391 enum phi_adjust_status
392 {
393 NOT_PHI_ADJUST = 0,
394 PHI_ADJUST = 1
395 };
396
397 enum count_phis_status
398 {
399 DONT_COUNT_PHIS = 0,
400 COUNT_PHIS = 1
401 };
402
403 /* Pointer map embodying a mapping from statements to candidates. */
404 static hash_map<gimple, slsr_cand_t> *stmt_cand_map;
405
406 /* Obstack for candidates. */
407 static struct obstack cand_obstack;
408
409 /* Obstack for candidate chains. */
410 static struct obstack chain_obstack;
411
412 /* An array INCR_VEC of incr_infos is used during analysis of related
413 candidates having an SSA name for a stride. INCR_VEC_LEN describes
414 its current length. MAX_INCR_VEC_LEN is used to avoid costly
415 pathological cases. */
416 static incr_info_t incr_vec;
417 static unsigned incr_vec_len;
418 const int MAX_INCR_VEC_LEN = 16;
419
420 /* For a chain of candidates with unknown stride, indicates whether or not
421 we must generate pointer arithmetic when replacing statements. */
422 static bool address_arithmetic_p;
423
424 /* Forward function declarations. */
425 static slsr_cand_t base_cand_from_table (tree);
426 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
427 static bool legal_cast_p_1 (tree, tree);
428 \f
429 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
430
431 static slsr_cand_t
432 lookup_cand (cand_idx idx)
433 {
434 return cand_vec[idx - 1];
435 }
436
437 /* Helper for hashing a candidate chain header. */
438
439 struct cand_chain_hasher : typed_noop_remove <cand_chain>
440 {
441 typedef cand_chain *value_type;
442 typedef cand_chain *compare_type;
443 static inline hashval_t hash (const cand_chain *);
444 static inline bool equal (const cand_chain *, const cand_chain *);
445 };
446
447 inline hashval_t
448 cand_chain_hasher::hash (const cand_chain *p)
449 {
450 tree base_expr = p->base_expr;
451 return iterative_hash_expr (base_expr, 0);
452 }
453
454 inline bool
455 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
456 {
457 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
458 }
459
460 /* Hash table embodying a mapping from base exprs to chains of candidates. */
461 static hash_table<cand_chain_hasher> *base_cand_map;
462 \f
463 /* Pointer map used by tree_to_aff_combination_expand. */
464 static hash_map<tree, name_expansion *> *name_expansions;
465 /* Pointer map embodying a mapping from bases to alternative bases. */
466 static hash_map<tree, tree> *alt_base_map;
467
 468 /* Given BASE, use the tree affine combination facilities to
469 find the underlying tree expression for BASE, with any
470 immediate offset excluded.
471
472 N.B. we should eliminate this backtracking with better forward
473 analysis in a future release. */
474
475 static tree
476 get_alternative_base (tree base)
477 {
478 tree *result = alt_base_map->get (base);
479
480 if (result == NULL)
481 {
482 tree expr;
483 aff_tree aff;
484
485 tree_to_aff_combination_expand (base, TREE_TYPE (base),
486 &aff, &name_expansions);
487 aff.offset = 0;
488 expr = aff_combination_to_tree (&aff);
489
490 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
491
492 return expr == base ? NULL : expr;
493 }
494
495 return *result;
496 }
497
498 /* Look in the candidate table for a CAND_PHI that defines BASE and
499 return it if found; otherwise return NULL. */
500
501 static cand_idx
502 find_phi_def (tree base)
503 {
504 slsr_cand_t c;
505
506 if (TREE_CODE (base) != SSA_NAME)
507 return 0;
508
509 c = base_cand_from_table (base);
510
511 if (!c || c->kind != CAND_PHI)
512 return 0;
513
514 return c->cand_num;
515 }
516
517 /* Helper routine for find_basis_for_candidate. May be called twice:
518 once for the candidate's base expr, and optionally again either for
519 the candidate's phi definition or for a CAND_REF's alternative base
520 expression. */
521
522 static slsr_cand_t
523 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
524 {
525 cand_chain mapping_key;
526 cand_chain_t chain;
527 slsr_cand_t basis = NULL;
528
529 // Limit potential of N^2 behavior for long candidate chains.
530 int iters = 0;
531 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
532
533 mapping_key.base_expr = base_expr;
534 chain = base_cand_map->find (&mapping_key);
535
536 for (; chain && iters < max_iters; chain = chain->next, ++iters)
537 {
538 slsr_cand_t one_basis = chain->cand;
539
540 if (one_basis->kind != c->kind
541 || one_basis->cand_stmt == c->cand_stmt
542 || !operand_equal_p (one_basis->stride, c->stride, 0)
543 || !types_compatible_p (one_basis->cand_type, c->cand_type)
544 || !dominated_by_p (CDI_DOMINATORS,
545 gimple_bb (c->cand_stmt),
546 gimple_bb (one_basis->cand_stmt)))
547 continue;
548
549 if (!basis || basis->cand_num < one_basis->cand_num)
550 basis = one_basis;
551 }
552
553 return basis;
554 }
555
556 /* Use the base expr from candidate C to look for possible candidates
557 that can serve as a basis for C. Each potential basis must also
558 appear in a block that dominates the candidate statement and have
559 the same stride and type. If more than one possible basis exists,
560 the one with highest index in the vector is chosen; this will be
561 the most immediately dominating basis. */
562
563 static int
564 find_basis_for_candidate (slsr_cand_t c)
565 {
566 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
567
568 /* If a candidate doesn't have a basis using its base expression,
569 it may have a basis hidden by one or more intervening phis. */
570 if (!basis && c->def_phi)
571 {
572 basic_block basis_bb, phi_bb;
573 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
574 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
575
576 if (basis)
577 {
578 /* A hidden basis must dominate the phi-definition of the
579 candidate's base name. */
580 phi_bb = gimple_bb (phi_cand->cand_stmt);
581 basis_bb = gimple_bb (basis->cand_stmt);
582
583 if (phi_bb == basis_bb
584 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
585 {
586 basis = NULL;
587 c->basis = 0;
588 }
589
590 /* If we found a hidden basis, estimate additional dead-code
591 savings if the phi and its feeding statements can be removed. */
592 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
593 c->dead_savings += phi_cand->dead_savings;
594 }
595 }
596
597 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
598 {
599 tree alt_base_expr = get_alternative_base (c->base_expr);
600 if (alt_base_expr)
601 basis = find_basis_for_base_expr (c, alt_base_expr);
602 }
603
604 if (basis)
605 {
606 c->sibling = basis->dependent;
607 basis->dependent = c->cand_num;
608 return basis->cand_num;
609 }
610
611 return 0;
612 }
613
614 /* Record a mapping from BASE to C, indicating that C may potentially serve
615 as a basis using that base expression. BASE may be the same as
 616    C->BASE_EXPR; alternatively BASE can be a different tree that shares the
 617    underlying expression of C->BASE_EXPR.  */
618
619 static void
620 record_potential_basis (slsr_cand_t c, tree base)
621 {
622 cand_chain_t node;
623 cand_chain **slot;
624
625 gcc_assert (base);
626
627 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
628 node->base_expr = base;
629 node->cand = c;
630 node->next = NULL;
631 slot = base_cand_map->find_slot (node, INSERT);
632
633 if (*slot)
634 {
635 cand_chain_t head = (cand_chain_t) (*slot);
636 node->next = head->next;
637 head->next = node;
638 }
639 else
640 *slot = node;
641 }
642
643 /* Allocate storage for a new candidate and initialize its fields.
644 Attempt to find a basis for the candidate.
645
646 For CAND_REF, an alternative base may also be recorded and used
647 to find a basis. This helps cases where the expression hidden
648 behind BASE (which is usually an SSA_NAME) has immediate offset,
649 e.g.
650
651 a2[i][j] = 1;
652 a2[i + 20][j] = 2; */
653
654 static slsr_cand_t
655 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
656 const widest_int &index, tree stride, tree ctype,
657 unsigned savings)
658 {
659 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
660 sizeof (slsr_cand));
661 c->cand_stmt = gs;
662 c->base_expr = base;
663 c->stride = stride;
664 c->index = index;
665 c->cand_type = ctype;
666 c->kind = kind;
667 c->cand_num = cand_vec.length () + 1;
668 c->next_interp = 0;
669 c->dependent = 0;
670 c->sibling = 0;
671 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
672 c->dead_savings = savings;
673
674 cand_vec.safe_push (c);
675
676 if (kind == CAND_PHI)
677 c->basis = 0;
678 else
679 c->basis = find_basis_for_candidate (c);
680
681 record_potential_basis (c, base);
682 if (flag_expensive_optimizations && kind == CAND_REF)
683 {
684 tree alt_base = get_alternative_base (base);
685 if (alt_base)
686 record_potential_basis (c, alt_base);
687 }
688
689 return c;
690 }
691
692 /* Determine the target cost of statement GS when compiling according
693 to SPEED. */
694
695 static int
696 stmt_cost (gimple gs, bool speed)
697 {
698 tree lhs, rhs1, rhs2;
699 machine_mode lhs_mode;
700
701 gcc_assert (is_gimple_assign (gs));
702 lhs = gimple_assign_lhs (gs);
703 rhs1 = gimple_assign_rhs1 (gs);
704 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
705
706 switch (gimple_assign_rhs_code (gs))
707 {
708 case MULT_EXPR:
709 rhs2 = gimple_assign_rhs2 (gs);
710
711 if (tree_fits_shwi_p (rhs2))
712 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
713
714 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
715 return mul_cost (speed, lhs_mode);
716
717 case PLUS_EXPR:
718 case POINTER_PLUS_EXPR:
719 case MINUS_EXPR:
720 return add_cost (speed, lhs_mode);
721
722 case NEGATE_EXPR:
723 return neg_cost (speed, lhs_mode);
724
725 CASE_CONVERT:
726 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
727
728 /* Note that we don't assign costs to copies that in most cases
729 will go away. */
730 default:
731 ;
732 }
733
734 gcc_unreachable ();
735 return 0;
736 }
737
738 /* Look up the defining statement for BASE_IN and return a pointer
739 to its candidate in the candidate table, if any; otherwise NULL.
740 Only CAND_ADD and CAND_MULT candidates are returned. */
741
742 static slsr_cand_t
743 base_cand_from_table (tree base_in)
744 {
745 slsr_cand_t *result;
746
747 gimple def = SSA_NAME_DEF_STMT (base_in);
748 if (!def)
749 return (slsr_cand_t) NULL;
750
751 result = stmt_cand_map->get (def);
752
753 if (result && (*result)->kind != CAND_REF)
754 return *result;
755
756 return (slsr_cand_t) NULL;
757 }
758
759 /* Add an entry to the statement-to-candidate mapping. */
760
761 static void
762 add_cand_for_stmt (gimple gs, slsr_cand_t c)
763 {
764 gcc_assert (!stmt_cand_map->put (gs, c));
765 }
766 \f
767 /* Given PHI which contains a phi statement, determine whether it
768 satisfies all the requirements of a phi candidate. If so, create
769 a candidate. Note that a CAND_PHI never has a basis itself, but
770 is used to help find a basis for subsequent candidates. */
771
772 static void
773 slsr_process_phi (gphi *phi, bool speed)
774 {
775 unsigned i;
776 tree arg0_base = NULL_TREE, base_type;
777 slsr_cand_t c;
778 struct loop *cand_loop = gimple_bb (phi)->loop_father;
779 unsigned savings = 0;
780
781 /* A CAND_PHI requires each of its arguments to have the same
782 derived base name. (See the module header commentary for a
783 definition of derived base names.) Furthermore, all feeding
784 definitions must be in the same position in the loop hierarchy
785 as PHI. */
786
787 for (i = 0; i < gimple_phi_num_args (phi); i++)
788 {
789 slsr_cand_t arg_cand;
790 tree arg = gimple_phi_arg_def (phi, i);
791 tree derived_base_name = NULL_TREE;
792 gimple arg_stmt = NULL;
793 basic_block arg_bb = NULL;
794
795 if (TREE_CODE (arg) != SSA_NAME)
796 return;
797
798 arg_cand = base_cand_from_table (arg);
799
800 if (arg_cand)
801 {
802 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
803 {
804 if (!arg_cand->next_interp)
805 return;
806
807 arg_cand = lookup_cand (arg_cand->next_interp);
808 }
809
810 if (!integer_onep (arg_cand->stride))
811 return;
812
813 derived_base_name = arg_cand->base_expr;
814 arg_stmt = arg_cand->cand_stmt;
815 arg_bb = gimple_bb (arg_stmt);
816
817 /* Gather potential dead code savings if the phi statement
818 can be removed later on. */
819 if (has_single_use (arg))
820 {
821 if (gimple_code (arg_stmt) == GIMPLE_PHI)
822 savings += arg_cand->dead_savings;
823 else
824 savings += stmt_cost (arg_stmt, speed);
825 }
826 }
827 else
828 {
829 derived_base_name = arg;
830
831 if (SSA_NAME_IS_DEFAULT_DEF (arg))
832 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
833 else
 834                 arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
835 }
836
837 if (!arg_bb || arg_bb->loop_father != cand_loop)
838 return;
839
840 if (i == 0)
841 arg0_base = derived_base_name;
842 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
843 return;
844 }
845
846 /* Create the candidate. "alloc_cand_and_find_basis" is named
847 misleadingly for this case, as no basis will be sought for a
848 CAND_PHI. */
849 base_type = TREE_TYPE (arg0_base);
850
851 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
852 0, integer_one_node, base_type, savings);
853
854 /* Add the candidate to the statement-candidate mapping. */
855 add_cand_for_stmt (phi, c);
856 }
857
858 /* Given PBASE which is a pointer to tree, look up the defining
859 statement for it and check whether the candidate is in the
860 form of:
861
862 X = B + (1 * S), S is integer constant
863 X = B + (i * S), S is integer one
864
 865    If so, set PBASE to the candidate's base_expr and return
 866    (i * S) as a widest_int.
 867    Otherwise, just return a zero widest_int.  */
868
869 static widest_int
870 backtrace_base_for_ref (tree *pbase)
871 {
872 tree base_in = *pbase;
873 slsr_cand_t base_cand;
874
875 STRIP_NOPS (base_in);
876
877 /* Strip off widening conversion(s) to handle cases where
878 e.g. 'B' is widened from an 'int' in order to calculate
879 a 64-bit address. */
880 if (CONVERT_EXPR_P (base_in)
881 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
882 base_in = get_unwidened (base_in, NULL_TREE);
883
884 if (TREE_CODE (base_in) != SSA_NAME)
885 return 0;
886
887 base_cand = base_cand_from_table (base_in);
888
889 while (base_cand && base_cand->kind != CAND_PHI)
890 {
891 if (base_cand->kind == CAND_ADD
892 && base_cand->index == 1
893 && TREE_CODE (base_cand->stride) == INTEGER_CST)
894 {
895 /* X = B + (1 * S), S is integer constant. */
896 *pbase = base_cand->base_expr;
897 return wi::to_widest (base_cand->stride);
898 }
899 else if (base_cand->kind == CAND_ADD
900 && TREE_CODE (base_cand->stride) == INTEGER_CST
901 && integer_onep (base_cand->stride))
902 {
903 /* X = B + (i * S), S is integer one. */
904 *pbase = base_cand->base_expr;
905 return base_cand->index;
906 }
907
908 if (base_cand->next_interp)
909 base_cand = lookup_cand (base_cand->next_interp);
910 else
911 base_cand = NULL;
912 }
913
914 return 0;
915 }
916
917 /* Look for the following pattern:
918
919 *PBASE: MEM_REF (T1, C1)
920
921 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
922 or
923 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
924 or
925 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
926
927 *PINDEX: C4 * BITS_PER_UNIT
928
929 If not present, leave the input values unchanged and return FALSE.
930 Otherwise, modify the input values as follows and return TRUE:
931
932 *PBASE: T1
933 *POFFSET: MULT_EXPR (T2, C3)
934 *PINDEX: C1 + (C2 * C3) + C4
935
936 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
937 will be further restructured to:
938
939 *PBASE: T1
940 *POFFSET: MULT_EXPR (T2', C3)
941 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
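
/* A hypothetical worked example of the restructuring above (all values
   illustrative): with *PBASE = MEM_REF (p_1, 8),
   *POFFSET = MULT_EXPR (PLUS_EXPR (j_2, 2), 4) and
   *PINDEX = 16 * BITS_PER_UNIT, we have C1 = 8, C2 = 2, C3 = 4 and
   C4 = 16, so the outputs become *PBASE = p_1,
   *POFFSET = MULT_EXPR (j_2, 4) and *PINDEX = 8 + (2 * 4) + 16 = 32,
   plus C5 * C3 if backtrace_base_for_ref finds a feeding CAND_ADD
   for T2.  */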
942
943 static bool
944 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
945 tree *ptype)
946 {
947 tree base = *pbase, offset = *poffset;
948 widest_int index = *pindex;
949 tree mult_op0, t1, t2, type;
950 widest_int c1, c2, c3, c4, c5;
951
952 if (!base
953 || !offset
954 || TREE_CODE (base) != MEM_REF
955 || TREE_CODE (offset) != MULT_EXPR
956 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
957 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
958 return false;
959
960 t1 = TREE_OPERAND (base, 0);
961 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
962 type = TREE_TYPE (TREE_OPERAND (base, 1));
963
964 mult_op0 = TREE_OPERAND (offset, 0);
965 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
966
967 if (TREE_CODE (mult_op0) == PLUS_EXPR)
968
969 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
970 {
971 t2 = TREE_OPERAND (mult_op0, 0);
972 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
973 }
974 else
975 return false;
976
977 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
978
979 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
980 {
981 t2 = TREE_OPERAND (mult_op0, 0);
982 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
983 }
984 else
985 return false;
986
987 else
988 {
989 t2 = mult_op0;
990 c2 = 0;
991 }
992
993 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
994 c5 = backtrace_base_for_ref (&t2);
995
996 *pbase = t1;
997 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
998 wide_int_to_tree (sizetype, c3));
999 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
1000 *ptype = type;
1001
1002 return true;
1003 }
1004
1005 /* Given GS which contains a data reference, create a CAND_REF entry in
1006 the candidate table and attempt to find a basis. */
1007
1008 static void
1009 slsr_process_ref (gimple gs)
1010 {
1011 tree ref_expr, base, offset, type;
1012 HOST_WIDE_INT bitsize, bitpos;
1013 machine_mode mode;
1014 int unsignedp, volatilep;
1015 slsr_cand_t c;
1016
1017 if (gimple_vdef (gs))
1018 ref_expr = gimple_assign_lhs (gs);
1019 else
1020 ref_expr = gimple_assign_rhs1 (gs);
1021
1022 if (!handled_component_p (ref_expr)
1023 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1024 || (TREE_CODE (ref_expr) == COMPONENT_REF
1025 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1026 return;
1027
1028 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1029 &unsignedp, &volatilep, false);
1030 widest_int index = bitpos;
1031
1032 if (!restructure_reference (&base, &offset, &index, &type))
1033 return;
1034
1035 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1036 type, 0);
1037
1038 /* Add the candidate to the statement-candidate mapping. */
1039 add_cand_for_stmt (gs, c);
1040 }
1041
1042 /* Create a candidate entry for a statement GS, where GS multiplies
1043 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1044 about the two SSA names into the new candidate. Return the new
1045 candidate. */
1046
1047 static slsr_cand_t
1048 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1049 {
1050 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1051 widest_int index;
1052 unsigned savings = 0;
1053 slsr_cand_t c;
1054 slsr_cand_t base_cand = base_cand_from_table (base_in);
1055
1056 /* Look at all interpretations of the base candidate, if necessary,
1057 to find information to propagate into this candidate. */
1058 while (base_cand && !base && base_cand->kind != CAND_PHI)
1059 {
1060
1061 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1062 {
1063 /* Y = (B + i') * 1
1064 X = Y * Z
1065 ================
1066 X = (B + i') * Z */
1067 base = base_cand->base_expr;
1068 index = base_cand->index;
1069 stride = stride_in;
1070 ctype = base_cand->cand_type;
1071 if (has_single_use (base_in))
1072 savings = (base_cand->dead_savings
1073 + stmt_cost (base_cand->cand_stmt, speed));
1074 }
1075 else if (base_cand->kind == CAND_ADD
1076 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1077 {
1078 /* Y = B + (i' * S), S constant
1079 X = Y * Z
1080 ============================
1081 X = B + ((i' * S) * Z) */
1082 base = base_cand->base_expr;
1083 index = base_cand->index * wi::to_widest (base_cand->stride);
1084 stride = stride_in;
1085 ctype = base_cand->cand_type;
1086 if (has_single_use (base_in))
1087 savings = (base_cand->dead_savings
1088 + stmt_cost (base_cand->cand_stmt, speed));
1089 }
1090
1091 if (base_cand->next_interp)
1092 base_cand = lookup_cand (base_cand->next_interp);
1093 else
1094 base_cand = NULL;
1095 }
1096
1097 if (!base)
1098 {
1099 /* No interpretations had anything useful to propagate, so
1100 produce X = (Y + 0) * Z. */
1101 base = base_in;
1102 index = 0;
1103 stride = stride_in;
1104 ctype = TREE_TYPE (base_in);
1105 }
1106
1107 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1108 ctype, savings);
1109 return c;
1110 }
1111
1112 /* Create a candidate entry for a statement GS, where GS multiplies
1113 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1114 information about BASE_IN into the new candidate. Return the new
1115 candidate. */
1116
1117 static slsr_cand_t
1118 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1119 {
1120 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1121 widest_int index, temp;
1122 unsigned savings = 0;
1123 slsr_cand_t c;
1124 slsr_cand_t base_cand = base_cand_from_table (base_in);
1125
1126 /* Look at all interpretations of the base candidate, if necessary,
1127 to find information to propagate into this candidate. */
1128 while (base_cand && !base && base_cand->kind != CAND_PHI)
1129 {
1130 if (base_cand->kind == CAND_MULT
1131 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1132 {
1133 /* Y = (B + i') * S, S constant
1134 X = Y * c
1135 ============================
1136 X = (B + i') * (S * c) */
1137 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1138 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1139 {
1140 base = base_cand->base_expr;
1141 index = base_cand->index;
1142 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1143 ctype = base_cand->cand_type;
1144 if (has_single_use (base_in))
1145 savings = (base_cand->dead_savings
1146 + stmt_cost (base_cand->cand_stmt, speed));
1147 }
1148 }
1149 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1150 {
1151 /* Y = B + (i' * 1)
1152 X = Y * c
1153 ===========================
1154 X = (B + i') * c */
1155 base = base_cand->base_expr;
1156 index = base_cand->index;
1157 stride = stride_in;
1158 ctype = base_cand->cand_type;
1159 if (has_single_use (base_in))
1160 savings = (base_cand->dead_savings
1161 + stmt_cost (base_cand->cand_stmt, speed));
1162 }
1163 else if (base_cand->kind == CAND_ADD
1164 && base_cand->index == 1
1165 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1166 {
1167 /* Y = B + (1 * S), S constant
1168 X = Y * c
1169 ===========================
1170 X = (B + S) * c */
1171 base = base_cand->base_expr;
1172 index = wi::to_widest (base_cand->stride);
1173 stride = stride_in;
1174 ctype = base_cand->cand_type;
1175 if (has_single_use (base_in))
1176 savings = (base_cand->dead_savings
1177 + stmt_cost (base_cand->cand_stmt, speed));
1178 }
1179
1180 if (base_cand->next_interp)
1181 base_cand = lookup_cand (base_cand->next_interp);
1182 else
1183 base_cand = NULL;
1184 }
1185
1186 if (!base)
1187 {
1188 /* No interpretations had anything useful to propagate, so
1189 produce X = (Y + 0) * c. */
1190 base = base_in;
1191 index = 0;
1192 stride = stride_in;
1193 ctype = TREE_TYPE (base_in);
1194 }
1195
1196 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1197 ctype, savings);
1198 return c;
1199 }
1200
1201 /* Given GS which is a multiply of scalar integers, make an appropriate
1202 entry in the candidate table. If this is a multiply of two SSA names,
1203 create two CAND_MULT interpretations and attempt to find a basis for
1204 each of them. Otherwise, create a single CAND_MULT and attempt to
1205 find a basis. */
1206
1207 static void
1208 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1209 {
1210 slsr_cand_t c, c2;
1211
1212 /* If this is a multiply of an SSA name with itself, it is highly
1213 unlikely that we will get a strength reduction opportunity, so
1214 don't record it as a candidate. This simplifies the logic for
1215 finding a basis, so if this is removed that must be considered. */
1216 if (rhs1 == rhs2)
1217 return;
1218
1219 if (TREE_CODE (rhs2) == SSA_NAME)
1220 {
1221 /* Record an interpretation of this statement in the candidate table
1222 assuming RHS1 is the base expression and RHS2 is the stride. */
1223 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1224
1225 /* Add the first interpretation to the statement-candidate mapping. */
1226 add_cand_for_stmt (gs, c);
1227
1228 /* Record another interpretation of this statement assuming RHS1
1229 is the stride and RHS2 is the base expression. */
1230 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1231 c->next_interp = c2->cand_num;
1232 }
1233 else
1234 {
1235 /* Record an interpretation for the multiply-immediate. */
1236 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1237
1238 /* Add the interpretation to the statement-candidate mapping. */
1239 add_cand_for_stmt (gs, c);
1240 }
1241 }
1242
1243 /* Create a candidate entry for a statement GS, where GS adds two
1244 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1245 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1246 information about the two SSA names into the new candidate.
1247 Return the new candidate. */
1248
1249 static slsr_cand_t
1250 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1251 bool subtract_p, bool speed)
1252 {
1253 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1254 widest_int index;
1255 unsigned savings = 0;
1256 slsr_cand_t c;
1257 slsr_cand_t base_cand = base_cand_from_table (base_in);
1258 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1259
1260 /* The most useful transformation is a multiply-immediate feeding
1261 an add or subtract. Look for that first. */
1262 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1263 {
1264 if (addend_cand->kind == CAND_MULT
1265 && addend_cand->index == 0
1266 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1267 {
1268 /* Z = (B + 0) * S, S constant
1269 X = Y +/- Z
1270 ===========================
1271 X = Y + ((+/-1 * S) * B) */
1272 base = base_in;
1273 index = wi::to_widest (addend_cand->stride);
1274 if (subtract_p)
1275 index = -index;
1276 stride = addend_cand->base_expr;
1277 ctype = TREE_TYPE (base_in);
1278 if (has_single_use (addend_in))
1279 savings = (addend_cand->dead_savings
1280 + stmt_cost (addend_cand->cand_stmt, speed));
1281 }
1282
1283 if (addend_cand->next_interp)
1284 addend_cand = lookup_cand (addend_cand->next_interp);
1285 else
1286 addend_cand = NULL;
1287 }
1288
1289 while (base_cand && !base && base_cand->kind != CAND_PHI)
1290 {
1291 if (base_cand->kind == CAND_ADD
1292 && (base_cand->index == 0
1293 || operand_equal_p (base_cand->stride,
1294 integer_zero_node, 0)))
1295 {
1296 /* Y = B + (i' * S), i' * S = 0
1297 X = Y +/- Z
1298 ============================
1299 X = B + (+/-1 * Z) */
1300 base = base_cand->base_expr;
1301 index = subtract_p ? -1 : 1;
1302 stride = addend_in;
1303 ctype = base_cand->cand_type;
1304 if (has_single_use (base_in))
1305 savings = (base_cand->dead_savings
1306 + stmt_cost (base_cand->cand_stmt, speed));
1307 }
1308 else if (subtract_p)
1309 {
1310 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1311
1312 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1313 {
1314 if (subtrahend_cand->kind == CAND_MULT
1315 && subtrahend_cand->index == 0
1316 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1317 {
1318 /* Z = (B + 0) * S, S constant
1319 X = Y - Z
1320 ===========================
1321 Value: X = Y + ((-1 * S) * B) */
1322 base = base_in;
1323 index = wi::to_widest (subtrahend_cand->stride);
1324 index = -index;
1325 stride = subtrahend_cand->base_expr;
1326 ctype = TREE_TYPE (base_in);
1327 if (has_single_use (addend_in))
1328 savings = (subtrahend_cand->dead_savings
1329 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1330 }
1331
1332 if (subtrahend_cand->next_interp)
1333 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1334 else
1335 subtrahend_cand = NULL;
1336 }
1337 }
1338
1339 if (base_cand->next_interp)
1340 base_cand = lookup_cand (base_cand->next_interp);
1341 else
1342 base_cand = NULL;
1343 }
1344
1345 if (!base)
1346 {
1347 /* No interpretations had anything useful to propagate, so
1348 produce X = Y + (1 * Z). */
1349 base = base_in;
1350 index = subtract_p ? -1 : 1;
1351 stride = addend_in;
1352 ctype = TREE_TYPE (base_in);
1353 }
1354
1355 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1356 ctype, savings);
1357 return c;
1358 }
1359
1360 /* Create a candidate entry for a statement GS, where GS adds SSA
1361 name BASE_IN to constant INDEX_IN. Propagate any known information
1362 about BASE_IN into the new candidate. Return the new candidate. */
1363
1364 static slsr_cand_t
1365 create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
1366 bool speed)
1367 {
1368 enum cand_kind kind = CAND_ADD;
1369 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1370 widest_int index, multiple;
1371 unsigned savings = 0;
1372 slsr_cand_t c;
1373 slsr_cand_t base_cand = base_cand_from_table (base_in);
1374
1375 while (base_cand && !base && base_cand->kind != CAND_PHI)
1376 {
1377 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1378
1379 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1380 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1381 sign, &multiple))
1382 {
1383 /* Y = (B + i') * S, S constant, c = kS for some integer k
1384 X = Y + c
1385 ============================
1386 X = (B + (i'+ k)) * S
1387 OR
1388 Y = B + (i' * S), S constant, c = kS for some integer k
1389 X = Y + c
1390 ============================
1391 X = (B + (i'+ k)) * S */
1392 kind = base_cand->kind;
1393 base = base_cand->base_expr;
1394 index = base_cand->index + multiple;
1395 stride = base_cand->stride;
1396 ctype = base_cand->cand_type;
1397 if (has_single_use (base_in))
1398 savings = (base_cand->dead_savings
1399 + stmt_cost (base_cand->cand_stmt, speed));
1400 }
1401
1402 if (base_cand->next_interp)
1403 base_cand = lookup_cand (base_cand->next_interp);
1404 else
1405 base_cand = NULL;
1406 }
1407
1408 if (!base)
1409 {
1410 /* No interpretations had anything useful to propagate, so
1411 produce X = Y + (c * 1). */
1412 kind = CAND_ADD;
1413 base = base_in;
1414 index = index_in;
1415 stride = integer_one_node;
1416 ctype = TREE_TYPE (base_in);
1417 }
1418
1419 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1420 ctype, savings);
1421 return c;
1422 }
1423
1424 /* Given GS which is an add or subtract of scalar integers or pointers,
1425 make at least one appropriate entry in the candidate table. */
1426
1427 static void
1428 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1429 {
1430 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1431 slsr_cand_t c = NULL, c2;
1432
1433 if (TREE_CODE (rhs2) == SSA_NAME)
1434 {
1435 /* First record an interpretation assuming RHS1 is the base expression
1436 and RHS2 is the stride. But it doesn't make sense for the
1437 stride to be a pointer, so don't record a candidate in that case. */
1438 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1439 {
1440 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1441
1442 /* Add the first interpretation to the statement-candidate
1443 mapping. */
1444 add_cand_for_stmt (gs, c);
1445 }
1446
1447 /* If the two RHS operands are identical, or this is a subtract,
1448 we're done. */
1449 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1450 return;
1451
1452 /* Otherwise, record another interpretation assuming RHS2 is the
1453 base expression and RHS1 is the stride, again provided that the
1454 stride is not a pointer. */
1455 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1456 {
1457 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1458 if (c)
1459 c->next_interp = c2->cand_num;
1460 else
1461 add_cand_for_stmt (gs, c2);
1462 }
1463 }
1464 else
1465 {
1466 /* Record an interpretation for the add-immediate. */
1467 widest_int index = wi::to_widest (rhs2);
1468 if (subtract_p)
1469 index = -index;
1470
1471 c = create_add_imm_cand (gs, rhs1, index, speed);
1472
1473 /* Add the interpretation to the statement-candidate mapping. */
1474 add_cand_for_stmt (gs, c);
1475 }
1476 }
1477
1478 /* Given GS which is a negate of a scalar integer, make an appropriate
1479 entry in the candidate table. A negate is equivalent to a multiply
1480 by -1. */
1481
1482 static void
1483 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1484 {
1485 /* Record a CAND_MULT interpretation for the multiply by -1. */
1486 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1487
1488 /* Add the interpretation to the statement-candidate mapping. */
1489 add_cand_for_stmt (gs, c);
1490 }
1491
1492 /* Help function for legal_cast_p, operating on two trees. Checks
1493 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1494 for more details. */
1495
1496 static bool
1497 legal_cast_p_1 (tree lhs, tree rhs)
1498 {
1499 tree lhs_type, rhs_type;
1500 unsigned lhs_size, rhs_size;
1501 bool lhs_wraps, rhs_wraps;
1502
1503 lhs_type = TREE_TYPE (lhs);
1504 rhs_type = TREE_TYPE (rhs);
1505 lhs_size = TYPE_PRECISION (lhs_type);
1506 rhs_size = TYPE_PRECISION (rhs_type);
1507 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1508 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1509
1510 if (lhs_size < rhs_size
1511 || (rhs_wraps && !lhs_wraps)
1512 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1513 return false;
1514
1515 return true;
1516 }
1517
1518 /* Return TRUE if GS is a statement that defines an SSA name from
1519 a conversion and is legal for us to combine with an add and multiply
1520 in the candidate table. For example, suppose we have:
1521
1522 A = B + i;
1523 C = (type) A;
1524 D = C * S;
1525
1526 Without the type-cast, we would create a CAND_MULT for D with base B,
 1527    index i, and stride S.  We want to record this candidate only if it
 1528    is equivalent to applying the type cast following the multiply:
1529
1530 A = B + i;
1531 E = A * S;
1532 D = (type) E;
1533
1534 We will record the type with the candidate for D. This allows us
1535 to use a similar previous candidate as a basis. If we have earlier seen
1536
1537 A' = B + i';
1538 C' = (type) A';
1539 D' = C' * S;
1540
1541 we can replace D with
1542
1543 D = D' + (i - i') * S;
1544
1545 But if moving the type-cast would change semantics, we mustn't do this.
1546
1547 This is legitimate for casts from a non-wrapping integral type to
1548 any integral type of the same or larger size. It is not legitimate
1549 to convert a wrapping type to a non-wrapping type, or to a wrapping
1550 type of a different size. I.e., with a wrapping type, we must
1551 assume that the addition B + i could wrap, in which case performing
1552 the multiply before or after one of the "illegal" type casts will
1553 have different semantics. */
1554
1555 static bool
1556 legal_cast_p (gimple gs, tree rhs)
1557 {
1558 if (!is_gimple_assign (gs)
1559 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1560 return false;
1561
1562 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1563 }
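
/* Illustrative examples of the rule above (assuming default semantics,
   where signed overflow is undefined and unsigned arithmetic wraps, and
   a target where 'long' is wider than 'int'): casting from 'int' to
   'long' is legal, since the source type does not wrap and the target is
   at least as wide; casting from 'unsigned int' to 'unsigned long' is
   not, since B + i could wrap in the narrower type and moving the
   multiply past the cast would change the result; casting between
   wrapping types of the same precision is legal.  */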
1564
1565 /* Given GS which is a cast to a scalar integer type, determine whether
1566 the cast is legal for strength reduction. If so, make at least one
1567 appropriate entry in the candidate table. */
1568
1569 static void
1570 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1571 {
1572 tree lhs, ctype;
1573 slsr_cand_t base_cand, c, c2;
1574 unsigned savings = 0;
1575
1576 if (!legal_cast_p (gs, rhs1))
1577 return;
1578
1579 lhs = gimple_assign_lhs (gs);
1580 base_cand = base_cand_from_table (rhs1);
1581 ctype = TREE_TYPE (lhs);
1582
1583 if (base_cand && base_cand->kind != CAND_PHI)
1584 {
1585 while (base_cand)
1586 {
1587 /* Propagate all data from the base candidate except the type,
1588 which comes from the cast, and the base candidate's cast,
1589 which is no longer applicable. */
1590 if (has_single_use (rhs1))
1591 savings = (base_cand->dead_savings
1592 + stmt_cost (base_cand->cand_stmt, speed));
1593
1594 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1595 base_cand->base_expr,
1596 base_cand->index, base_cand->stride,
1597 ctype, savings);
1598 if (base_cand->next_interp)
1599 base_cand = lookup_cand (base_cand->next_interp);
1600 else
1601 base_cand = NULL;
1602 }
1603 }
1604 else
1605 {
1606 /* If nothing is known about the RHS, create fresh CAND_ADD and
1607 CAND_MULT interpretations:
1608
1609 X = Y + (0 * 1)
1610 X = (Y + 0) * 1
1611
1612 The first of these is somewhat arbitrary, but the choice of
1613 1 for the stride simplifies the logic for propagating casts
1614 into their uses. */
1615 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1616 0, integer_one_node, ctype, 0);
1617 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1618 0, integer_one_node, ctype, 0);
1619 c->next_interp = c2->cand_num;
1620 }
1621
1622 /* Add the first (or only) interpretation to the statement-candidate
1623 mapping. */
1624 add_cand_for_stmt (gs, c);
1625 }
1626
1627 /* Given GS which is a copy of a scalar integer type, make at least one
1628 appropriate entry in the candidate table.
1629
1630 This interface is included for completeness, but is unnecessary
1631 if this pass immediately follows a pass that performs copy
1632 propagation, such as DOM. */
1633
1634 static void
1635 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1636 {
1637 slsr_cand_t base_cand, c, c2;
1638 unsigned savings = 0;
1639
1640 base_cand = base_cand_from_table (rhs1);
1641
1642 if (base_cand && base_cand->kind != CAND_PHI)
1643 {
1644 while (base_cand)
1645 {
1646 /* Propagate all data from the base candidate. */
1647 if (has_single_use (rhs1))
1648 savings = (base_cand->dead_savings
1649 + stmt_cost (base_cand->cand_stmt, speed));
1650
1651 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1652 base_cand->base_expr,
1653 base_cand->index, base_cand->stride,
1654 base_cand->cand_type, savings);
1655 if (base_cand->next_interp)
1656 base_cand = lookup_cand (base_cand->next_interp);
1657 else
1658 base_cand = NULL;
1659 }
1660 }
1661 else
1662 {
1663 /* If nothing is known about the RHS, create fresh CAND_ADD and
1664 CAND_MULT interpretations:
1665
1666 X = Y + (0 * 1)
1667 X = (Y + 0) * 1
1668
1669 The first of these is somewhat arbitrary, but the choice of
1670 1 for the stride simplifies the logic for propagating casts
1671 into their uses. */
1672 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1673 0, integer_one_node, TREE_TYPE (rhs1), 0);
1674 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1675 0, integer_one_node, TREE_TYPE (rhs1), 0);
1676 c->next_interp = c2->cand_num;
1677 }
1678
1679 /* Add the first (or only) interpretation to the statement-candidate
1680 mapping. */
1681 add_cand_for_stmt (gs, c);
1682 }
1683 \f
1684 class find_candidates_dom_walker : public dom_walker
1685 {
1686 public:
1687 find_candidates_dom_walker (cdi_direction direction)
1688 : dom_walker (direction) {}
1689 virtual void before_dom_children (basic_block);
1690 };
1691
1692 /* Find strength-reduction candidates in block BB. */
1693
1694 void
1695 find_candidates_dom_walker::before_dom_children (basic_block bb)
1696 {
1697 bool speed = optimize_bb_for_speed_p (bb);
1698
1699 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1700 gsi_next (&gsi))
1701 slsr_process_phi (gsi.phi (), speed);
1702
1703 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1704 gsi_next (&gsi))
1705 {
1706 gimple gs = gsi_stmt (gsi);
1707
1708 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1709 slsr_process_ref (gs);
1710
1711 else if (is_gimple_assign (gs)
1712 && SCALAR_INT_MODE_P
1713 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1714 {
1715 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1716
1717 switch (gimple_assign_rhs_code (gs))
1718 {
1719 case MULT_EXPR:
1720 case PLUS_EXPR:
1721 rhs1 = gimple_assign_rhs1 (gs);
1722 rhs2 = gimple_assign_rhs2 (gs);
1723 /* Should never happen, but currently some buggy situations
1724 in earlier phases put constants in rhs1. */
1725 if (TREE_CODE (rhs1) != SSA_NAME)
1726 continue;
1727 break;
1728
1729 /* Possible future opportunity: rhs1 of a ptr+ can be
1730 an ADDR_EXPR. */
1731 case POINTER_PLUS_EXPR:
1732 case MINUS_EXPR:
1733 rhs2 = gimple_assign_rhs2 (gs);
1734 /* Fall-through. */
1735
1736 CASE_CONVERT:
1737 case MODIFY_EXPR:
1738 case NEGATE_EXPR:
1739 rhs1 = gimple_assign_rhs1 (gs);
1740 if (TREE_CODE (rhs1) != SSA_NAME)
1741 continue;
1742 break;
1743
1744 default:
1745 ;
1746 }
1747
1748 switch (gimple_assign_rhs_code (gs))
1749 {
1750 case MULT_EXPR:
1751 slsr_process_mul (gs, rhs1, rhs2, speed);
1752 break;
1753
1754 case PLUS_EXPR:
1755 case POINTER_PLUS_EXPR:
1756 case MINUS_EXPR:
1757 slsr_process_add (gs, rhs1, rhs2, speed);
1758 break;
1759
1760 case NEGATE_EXPR:
1761 slsr_process_neg (gs, rhs1, speed);
1762 break;
1763
1764 CASE_CONVERT:
1765 slsr_process_cast (gs, rhs1, speed);
1766 break;
1767
1768 case MODIFY_EXPR:
1769 slsr_process_copy (gs, rhs1, speed);
1770 break;
1771
1772 default:
1773 ;
1774 }
1775 }
1776 }
1777 }
1778 \f
1779 /* Dump a candidate for debug. */
1780
1781 static void
1782 dump_candidate (slsr_cand_t c)
1783 {
1784 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1785 gimple_bb (c->cand_stmt)->index);
1786 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1787 switch (c->kind)
1788 {
1789 case CAND_MULT:
1790 fputs (" MULT : (", dump_file);
1791 print_generic_expr (dump_file, c->base_expr, 0);
1792 fputs (" + ", dump_file);
1793 print_decs (c->index, dump_file);
1794 fputs (") * ", dump_file);
1795 print_generic_expr (dump_file, c->stride, 0);
1796 fputs (" : ", dump_file);
1797 break;
1798 case CAND_ADD:
1799 fputs (" ADD : ", dump_file);
1800 print_generic_expr (dump_file, c->base_expr, 0);
1801 fputs (" + (", dump_file);
1802 print_decs (c->index, dump_file);
1803 fputs (" * ", dump_file);
1804 print_generic_expr (dump_file, c->stride, 0);
1805 fputs (") : ", dump_file);
1806 break;
1807 case CAND_REF:
1808 fputs (" REF : ", dump_file);
1809 print_generic_expr (dump_file, c->base_expr, 0);
1810 fputs (" + (", dump_file);
1811 print_generic_expr (dump_file, c->stride, 0);
1812 fputs (") + ", dump_file);
1813 print_decs (c->index, dump_file);
1814 fputs (" : ", dump_file);
1815 break;
1816 case CAND_PHI:
1817 fputs (" PHI : ", dump_file);
1818 print_generic_expr (dump_file, c->base_expr, 0);
1819 fputs (" + (unknown * ", dump_file);
1820 print_generic_expr (dump_file, c->stride, 0);
1821 fputs (") : ", dump_file);
1822 break;
1823 default:
1824 gcc_unreachable ();
1825 }
1826 print_generic_expr (dump_file, c->cand_type, 0);
1827 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1828 c->basis, c->dependent, c->sibling);
1829 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1830 c->next_interp, c->dead_savings);
1831 if (c->def_phi)
1832 fprintf (dump_file, " phi: %d\n", c->def_phi);
1833 fputs ("\n", dump_file);
1834 }
1835
1836 /* Dump the candidate vector for debug. */
1837
1838 static void
1839 dump_cand_vec (void)
1840 {
1841 unsigned i;
1842 slsr_cand_t c;
1843
1844 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1845
1846 FOR_EACH_VEC_ELT (cand_vec, i, c)
1847 dump_candidate (c);
1848 }
1849
1850 /* Callback used to dump the candidate chains hash table. */
1851
1852 int
1853 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1854 {
1855 const_cand_chain_t chain = *slot;
1856 cand_chain_t p;
1857
1858 print_generic_expr (dump_file, chain->base_expr, 0);
1859 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1860
1861 for (p = chain->next; p; p = p->next)
1862 fprintf (dump_file, " -> %d", p->cand->cand_num);
1863
1864 fputs ("\n", dump_file);
1865 return 1;
1866 }
1867
1868 /* Dump the candidate chains. */
1869
1870 static void
1871 dump_cand_chains (void)
1872 {
1873 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1874 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1875 (NULL);
1876 fputs ("\n", dump_file);
1877 }
1878
1879 /* Dump the increment vector for debug. */
1880
1881 static void
1882 dump_incr_vec (void)
1883 {
1884 if (dump_file && (dump_flags & TDF_DETAILS))
1885 {
1886 unsigned i;
1887
1888 fprintf (dump_file, "\nIncrement vector:\n\n");
1889
1890 for (i = 0; i < incr_vec_len; i++)
1891 {
1892 fprintf (dump_file, "%3d increment: ", i);
1893 print_decs (incr_vec[i].incr, dump_file);
1894 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1895 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1896 fputs ("\n initializer: ", dump_file);
1897 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1898 fputs ("\n\n", dump_file);
1899 }
1900 }
1901 }
1902 \f
1903 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1904 data reference. */
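/* For illustration: a CAND_REF whose base_expr is B, whose stride is S,
   and whose constant index is I is rewritten below as MEM[(B + S) + I],
   i.e. a MEM_REF whose address is the folded POINTER_PLUS_EXPR of B and
   S and whose constant offset is I.  */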
1905
1906 static void
1907 replace_ref (tree *expr, slsr_cand_t c)
1908 {
1909 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1910 unsigned HOST_WIDE_INT misalign;
1911 unsigned align;
1912
1913 /* Ensure the memory reference carries the minimum alignment
1914 requirement for the data type. See PR58041. */
1915 get_object_alignment_1 (*expr, &align, &misalign);
1916 if (misalign != 0)
1917 align = (misalign & -misalign);
1918 if (align < TYPE_ALIGN (acc_type))
1919 acc_type = build_aligned_type (acc_type, align);
1920
1921 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1922 c->base_expr, c->stride);
1923 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1924 wide_int_to_tree (c->cand_type, c->index));
1925
1926 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1927 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1928 TREE_OPERAND (mem_ref, 0)
1929 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1930 /*simple_p=*/true, NULL,
1931 /*before=*/true, GSI_SAME_STMT);
1932 copy_ref_info (mem_ref, *expr);
1933 *expr = mem_ref;
1934 update_stmt (c->cand_stmt);
1935 }
1936
1937 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1938 dependent of candidate C with an equivalent strength-reduced data
1939 reference. */
1940
1941 static void
1942 replace_refs (slsr_cand_t c)
1943 {
1944 if (dump_file && (dump_flags & TDF_DETAILS))
1945 {
1946 fputs ("Replacing reference: ", dump_file);
1947 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1948 }
1949
1950 if (gimple_vdef (c->cand_stmt))
1951 {
1952 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1953 replace_ref (lhs, c);
1954 }
1955 else
1956 {
1957 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1958 replace_ref (rhs, c);
1959 }
1960
1961 if (dump_file && (dump_flags & TDF_DETAILS))
1962 {
1963 fputs ("With: ", dump_file);
1964 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1965 fputs ("\n", dump_file);
1966 }
1967
1968 if (c->sibling)
1969 replace_refs (lookup_cand (c->sibling));
1970
1971 if (c->dependent)
1972 replace_refs (lookup_cand (c->dependent));
1973 }
1974
1975 /* Return TRUE if candidate C is dependent upon a PHI. */
1976
1977 static bool
1978 phi_dependent_cand_p (slsr_cand_t c)
1979 {
1980 /* A candidate is not necessarily dependent upon a PHI just because
1981 it has a phi definition for its base name. It may have a basis
1982 that relies upon the same phi definition, in which case the PHI
1983 is irrelevant to this candidate. */
1984 return (c->def_phi
1985 && c->basis
1986 && lookup_cand (c->basis)->def_phi != c->def_phi);
1987 }
1988
1989 /* Calculate the increment required for candidate C relative to
1990 its basis. */
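/* For example, a candidate X = (B + 7) * S whose basis is Y = (B + 4) * S
   has an increment of 7 - 4 = 3.  */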
1991
1992 static widest_int
1993 cand_increment (slsr_cand_t c)
1994 {
1995 slsr_cand_t basis;
1996
1997 /* If the candidate doesn't have a basis, just return its own
1998 index. This is useful in record_increments to help us find
1999 an existing initializer. Also, if the candidate's basis is
2000 hidden by a phi, then its own index will be the increment
2001 from the newly introduced phi basis. */
2002 if (!c->basis || phi_dependent_cand_p (c))
2003 return c->index;
2004
2005 basis = lookup_cand (c->basis);
2006 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2007 return c->index - basis->index;
2008 }
2009
2010 /* Calculate the increment required for candidate C relative to
2011 its basis. If we aren't going to generate pointer arithmetic
2012 for this candidate, return the absolute value of that increment
2013 instead. */
2014
2015 static inline widest_int
2016 cand_abs_increment (slsr_cand_t c)
2017 {
2018 widest_int increment = cand_increment (c);
2019
2020 if (!address_arithmetic_p && wi::neg_p (increment))
2021 increment = -increment;
2022
2023 return increment;
2024 }
2025
2026 /* Return TRUE iff candidate C has already been replaced under
2027 another interpretation. */
2028
2029 static inline bool
2030 cand_already_replaced (slsr_cand_t c)
2031 {
2032 return (gimple_bb (c->cand_stmt) == 0);
2033 }
2034
2035 /* Common logic used by replace_unconditional_candidate and
2036 replace_conditional_candidate. */
2037
2038 static void
2039 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2040 {
2041 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2042 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2043
2044 /* It is highly unlikely, but possible, that the resulting
2045 bump doesn't fit in a HWI. Abandon the replacement
2046 in this case. This does not affect siblings or dependents
2047 of C. Restriction to signed HWI is conservative for unsigned
2048 types but allows for safe negation without twisted logic. */
2049 if (wi::fits_shwi_p (bump)
2050 && bump.to_shwi () != HOST_WIDE_INT_MIN
2051 /* It is not useful to replace casts, copies, or adds of
2052 an SSA name and a constant. */
2053 && cand_code != MODIFY_EXPR
2054 && !CONVERT_EXPR_CODE_P (cand_code)
2055 && cand_code != PLUS_EXPR
2056 && cand_code != POINTER_PLUS_EXPR
2057 && cand_code != MINUS_EXPR)
2058 {
2059 enum tree_code code = PLUS_EXPR;
2060 tree bump_tree;
2061 gimple stmt_to_print = NULL;
2062
2063 /* If the basis name and the candidate's LHS have incompatible
2064 types, introduce a cast. */
2065 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2066 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2067 if (wi::neg_p (bump))
2068 {
2069 code = MINUS_EXPR;
2070 bump = -bump;
2071 }
2072
2073 bump_tree = wide_int_to_tree (target_type, bump);
2074
2075 if (dump_file && (dump_flags & TDF_DETAILS))
2076 {
2077 fputs ("Replacing: ", dump_file);
2078 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2079 }
2080
2081 if (bump == 0)
2082 {
2083 tree lhs = gimple_assign_lhs (c->cand_stmt);
2084 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2085 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2086 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2087 gsi_replace (&gsi, copy_stmt, false);
2088 c->cand_stmt = copy_stmt;
2089 if (dump_file && (dump_flags & TDF_DETAILS))
2090 stmt_to_print = copy_stmt;
2091 }
2092 else
2093 {
2094 tree rhs1, rhs2;
2095 if (cand_code != NEGATE_EXPR) {
2096 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2097 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2098 }
2099 if (cand_code != NEGATE_EXPR
2100 && ((operand_equal_p (rhs1, basis_name, 0)
2101 && operand_equal_p (rhs2, bump_tree, 0))
2102 || (operand_equal_p (rhs1, bump_tree, 0)
2103 && operand_equal_p (rhs2, basis_name, 0))))
2104 {
2105 if (dump_file && (dump_flags & TDF_DETAILS))
2106 {
2107 fputs ("(duplicate, not actually replacing)", dump_file);
2108 stmt_to_print = c->cand_stmt;
2109 }
2110 }
2111 else
2112 {
2113 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2114 gimple_assign_set_rhs_with_ops (&gsi, code,
2115 basis_name, bump_tree);
2116 update_stmt (gsi_stmt (gsi));
2117 c->cand_stmt = gsi_stmt (gsi);
2118 if (dump_file && (dump_flags & TDF_DETAILS))
2119 stmt_to_print = gsi_stmt (gsi);
2120 }
2121 }
2122
2123 if (dump_file && (dump_flags & TDF_DETAILS))
2124 {
2125 fputs ("With: ", dump_file);
2126 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2127 fputs ("\n", dump_file);
2128 }
2129 }
2130 }
2131
2132 /* Replace candidate C with an add or subtract. Note that we only
2133 operate on CAND_MULTs with known strides, so we will never generate
2134 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2135 X = Y + ((i - i') * S), as described in the module commentary. The
2136 folded value ((i - i') * S) is referred to here as the "bump." */
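/* A worked example: for X = (B + 7) * S with basis Y = (B + 4) * S and a
   known stride S = 5, the increment is 3 and the bump is 3 * 5 = 15, so
   the multiply is rewritten as X = Y + 15.  */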
2137
2138 static void
2139 replace_unconditional_candidate (slsr_cand_t c)
2140 {
2141 slsr_cand_t basis;
2142
2143 if (cand_already_replaced (c))
2144 return;
2145
2146 basis = lookup_cand (c->basis);
2147 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2148
2149 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2150 }
2151 \f
2152 /* Return the index in the increment vector of the given INCREMENT,
2153 or -1 if not found. The latter can occur if more than
2154 MAX_INCR_VEC_LEN increments have been found. */
2155
2156 static inline int
2157 incr_vec_index (const widest_int &increment)
2158 {
2159 unsigned i;
2160
2161 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2162 ;
2163
2164 if (i < incr_vec_len)
2165 return i;
2166 else
2167 return -1;
2168 }
2169
2170 /* Create a new statement along edge E to add BASIS_NAME to the product
2171    of INCREMENT and the stride of candidate C.  Create and return a new
2172    SSA name to be used as the LHS of the new statement.
2173 KNOWN_STRIDE is true iff C's stride is a constant. */
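/* For illustration: with a known stride of 4 and an increment of 3, the
   statement added along E has the form <new_lhs> = basis_name + 12 (a
   MINUS_EXPR is used when the folded bump is negative), where <new_lhs>
   is a new SSA name created with the "slsr" prefix.  With an unknown
   stride, the addend is instead the recorded initializer for the
   increment, or the stride itself for increments of 1 and -1.  */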
2174
2175 static tree
2176 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2177 widest_int increment, edge e, location_t loc,
2178 bool known_stride)
2179 {
2180 basic_block insert_bb;
2181 gimple_stmt_iterator gsi;
2182 tree lhs, basis_type;
2183 gassign *new_stmt;
2184
2185 /* If the add candidate along this incoming edge has the same
2186 index as C's hidden basis, the hidden basis represents this
2187 edge correctly. */
2188 if (increment == 0)
2189 return basis_name;
2190
2191 basis_type = TREE_TYPE (basis_name);
2192 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2193
2194 if (known_stride)
2195 {
2196 tree bump_tree;
2197 enum tree_code code = PLUS_EXPR;
2198 widest_int bump = increment * wi::to_widest (c->stride);
2199 if (wi::neg_p (bump))
2200 {
2201 code = MINUS_EXPR;
2202 bump = -bump;
2203 }
2204
2205 bump_tree = wide_int_to_tree (basis_type, bump);
2206 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2207 }
2208 else
2209 {
2210 int i;
2211 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2212 i = incr_vec_index (negate_incr ? -increment : increment);
2213 gcc_assert (i >= 0);
2214
2215 if (incr_vec[i].initializer)
2216 {
2217 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2218 new_stmt = gimple_build_assign (lhs, code, basis_name,
2219 incr_vec[i].initializer);
2220 }
2221 else if (increment == 1)
2222 new_stmt = gimple_build_assign (lhs, PLUS_EXPR, basis_name, c->stride);
2223 else if (increment == -1)
2224 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
2225 c->stride);
2226 else
2227 gcc_unreachable ();
2228 }
2229
2230 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2231 gsi = gsi_last_bb (insert_bb);
2232
2233 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2234 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2235 else
2236 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2237
2238 gimple_set_location (new_stmt, loc);
2239
2240 if (dump_file && (dump_flags & TDF_DETAILS))
2241 {
2242 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2243 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2244 }
2245
2246 return lhs;
2247 }
2248
2249 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2250 is hidden by the phi node FROM_PHI, create a new phi node in the same
2251 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2252 with its phi arguments representing conditional adjustments to the
2253 hidden basis along conditional incoming paths. Those adjustments are
2254 made by creating add statements (and sometimes recursively creating
2255 phis) along those incoming paths. LOC is the location to attach to
2256 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2257 constant. */
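/* For illustration: suppose the hidden basis reaches C through a phi
   t = PHI <a, b>, where a's candidate has the same index as the basis
   and b's candidate has index basis->index + 3.  With a known stride,
   the new phi built below receives the basis name itself on the first
   arc and a newly inserted add of 3 * stride on the second arc.  */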
2258
2259 static tree
2260 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2261 location_t loc, bool known_stride)
2262 {
2263 int i;
2264 tree name, phi_arg;
2265 gphi *phi;
2266 vec<tree> phi_args;
2267 slsr_cand_t basis = lookup_cand (c->basis);
2268 int nargs = gimple_phi_num_args (from_phi);
2269 basic_block phi_bb = gimple_bb (from_phi);
2270 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2271 phi_args.create (nargs);
2272
2273 /* Process each argument of the existing phi that represents
2274 conditionally-executed add candidates. */
2275 for (i = 0; i < nargs; i++)
2276 {
2277 edge e = (*phi_bb->preds)[i];
2278 tree arg = gimple_phi_arg_def (from_phi, i);
2279 tree feeding_def;
2280
2281 /* If the phi argument is the base name of the CAND_PHI, then
2282 this incoming arc should use the hidden basis. */
2283 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2284 if (basis->index == 0)
2285 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2286 else
2287 {
2288 widest_int incr = -basis->index;
2289 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2290 e, loc, known_stride);
2291 }
2292 else
2293 {
2294 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2295
2296 /* If there is another phi along this incoming edge, we must
2297 process it in the same fashion to ensure that all basis
2298 adjustments are made along its incoming edges. */
2299 if (gimple_code (arg_def) == GIMPLE_PHI)
2300 feeding_def = create_phi_basis (c, arg_def, basis_name,
2301 loc, known_stride);
2302 else
2303 {
2304 slsr_cand_t arg_cand = base_cand_from_table (arg);
2305 widest_int diff = arg_cand->index - basis->index;
2306 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2307 e, loc, known_stride);
2308 }
2309 }
2310
2311 /* Because of recursion, we need to save the arguments in a vector
2312 so we can create the PHI statement all at once. Otherwise the
2313 storage for the half-created PHI can be reclaimed. */
2314 phi_args.safe_push (feeding_def);
2315 }
2316
2317 /* Create the new phi basis. */
2318 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2319 phi = create_phi_node (name, phi_bb);
2320 SSA_NAME_DEF_STMT (name) = phi;
2321
2322 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2323 {
2324 edge e = (*phi_bb->preds)[i];
2325 add_phi_arg (phi, phi_arg, e, loc);
2326 }
2327
2328 update_stmt (phi);
2329
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2331 {
2332 fputs ("Introducing new phi basis: ", dump_file);
2333 print_gimple_stmt (dump_file, phi, 0, 0);
2334 }
2335
2336 return name;
2337 }
2338
2339 /* Given a candidate C whose basis is hidden by at least one intervening
2340 phi, introduce a matching number of new phis to represent its basis
2341 adjusted by conditional increments along possible incoming paths. Then
2342 replace C as though it were an unconditional candidate, using the new
2343 basis. */
2344
2345 static void
2346 replace_conditional_candidate (slsr_cand_t c)
2347 {
2348 tree basis_name, name;
2349 slsr_cand_t basis;
2350 location_t loc;
2351
2352 /* Look up the LHS SSA name from C's basis. This will be the
2353 RHS1 of the adds we will introduce to create new phi arguments. */
2354 basis = lookup_cand (c->basis);
2355 basis_name = gimple_assign_lhs (basis->cand_stmt);
2356
2357 /* Create a new phi statement which will represent C's true basis
2358 after the transformation is complete. */
2359 loc = gimple_location (c->cand_stmt);
2360 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2361 basis_name, loc, KNOWN_STRIDE);
2362 /* Replace C with an add of the new basis phi and a constant. */
2363 widest_int bump = c->index * wi::to_widest (c->stride);
2364
2365 replace_mult_candidate (c, name, bump);
2366 }
2367
2368 /* Compute the expected costs of inserting basis adjustments for
2369 candidate C with phi-definition PHI. The cost of inserting
2370 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2371 which are themselves phi results, recursively calculate costs
2372 for those phis as well. */
2373
2374 static int
2375 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2376 {
2377 unsigned i;
2378 int cost = 0;
2379 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2380
2381 /* If we work our way back to a phi that isn't dominated by the hidden
2382 basis, this isn't a candidate for replacement. Indicate this by
2383 returning an unreasonably high cost. It's not easy to detect
2384 these situations when determining the basis, so we defer the
2385 decision until now. */
2386 basic_block phi_bb = gimple_bb (phi);
2387 slsr_cand_t basis = lookup_cand (c->basis);
2388 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2389
2390 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2391 return COST_INFINITE;
2392
2393 for (i = 0; i < gimple_phi_num_args (phi); i++)
2394 {
2395 tree arg = gimple_phi_arg_def (phi, i);
2396
2397 if (arg != phi_cand->base_expr)
2398 {
2399 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2400
2401 if (gimple_code (arg_def) == GIMPLE_PHI)
2402 cost += phi_add_costs (arg_def, c, one_add_cost);
2403 else
2404 {
2405 slsr_cand_t arg_cand = base_cand_from_table (arg);
2406
2407 if (arg_cand->index != c->index)
2408 cost += one_add_cost;
2409 }
2410 }
2411 }
2412
2413 return cost;
2414 }
2415
2416 /* For candidate C, each sibling of candidate C, and each dependent of
2417 candidate C, determine whether the candidate is dependent upon a
2418 phi that hides its basis. If not, replace the candidate unconditionally.
2419 Otherwise, determine whether the cost of introducing compensation code
2420 for the candidate is offset by the gains from strength reduction. If
2421 so, replace the candidate and introduce the compensation code. */
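/* For illustration: with a one-add cost of 1 and a phi whose two
   arguments each require a compensating add, add_costs is 1 + 2 = 3;
   if the replaced multiply costs 4 and nothing else goes dead, the net
   cost is 3 - 4 - 0 = -1, which satisfies the cost <= COST_NEUTRAL
   test, so the candidate is replaced.  */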
2422
2423 static void
2424 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2425 {
2426 if (phi_dependent_cand_p (c))
2427 {
2428 if (c->kind == CAND_MULT)
2429 {
2430 /* A candidate dependent upon a phi will replace a multiply by
2431 a constant with an add, and will insert at most one add for
2432 each phi argument. Add these costs with the potential dead-code
2433 savings to determine profitability. */
2434 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2435 int mult_savings = stmt_cost (c->cand_stmt, speed);
2436 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2437 tree phi_result = gimple_phi_result (phi);
2438 int one_add_cost = add_cost (speed,
2439 TYPE_MODE (TREE_TYPE (phi_result)));
2440 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2441 int cost = add_costs - mult_savings - c->dead_savings;
2442
2443 if (dump_file && (dump_flags & TDF_DETAILS))
2444 {
2445 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2446 fprintf (dump_file, " add_costs = %d\n", add_costs);
2447 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2448 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2449 fprintf (dump_file, " cost = %d\n", cost);
2450 if (cost <= COST_NEUTRAL)
2451 fputs (" Replacing...\n", dump_file);
2452 else
2453 fputs (" Not replaced.\n", dump_file);
2454 }
2455
2456 if (cost <= COST_NEUTRAL)
2457 replace_conditional_candidate (c);
2458 }
2459 }
2460 else
2461 replace_unconditional_candidate (c);
2462
2463 if (c->sibling)
2464 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2465
2466 if (c->dependent)
2467 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2468 }
2469 \f
2470 /* Count the number of candidates in the tree rooted at C that have
2471 not already been replaced under other interpretations. */
2472
2473 static int
2474 count_candidates (slsr_cand_t c)
2475 {
2476 unsigned count = cand_already_replaced (c) ? 0 : 1;
2477
2478 if (c->sibling)
2479 count += count_candidates (lookup_cand (c->sibling));
2480
2481 if (c->dependent)
2482 count += count_candidates (lookup_cand (c->dependent));
2483
2484 return count;
2485 }
2486
2487 /* Increase the count of INCREMENT by one in the increment vector.
2488 INCREMENT is associated with candidate C. If INCREMENT is to be
2489 conditionally executed as part of a conditional candidate replacement,
2490 IS_PHI_ADJUST is true, otherwise false. If an initializer
2491 T_0 = stride * I is provided by a candidate that dominates all
2492 candidates with the same increment, also record T_0 for subsequent use. */
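/* For illustration: a CAND_ADD x = y + T interpreted as y + (8 * S),
   where T is an SSA name already holding 8 * S, records T as the
   initializer for increment 8, provided T's definition turns out to
   dominate every other candidate using that increment.  */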
2493
2494 static void
2495 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2496 {
2497 bool found = false;
2498 unsigned i;
2499
2500 /* Treat increments that differ only in sign as identical so as to
2501 share initializers, unless we are generating pointer arithmetic. */
2502 if (!address_arithmetic_p && wi::neg_p (increment))
2503 increment = -increment;
2504
2505 for (i = 0; i < incr_vec_len; i++)
2506 {
2507 if (incr_vec[i].incr == increment)
2508 {
2509 incr_vec[i].count++;
2510 found = true;
2511
2512 /* If we previously recorded an initializer that doesn't
2513 dominate this candidate, it's not going to be useful to
2514 us after all. */
2515 if (incr_vec[i].initializer
2516 && !dominated_by_p (CDI_DOMINATORS,
2517 gimple_bb (c->cand_stmt),
2518 incr_vec[i].init_bb))
2519 {
2520 incr_vec[i].initializer = NULL_TREE;
2521 incr_vec[i].init_bb = NULL;
2522 }
2523
2524 break;
2525 }
2526 }
2527
2528 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2529 {
2530 /* The first time we see an increment, create the entry for it.
2531 If this is the root candidate which doesn't have a basis, set
2532 the count to zero. We're only processing it so it can possibly
2533 provide an initializer for other candidates. */
2534 incr_vec[incr_vec_len].incr = increment;
2535 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2536 incr_vec[incr_vec_len].cost = COST_INFINITE;
2537
2538 /* Optimistically record the first occurrence of this increment
2539 as providing an initializer (if it does); we will revise this
2540 opinion later if it doesn't dominate all other occurrences.
2541 Exception: increments of -1, 0, 1 never need initializers;
2542 and phi adjustments don't ever provide initializers. */
2543 if (c->kind == CAND_ADD
2544 && !is_phi_adjust
2545 && c->index == increment
2546 && (wi::gts_p (increment, 1)
2547 || wi::lts_p (increment, -1))
2548 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2549 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2550 {
2551 tree t0 = NULL_TREE;
2552 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2553 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2554 if (operand_equal_p (rhs1, c->base_expr, 0))
2555 t0 = rhs2;
2556 else if (operand_equal_p (rhs2, c->base_expr, 0))
2557 t0 = rhs1;
2558 if (t0
2559 && SSA_NAME_DEF_STMT (t0)
2560 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2561 {
2562 incr_vec[incr_vec_len].initializer = t0;
2563 incr_vec[incr_vec_len++].init_bb
2564 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2565 }
2566 else
2567 {
2568 incr_vec[incr_vec_len].initializer = NULL_TREE;
2569 incr_vec[incr_vec_len++].init_bb = NULL;
2570 }
2571 }
2572 else
2573 {
2574 incr_vec[incr_vec_len].initializer = NULL_TREE;
2575 incr_vec[incr_vec_len++].init_bb = NULL;
2576 }
2577 }
2578 }
2579
2580 /* Given phi statement PHI that hides a candidate from its BASIS, find
2581 the increments along each incoming arc (recursively handling additional
2582 phis that may be present) and record them. These increments are the
2583 difference in index between the index-adjusting statements and the
2584 index of the basis. */
2585
2586 static void
2587 record_phi_increments (slsr_cand_t basis, gimple phi)
2588 {
2589 unsigned i;
2590 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2591
2592 for (i = 0; i < gimple_phi_num_args (phi); i++)
2593 {
2594 tree arg = gimple_phi_arg_def (phi, i);
2595
2596 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2597 {
2598 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2599
2600 if (gimple_code (arg_def) == GIMPLE_PHI)
2601 record_phi_increments (basis, arg_def);
2602 else
2603 {
2604 slsr_cand_t arg_cand = base_cand_from_table (arg);
2605 widest_int diff = arg_cand->index - basis->index;
2606 record_increment (arg_cand, diff, PHI_ADJUST);
2607 }
2608 }
2609 }
2610 }
2611
2612 /* Determine how many times each unique increment occurs in the set
2613 of candidates rooted at C's parent, recording the data in the
2614 increment vector. For each unique increment I, if an initializer
2615 T_0 = stride * I is provided by a candidate that dominates all
2616 candidates with the same increment, also record T_0 for subsequent
2617 use. */
2618
2619 static void
2620 record_increments (slsr_cand_t c)
2621 {
2622 if (!cand_already_replaced (c))
2623 {
2624 if (!phi_dependent_cand_p (c))
2625 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2626 else
2627 {
2628 /* A candidate with a basis hidden by a phi will have one
2629 increment for its relationship to the index represented by
2630 the phi, and potentially additional increments along each
2631 incoming edge. For the root of the dependency tree (which
2632 has no basis), process just the initial index in case it has
2633 an initializer that can be used by subsequent candidates. */
2634 record_increment (c, c->index, NOT_PHI_ADJUST);
2635
2636 if (c->basis)
2637 record_phi_increments (lookup_cand (c->basis),
2638 lookup_cand (c->def_phi)->cand_stmt);
2639 }
2640 }
2641
2642 if (c->sibling)
2643 record_increments (lookup_cand (c->sibling));
2644
2645 if (c->dependent)
2646 record_increments (lookup_cand (c->dependent));
2647 }
2648
2649 /* Add up and return the costs of introducing add statements that
2650 require the increment INCR on behalf of candidate C and phi
2651 statement PHI. Accumulate into *SAVINGS the potential savings
2652 from removing existing statements that feed PHI and have no other
2653 uses. */
2654
2655 static int
2656 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
2657 {
2658 unsigned i;
2659 int cost = 0;
2660 slsr_cand_t basis = lookup_cand (c->basis);
2661 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2662
2663 for (i = 0; i < gimple_phi_num_args (phi); i++)
2664 {
2665 tree arg = gimple_phi_arg_def (phi, i);
2666
2667 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2668 {
2669 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2670
2671 if (gimple_code (arg_def) == GIMPLE_PHI)
2672 {
2673 int feeding_savings = 0;
2674 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2675 if (has_single_use (gimple_phi_result (arg_def)))
2676 *savings += feeding_savings;
2677 }
2678 else
2679 {
2680 slsr_cand_t arg_cand = base_cand_from_table (arg);
2681 widest_int diff = arg_cand->index - basis->index;
2682
2683 if (incr == diff)
2684 {
2685 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2686 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2687 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2688 if (has_single_use (lhs))
2689 *savings += stmt_cost (arg_cand->cand_stmt, true);
2690 }
2691 }
2692 }
2693 }
2694
2695 return cost;
2696 }
2697
2698 /* Return the first candidate in the tree rooted at C that has not
2699 already been replaced, favoring siblings over dependents. */
2700
2701 static slsr_cand_t
2702 unreplaced_cand_in_tree (slsr_cand_t c)
2703 {
2704 if (!cand_already_replaced (c))
2705 return c;
2706
2707 if (c->sibling)
2708 {
2709 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2710 if (sib)
2711 return sib;
2712 }
2713
2714 if (c->dependent)
2715 {
2716 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2717 if (dep)
2718 return dep;
2719 }
2720
2721 return NULL;
2722 }
2723
2724 /* Return TRUE if the candidates in the tree rooted at C should be
2725 optimized for speed, else FALSE. We estimate this based on the block
2726 containing the most dominant candidate in the tree that has not yet
2727 been replaced. */
2728
2729 static bool
2730 optimize_cands_for_speed_p (slsr_cand_t c)
2731 {
2732 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2733 gcc_assert (c2);
2734 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2735 }
2736
2737 /* Add COST_IN to the lowest cost of any dependent path starting at
2738 candidate C or any of its siblings, counting only candidates along
2739 such paths with increment INCR. Assume that replacing a candidate
2740 reduces cost by REPL_SAVINGS. Also account for savings from any
2741 statements that would go dead. If COUNT_PHIS is true, include
2742 costs of introducing feeding statements for conditional candidates. */
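/* For illustration: starting from an initializer cost of 4 with
   REPL_SAVINGS of 2, a straight chain of three candidates that all use
   increment INCR and have no dead-code savings yields 4 - 2 - 2 - 2 = -2,
   so inserting the initializer pays for itself along that path.  */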
2743
2744 static int
2745 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2746 const widest_int &incr, bool count_phis)
2747 {
2748 int local_cost, sib_cost, savings = 0;
2749 widest_int cand_incr = cand_abs_increment (c);
2750
2751 if (cand_already_replaced (c))
2752 local_cost = cost_in;
2753 else if (incr == cand_incr)
2754 local_cost = cost_in - repl_savings - c->dead_savings;
2755 else
2756 local_cost = cost_in - c->dead_savings;
2757
2758 if (count_phis
2759 && phi_dependent_cand_p (c)
2760 && !cand_already_replaced (c))
2761 {
2762 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2763 local_cost += phi_incr_cost (c, incr, phi, &savings);
2764
2765 if (has_single_use (gimple_phi_result (phi)))
2766 local_cost -= savings;
2767 }
2768
2769 if (c->dependent)
2770 local_cost = lowest_cost_path (local_cost, repl_savings,
2771 lookup_cand (c->dependent), incr,
2772 count_phis);
2773
2774 if (c->sibling)
2775 {
2776 sib_cost = lowest_cost_path (cost_in, repl_savings,
2777 lookup_cand (c->sibling), incr,
2778 count_phis);
2779 local_cost = MIN (local_cost, sib_cost);
2780 }
2781
2782 return local_cost;
2783 }
2784
2785 /* Compute the total savings that would accrue from all replacements
2786 in the candidate tree rooted at C, counting only candidates with
2787 increment INCR. Assume that replacing a candidate reduces cost
2788 by REPL_SAVINGS. Also account for savings from statements that
2789 would go dead. */
2790
2791 static int
2792 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2793 bool count_phis)
2794 {
2795 int savings = 0;
2796 widest_int cand_incr = cand_abs_increment (c);
2797
2798 if (incr == cand_incr && !cand_already_replaced (c))
2799 savings += repl_savings + c->dead_savings;
2800
2801 if (count_phis
2802 && phi_dependent_cand_p (c)
2803 && !cand_already_replaced (c))
2804 {
2805 int phi_savings = 0;
2806 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2807 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2808
2809 if (has_single_use (gimple_phi_result (phi)))
2810 savings += phi_savings;
2811 }
2812
2813 if (c->dependent)
2814 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2815 count_phis);
2816
2817 if (c->sibling)
2818 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2819 count_phis);
2820
2821 return savings;
2822 }
2823
2824 /* Use target-specific costs to determine and record which increments
2825 in the current candidate tree are profitable to replace, assuming
2826 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2827 the candidate tree.
2828
2829 One slight limitation here is that we don't account for the possible
2830 introduction of casts in some cases. See replace_one_candidate for
2831 the cases where these are introduced. This should probably be cleaned
2832 up sometime. */
2833
2834 static void
2835 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2836 {
2837 unsigned i;
2838
2839 for (i = 0; i < incr_vec_len; i++)
2840 {
2841 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2842
2843 /* If somehow this increment is bigger than a HWI, we won't
2844 be optimizing candidates that use it. And if the increment
2845 has a count of zero, nothing will be done with it. */
2846 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2847 incr_vec[i].cost = COST_INFINITE;
2848
2849 /* Increments of 0, 1, and -1 are always profitable to replace,
2850 because they always replace a multiply or add with an add or
2851 copy, and may cause one or more existing instructions to go
2852 dead. Exception: -1 can't be assumed to be profitable for
2853 pointer addition. */
2854 else if (incr == 0
2855 || incr == 1
2856 || (incr == -1
2857 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2858 != POINTER_PLUS_EXPR)))
2859 incr_vec[i].cost = COST_NEUTRAL;
2860
2861 /* FORNOW: If we need to add an initializer, give up if a cast from
2862 the candidate's type to its stride's type can lose precision.
2863 This could eventually be handled better by expressly retaining the
2864 result of a cast to a wider type in the stride. Example:
2865
2866 short int _1;
2867 _2 = (int) _1;
2868 _3 = _2 * 10;
2869 _4 = x + _3; ADD: x + (10 * _1) : int
2870 _5 = _2 * 15;
2871 	 _6 = x + _5;    ADD: x + (15 * _1) : int
2872
2873 Right now replacing _6 would cause insertion of an initializer
2874 of the form "short int T = _1 * 5;" followed by a cast to
2875 int, which could overflow incorrectly. Had we recorded _2 or
2876 (int)_1 as the stride, this wouldn't happen. However, doing
2877 this breaks other opportunities, so this will require some
2878 care. */
2879 else if (!incr_vec[i].initializer
2880 && TREE_CODE (first_dep->stride) != INTEGER_CST
2881 && !legal_cast_p_1 (first_dep->stride,
2882 gimple_assign_lhs (first_dep->cand_stmt)))
2883
2884 incr_vec[i].cost = COST_INFINITE;
2885
2886 /* If we need to add an initializer, make sure we don't introduce
2887 a multiply by a pointer type, which can happen in certain cast
2888 scenarios. FIXME: When cleaning up these cast issues, we can
2889 afford to introduce the multiply provided we cast out to an
2890 unsigned int of appropriate size. */
2891 else if (!incr_vec[i].initializer
2892 && TREE_CODE (first_dep->stride) != INTEGER_CST
2893 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2894
2895 incr_vec[i].cost = COST_INFINITE;
2896
2897 /* For any other increment, if this is a multiply candidate, we
2898 must introduce a temporary T and initialize it with
2899 T_0 = stride * increment. When optimizing for speed, walk the
2900 candidate tree to calculate the best cost reduction along any
2901 path; if it offsets the fixed cost of inserting the initializer,
2902 replacing the increment is profitable. When optimizing for
2903 size, instead calculate the total cost reduction from replacing
2904 all candidates with this increment. */
2905 else if (first_dep->kind == CAND_MULT)
2906 {
2907 int cost = mult_by_coeff_cost (incr, mode, speed);
2908 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2909 if (speed)
2910 cost = lowest_cost_path (cost, repl_savings, first_dep,
2911 incr_vec[i].incr, COUNT_PHIS);
2912 else
2913 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2914 COUNT_PHIS);
2915
2916 incr_vec[i].cost = cost;
2917 }
2918
2919 /* If this is an add candidate, the initializer may already
2920 exist, so only calculate the cost of the initializer if it
2921 doesn't. We are replacing one add with another here, so the
2922 known replacement savings is zero. We will account for removal
2923 of dead instructions in lowest_cost_path or total_savings. */
2924 else
2925 {
2926 int cost = 0;
2927 if (!incr_vec[i].initializer)
2928 cost = mult_by_coeff_cost (incr, mode, speed);
2929
2930 if (speed)
2931 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2932 DONT_COUNT_PHIS);
2933 else
2934 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2935 DONT_COUNT_PHIS);
2936
2937 incr_vec[i].cost = cost;
2938 }
2939 }
2940 }
2941
2942 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2943 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2944 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2945 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2946 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2947
2948 static basic_block
2949 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2950 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2951 {
2952 basic_block ncd;
2953
2954 if (!bb1)
2955 {
2956 *where = c2;
2957 return bb2;
2958 }
2959
2960 if (!bb2)
2961 {
2962 *where = c1;
2963 return bb1;
2964 }
2965
2966 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2967
2968 /* If both candidates are in the same block, the earlier
2969 candidate wins. */
2970 if (bb1 == ncd && bb2 == ncd)
2971 {
2972 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2973 *where = c2;
2974 else
2975 *where = c1;
2976 }
2977
2978 /* Otherwise, if one of them produced a candidate in the
2979 dominator, that one wins. */
2980 else if (bb1 == ncd)
2981 *where = c1;
2982
2983 else if (bb2 == ncd)
2984 *where = c2;
2985
2986 /* If neither matches the dominator, neither wins. */
2987 else
2988 *where = NULL;
2989
2990 return ncd;
2991 }
2992
2993 /* Consider all candidates that feed PHI. Find the nearest common
2994 dominator of those candidates requiring the given increment INCR.
2995 Further find and return the nearest common dominator of this result
2996 with block NCD. If the returned block contains one or more of the
2997 candidates, return the earliest candidate in the block in *WHERE. */
2998
2999 static basic_block
3000 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3001 basic_block ncd, slsr_cand_t *where)
3002 {
3003 unsigned i;
3004 slsr_cand_t basis = lookup_cand (c->basis);
3005 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3006
3007 for (i = 0; i < gimple_phi_num_args (phi); i++)
3008 {
3009 tree arg = gimple_phi_arg_def (phi, i);
3010
3011 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3012 {
3013 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3014
3015 if (gimple_code (arg_def) == GIMPLE_PHI)
3016 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3017 where);
3018 else
3019 {
3020 slsr_cand_t arg_cand = base_cand_from_table (arg);
3021 widest_int diff = arg_cand->index - basis->index;
3022 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3023
3024 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3025 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3026 }
3027 }
3028 }
3029
3030 return ncd;
3031 }
3032
3033 /* Consider the candidate C together with any candidates that feed
3034 C's phi dependence (if any). Find and return the nearest common
3035 dominator of those candidates requiring the given increment INCR.
3036 If the returned block contains one or more of the candidates,
3037 return the earliest candidate in the block in *WHERE. */
3038
3039 static basic_block
3040 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3041 {
3042 basic_block ncd = NULL;
3043
3044 if (cand_abs_increment (c) == incr)
3045 {
3046 ncd = gimple_bb (c->cand_stmt);
3047 *where = c;
3048 }
3049
3050 if (phi_dependent_cand_p (c))
3051 ncd = ncd_with_phi (c, incr,
3052 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3053 ncd, where);
3054
3055 return ncd;
3056 }
3057
3058 /* Consider all candidates in the tree rooted at C for which INCR
3059 represents the required increment of C relative to its basis.
3060 Find and return the basic block that most nearly dominates all
3061 such candidates. If the returned block contains one or more of
3062 the candidates, return the earliest candidate in the block in
3063 *WHERE. */
3064
3065 static basic_block
3066 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3067 slsr_cand_t *where)
3068 {
3069 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3070 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3071
3072 /* First find the NCD of all siblings and dependents. */
3073 if (c->sibling)
3074 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3075 incr, &sib_where);
3076 if (c->dependent)
3077 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3078 incr, &dep_where);
3079 if (!sib_ncd && !dep_ncd)
3080 {
3081 new_where = NULL;
3082 ncd = NULL;
3083 }
3084 else if (sib_ncd && !dep_ncd)
3085 {
3086 new_where = sib_where;
3087 ncd = sib_ncd;
3088 }
3089 else if (dep_ncd && !sib_ncd)
3090 {
3091 new_where = dep_where;
3092 ncd = dep_ncd;
3093 }
3094 else
3095 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3096 dep_where, &new_where);
3097
3098 /* If the candidate's increment doesn't match the one we're interested
3099 in (and nor do any increments for feeding defs of a phi-dependence),
3100 then the result depends only on siblings and dependents. */
3101 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3102
3103 if (!this_ncd || cand_already_replaced (c))
3104 {
3105 *where = new_where;
3106 return ncd;
3107 }
3108
3109 /* Otherwise, compare this candidate with the result from all siblings
3110 and dependents. */
3111 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3112
3113 return ncd;
3114 }
3115
3116 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3117
3118 static inline bool
3119 profitable_increment_p (unsigned index)
3120 {
3121 return (incr_vec[index].cost <= COST_NEUTRAL);
3122 }
3123
3124 /* For each profitable increment in the increment vector not equal to
3125 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3126 dominator of all statements in the candidate chain rooted at C
3127 that require that increment, and insert an initializer
3128 T_0 = stride * increment at that location. Record T_0 with the
3129 increment record. */
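/* For illustration: if two candidates in different blocks both require
   an increment of 8 with stride _s, a statement of the form T_0 = _s * 8
   is created (with an "slsr"-prefixed SSA name for T_0) and inserted in
   their nearest common dominating block, before the earliest such
   candidate when one resides in that block; T_0 is then recorded as the
   initializer for increment 8.  */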
3130
3131 static void
3132 insert_initializers (slsr_cand_t c)
3133 {
3134 unsigned i;
3135
3136 for (i = 0; i < incr_vec_len; i++)
3137 {
3138 basic_block bb;
3139 slsr_cand_t where = NULL;
3140 gassign *init_stmt;
3141 tree stride_type, new_name, incr_tree;
3142 widest_int incr = incr_vec[i].incr;
3143
3144 if (!profitable_increment_p (i)
3145 || incr == 1
3146 || (incr == -1
3147 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3148 || incr == 0)
3149 continue;
3150
3151 /* We may have already identified an existing initializer that
3152 will suffice. */
3153 if (incr_vec[i].initializer)
3154 {
3155 if (dump_file && (dump_flags & TDF_DETAILS))
3156 {
3157 fputs ("Using existing initializer: ", dump_file);
3158 print_gimple_stmt (dump_file,
3159 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3160 0, 0);
3161 }
3162 continue;
3163 }
3164
3165 /* Find the block that most closely dominates all candidates
3166 with this increment. If there is at least one candidate in
3167 that block, the earliest one will be returned in WHERE. */
3168 bb = nearest_common_dominator_for_cands (c, incr, &where);
3169
3170 /* Create a new SSA name to hold the initializer's value. */
3171 stride_type = TREE_TYPE (c->stride);
3172 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3173 incr_vec[i].initializer = new_name;
3174
3175 /* Create the initializer and insert it in the latest possible
3176 dominating position. */
3177 incr_tree = wide_int_to_tree (stride_type, incr);
3178 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3179 c->stride, incr_tree);
3180 if (where)
3181 {
3182 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3183 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3184 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3185 }
3186 else
3187 {
3188 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3189 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3190
3191 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3192 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3193 else
3194 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3195
3196 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3197 }
3198
3199 if (dump_file && (dump_flags & TDF_DETAILS))
3200 {
3201 fputs ("Inserting initializer: ", dump_file);
3202 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3203 }
3204 }
3205 }
3206
3207 /* Return TRUE iff all required increments for candidates feeding PHI
3208 are profitable to replace on behalf of candidate C. */
3209
3210 static bool
3211 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3212 {
3213 unsigned i;
3214 slsr_cand_t basis = lookup_cand (c->basis);
3215 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3216
3217 for (i = 0; i < gimple_phi_num_args (phi); i++)
3218 {
3219 tree arg = gimple_phi_arg_def (phi, i);
3220
3221 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3222 {
3223 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3224
3225 if (gimple_code (arg_def) == GIMPLE_PHI)
3226 {
3227 if (!all_phi_incrs_profitable (c, arg_def))
3228 return false;
3229 }
3230 else
3231 {
3232 int j;
3233 slsr_cand_t arg_cand = base_cand_from_table (arg);
3234 widest_int increment = arg_cand->index - basis->index;
3235
3236 if (!address_arithmetic_p && wi::neg_p (increment))
3237 increment = -increment;
3238
3239 j = incr_vec_index (increment);
3240
3241 if (dump_file && (dump_flags & TDF_DETAILS))
3242 {
3243 fprintf (dump_file, " Conditional candidate %d, phi: ",
3244 c->cand_num);
3245 print_gimple_stmt (dump_file, phi, 0, 0);
3246 fputs (" increment: ", dump_file);
3247 print_decs (increment, dump_file);
3248 if (j < 0)
3249 fprintf (dump_file,
3250 "\n Not replaced; incr_vec overflow.\n");
3251 else {
3252 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3253 if (profitable_increment_p (j))
3254 fputs (" Replacing...\n", dump_file);
3255 else
3256 fputs (" Not replaced.\n", dump_file);
3257 }
3258 }
3259
3260 if (j < 0 || !profitable_increment_p (j))
3261 return false;
3262 }
3263 }
3264 }
3265
3266 return true;
3267 }
3268
3269 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3270 type TO_TYPE, and insert it in front of the statement represented
3271    by candidate C.  A new SSA name (with the "slsr" prefix) is created
3272    to hold the result and returned.  */
3273
3274 static tree
3275 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3276 {
3277 tree cast_lhs;
3278 gassign *cast_stmt;
3279 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3280
3281 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3282 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3283 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3284 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3285
3286 if (dump_file && (dump_flags & TDF_DETAILS))
3287 {
3288 fputs (" Inserting: ", dump_file);
3289 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3290 }
3291
3292 return cast_lhs;
3293 }
3294
3295 /* Replace the RHS of the statement represented by candidate C with
3296 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3297 leave C unchanged or just interchange its operands. The original
3298 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3299 If the replacement was made and we are doing a details dump,
3300 return the revised statement, else NULL. */
3301
3302 static gimple
3303 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3304 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3305 slsr_cand_t c)
3306 {
3307 if (new_code != old_code
3308 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3309 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3310 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3311 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3312 {
3313 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3314 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3315 update_stmt (gsi_stmt (gsi));
3316 c->cand_stmt = gsi_stmt (gsi);
3317
3318 if (dump_file && (dump_flags & TDF_DETAILS))
3319 return gsi_stmt (gsi);
3320 }
3321
3322 else if (dump_file && (dump_flags & TDF_DETAILS))
3323 fputs (" (duplicate, not actually replacing)\n", dump_file);
3324
3325 return NULL;
3326 }
3327
3328 /* Strength-reduce the statement represented by candidate C by replacing
3329 it with an equivalent addition or subtraction. I is the index into
3330    the increment vector identifying C's increment.  A new SSA name is
3331    created if a cast needs to be introduced.  BASIS_NAME is the rhs1
3332    to use in creating the add/subtract.  */
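/* For illustration: a candidate with increment 5 and a recorded
   initializer T_0 = stride * 5 becomes lhs = basis_name + T_0 (a
   MINUS_EXPR is used instead when the recorded increment and the
   candidate's increment differ in sign); increments of 1 and -1 become
   an add or subtract of the stride itself, and an increment of 0
   becomes a copy (or cast) of basis_name.  */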
3333
3334 static void
3335 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3336 {
3337 gimple stmt_to_print = NULL;
3338 tree orig_rhs1, orig_rhs2;
3339 tree rhs2;
3340 enum tree_code orig_code, repl_code;
3341 widest_int cand_incr;
3342
3343 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3344 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3345 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3346 cand_incr = cand_increment (c);
3347
3348 if (dump_file && (dump_flags & TDF_DETAILS))
3349 {
3350 fputs ("Replacing: ", dump_file);
3351 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3352 stmt_to_print = c->cand_stmt;
3353 }
3354
3355 if (address_arithmetic_p)
3356 repl_code = POINTER_PLUS_EXPR;
3357 else
3358 repl_code = PLUS_EXPR;
3359
3360 /* If the increment has an initializer T_0, replace the candidate
3361 statement with an add of the basis name and the initializer. */
3362 if (incr_vec[i].initializer)
3363 {
3364 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3365 tree orig_type = TREE_TYPE (orig_rhs2);
3366
3367 if (types_compatible_p (orig_type, init_type))
3368 rhs2 = incr_vec[i].initializer;
3369 else
3370 rhs2 = introduce_cast_before_cand (c, orig_type,
3371 incr_vec[i].initializer);
3372
3373 if (incr_vec[i].incr != cand_incr)
3374 {
3375 gcc_assert (repl_code == PLUS_EXPR);
3376 repl_code = MINUS_EXPR;
3377 }
3378
3379 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3380 orig_code, orig_rhs1, orig_rhs2,
3381 c);
3382 }
3383
3384 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3385 with a subtract of the stride from the basis name, a copy
3386 from the basis name, or an add of the stride to the basis
3387 name, respectively. It may be necessary to introduce a
3388 cast (or reuse an existing cast). */
3389 else if (cand_incr == 1)
3390 {
3391 tree stride_type = TREE_TYPE (c->stride);
3392 tree orig_type = TREE_TYPE (orig_rhs2);
3393
3394 if (types_compatible_p (orig_type, stride_type))
3395 rhs2 = c->stride;
3396 else
3397 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3398
3399 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3400 orig_code, orig_rhs1, orig_rhs2,
3401 c);
3402 }
3403
3404 else if (cand_incr == -1)
3405 {
3406 tree stride_type = TREE_TYPE (c->stride);
3407 tree orig_type = TREE_TYPE (orig_rhs2);
3408 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3409
3410 if (types_compatible_p (orig_type, stride_type))
3411 rhs2 = c->stride;
3412 else
3413 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3414
3415 if (orig_code != MINUS_EXPR
3416 || !operand_equal_p (basis_name, orig_rhs1, 0)
3417 || !operand_equal_p (rhs2, orig_rhs2, 0))
3418 {
3419 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3420 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3421 update_stmt (gsi_stmt (gsi));
3422 c->cand_stmt = gsi_stmt (gsi);
3423
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3425 stmt_to_print = gsi_stmt (gsi);
3426 }
3427 else if (dump_file && (dump_flags & TDF_DETAILS))
3428 fputs (" (duplicate, not actually replacing)\n", dump_file);
3429 }
3430
3431 else if (cand_incr == 0)
3432 {
3433 tree lhs = gimple_assign_lhs (c->cand_stmt);
3434 tree lhs_type = TREE_TYPE (lhs);
3435 tree basis_type = TREE_TYPE (basis_name);
3436
3437 if (types_compatible_p (lhs_type, basis_type))
3438 {
3439 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3440 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3441 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3442 gsi_replace (&gsi, copy_stmt, false);
3443 c->cand_stmt = copy_stmt;
3444
3445 if (dump_file && (dump_flags & TDF_DETAILS))
3446 stmt_to_print = copy_stmt;
3447 }
3448 else
3449 {
3450 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3451 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3452 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3453 gsi_replace (&gsi, cast_stmt, false);
3454 c->cand_stmt = cast_stmt;
3455
3456 if (dump_file && (dump_flags & TDF_DETAILS))
3457 stmt_to_print = cast_stmt;
3458 }
3459 }
3460 else
3461 gcc_unreachable ();
3462
3463 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3464 {
3465 fputs ("With: ", dump_file);
3466 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3467 fputs ("\n", dump_file);
3468 }
3469 }
3470
3471 /* For each candidate in the tree rooted at C, replace it with
3472 an increment if such has been shown to be profitable. */
3473
3474 static void
3475 replace_profitable_candidates (slsr_cand_t c)
3476 {
3477 if (!cand_already_replaced (c))
3478 {
3479 widest_int increment = cand_abs_increment (c);
3480 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3481 int i;
3482
3483 i = incr_vec_index (increment);
3484
3485 /* Only process profitable increments. Nothing useful can be done
3486 to a cast or copy. */
3487 if (i >= 0
3488 && profitable_increment_p (i)
3489 && orig_code != MODIFY_EXPR
3490 && !CONVERT_EXPR_CODE_P (orig_code))
3491 {
3492 if (phi_dependent_cand_p (c))
3493 {
3494 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3495
3496 if (all_phi_incrs_profitable (c, phi))
3497 {
3498 /* Look up the LHS SSA name from C's basis. This will be
3499 the RHS1 of the adds we will introduce to create new
3500 phi arguments. */
3501 slsr_cand_t basis = lookup_cand (c->basis);
3502 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3503
3504 /* Create a new phi statement that will represent C's true
3505 basis after the transformation is complete. */
3506 location_t loc = gimple_location (c->cand_stmt);
3507 tree name = create_phi_basis (c, phi, basis_name,
3508 loc, UNKNOWN_STRIDE);
3509
3510 /* Replace C with an add of the new basis phi and the
3511 increment. */
3512 replace_one_candidate (c, i, name);
3513 }
3514 }
3515 else
3516 {
3517 slsr_cand_t basis = lookup_cand (c->basis);
3518 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3519 replace_one_candidate (c, i, basis_name);
3520 }
3521 }
3522 }
3523
3524 if (c->sibling)
3525 replace_profitable_candidates (lookup_cand (c->sibling));
3526
3527 if (c->dependent)
3528 replace_profitable_candidates (lookup_cand (c->dependent));
3529 }
3530 \f
3531 /* Analyze costs of related candidates in the candidate vector,
3532 and make beneficial replacements. */
3533
3534 static void
3535 analyze_candidates_and_replace (void)
3536 {
3537 unsigned i;
3538 slsr_cand_t c;
3539
3540 /* Each candidate that has a null basis and a non-null
3541 dependent is the root of a tree of related statements.
3542 Analyze each tree to determine a subset of those
3543 statements that can be replaced with maximum benefit. */
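/* Illustrative shape of such a tree (hypothetical candidate numbers):
   candidate 1 with a null basis is a root; its DEPENDENT field names
   the first candidate based on it, and further candidates sharing
   that basis are reached through the SIBLING chain, which is how the
   replacement routines recurse below.  */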
3544 FOR_EACH_VEC_ELT (cand_vec, i, c)
3545 {
3546 slsr_cand_t first_dep;
3547
3548 if (c->basis != 0 || c->dependent == 0)
3549 continue;
3550
3551 if (dump_file && (dump_flags & TDF_DETAILS))
3552 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3553 c->cand_num);
3554
3555 first_dep = lookup_cand (c->dependent);
3556
3557 /* If this is a chain of CAND_REFs, unconditionally replace
3558 each of them with a strength-reduced data reference. */
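/* E.g. (illustrative), a chain of references such as
       ... = a[i].f;  ... = a[i].g;
   is rewritten so that the shared part of each address computation is
   made explicit and reused rather than recomputed per access, leaving
   later passes to form efficient addressing modes.  */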
3559 if (c->kind == CAND_REF)
3560 replace_refs (c);
3561
3562 /* If the common stride of all related candidates is a known
3563 constant, each candidate without a phi-dependence can be
3564 profitably replaced. Each replaces a multiply by a single
3565 add, with the possibility that a feeding add also goes dead.
3566 A candidate with a phi-dependence is replaced only if the
3567 compensation code it requires is offset by the strength
3568 reduction savings. */
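/* Hypothetical example with a constant stride of 4:
       x1 = (b + 1) * 4;   <-- basis
       x2 = (b + 3) * 4;   <-- candidate, increment (3 - 1) * 4 = 8
   x2 becomes x1 + 8, trading the multiply for a single add.  */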
3569 else if (TREE_CODE (c->stride) == INTEGER_CST)
3570 replace_uncond_cands_and_profitable_phis (first_dep);
3571
3572 /* When the stride is an SSA name, it may still be profitable
3573 to replace some or all of the dependent candidates, depending
3574 on whether the introduced increments can be reused, or are
3575 less expensive to calculate than the replaced statements. */
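/* Hypothetical example with an SSA-name stride s:
       x1 = (b + 1) * s;
       x2 = (b + 3) * s;
       x3 = (b + 5) * s;
   A single temporary holding 2 * s can feed both replacements
   (x2 = x1 + t, x3 = x2 + t), so two multiplies are traded for one
   multiply and two adds when that is cheaper.  */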
3576 else
3577 {
3578 machine_mode mode;
3579 bool speed;
3580
3581 /* Determine whether we'll be generating pointer arithmetic
3582 when replacing candidates. */
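/* E.g. a CAND_ADD whose value is a pointer (hypothetically,
   p2 = p1 + i * 4 with pointer-typed P2) implies the replacements
   will be address computations, which are costed differently from
   plain integer arithmetic.  */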
3583 address_arithmetic_p = (c->kind == CAND_ADD
3584 && POINTER_TYPE_P (c->cand_type));
3585
3586 /* If all candidates have already been replaced under other
3587 interpretations, nothing remains to be done. */
3588 if (!count_candidates (c))
3589 continue;
3590
3591 /* Construct an array of increments for this candidate chain. */
3592 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3593 incr_vec_len = 0;
3594 record_increments (c);
3595
3596 /* Determine which increments are profitable to replace. */
3597 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3598 speed = optimize_cands_for_speed_p (c);
3599 analyze_increments (first_dep, mode, speed);
3600
3601 /* Insert initializers of the form T_0 = stride * increment
3602 for use in profitable replacements. */
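/* E.g. (illustrative), for an increment of 2 and stride s_1, this may
   emit t_0 = s_1 * 2 at a point dominating the uses, so that each
   profitable candidate then needs only a single add.  */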
3603 insert_initializers (first_dep);
3604 dump_incr_vec ();
3605
3606 /* Perform the replacements. */
3607 replace_profitable_candidates (first_dep);
3608 free (incr_vec);
3609 }
3610 }
3611 }
3612
3613 namespace {
3614
3615 const pass_data pass_data_strength_reduction =
3616 {
3617 GIMPLE_PASS, /* type */
3618 "slsr", /* name */
3619 OPTGROUP_NONE, /* optinfo_flags */
3620 TV_GIMPLE_SLSR, /* tv_id */
3621 ( PROP_cfg | PROP_ssa ), /* properties_required */
3622 0, /* properties_provided */
3623 0, /* properties_destroyed */
3624 0, /* todo_flags_start */
3625 0, /* todo_flags_finish */
3626 };
3627
3628 class pass_strength_reduction : public gimple_opt_pass
3629 {
3630 public:
3631 pass_strength_reduction (gcc::context *ctxt)
3632 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3633 {}
3634
3635 /* opt_pass methods: */
3636 virtual bool gate (function *) { return flag_tree_slsr; }
3637 virtual unsigned int execute (function *);
3638
3639 }; // class pass_strength_reduction
3640
3641 unsigned
3642 pass_strength_reduction::execute (function *fun)
3643 {
3644 /* Create the obstack where candidates will reside. */
3645 gcc_obstack_init (&cand_obstack);
3646
3647 /* Allocate the candidate vector. */
3648 cand_vec.create (128);
3649
3650 /* Allocate the mapping from statements to candidate indices. */
3651 stmt_cand_map = new hash_map<gimple, slsr_cand_t>;
3652
3653 /* Create the obstack where candidate chains will reside. */
3654 gcc_obstack_init (&chain_obstack);
3655
3656 /* Allocate the mapping from base expressions to candidate chains. */
3657 base_cand_map = new hash_table<cand_chain_hasher> (500);
3658
3659 /* Allocate the mapping from bases to alternative bases. */
3660 alt_base_map = new hash_map<tree, tree>;
3661
3662 /* Initialize the loop optimizer. We need to detect flow across
3663 back edges, and this gives us dominator information as well. */
3664 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3665
3666 /* Walk the CFG in predominator order looking for strength reduction
3667 candidates. */
3668 find_candidates_dom_walker (CDI_DOMINATORS)
3669 .walk (fun->cfg->x_entry_block_ptr);
3670
3671 if (dump_file && (dump_flags & TDF_DETAILS))
3672 {
3673 dump_cand_vec ();
3674 dump_cand_chains ();
3675 }
3676
3677 delete alt_base_map;
3678 free_affine_expand_cache (&name_expansions);
3679
3680 /* Analyze costs and make appropriate replacements. */
3681 analyze_candidates_and_replace ();
3682
3683 loop_optimizer_finalize ();
3684 delete base_cand_map;
3685 base_cand_map = NULL;
3686 obstack_free (&chain_obstack, NULL);
3687 delete stmt_cand_map;
3688 cand_vec.release ();
3689 obstack_free (&cand_obstack, NULL);
3690
3691 return 0;
3692 }
3693
3694 } // anon namespace
3695
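/* External entry point for the pass.  The declaration lives in
   tree-pass.h, and the pass is placed in the pipeline via passes.def
   (e.g. NEXT_PASS (pass_strength_reduction)).  */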
3696 gimple_opt_pass *
3697 make_pass_strength_reduction (gcc::context *ctxt)
3698 {
3699 return new pass_strength_reduction (ctxt);
3700 }