1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2019 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "tree-eh.h"
59 #include "builtins.h"
60 \f
61 /* Information about a strength reduction candidate. Each statement
62 in the candidate table represents an expression of one of the
63 following forms (the special case of CAND_REF will be described
64 later):
65
66 (CAND_MULT) S1: X = (B + i) * S
67 (CAND_ADD) S1: X = B + (i * S)
68
69 Here X and B are SSA names, i is an integer constant, and S is
70 either an SSA name or a constant. We call B the "base," i the
71 "index", and S the "stride."
72
73 Any statement S0 that dominates S1 and is of the form:
74
75 (CAND_MULT) S0: Y = (B + i') * S
76 (CAND_ADD) S0: Y = B + (i' * S)
77
78 is called a "basis" for S1. In both cases, S1 may be replaced by
79
80 S1': X = Y + (i - i') * S,
81
82 where (i - i') * S is folded to the extent possible.
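        As a purely illustrative example, if S0 is Y = (B + 2) * 4 and
        S1 is X = (B + 7) * 4, then S1 may be replaced by X = Y + 20,
        since (7 - 2) * 4 folds to the constant 20.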
83
84 All gimple statements are visited in dominator order, and each
85 statement that may contribute to one of the forms of S1 above is
86 given at least one entry in the candidate table. Such statements
87 include addition, pointer addition, subtraction, multiplication,
88 negation, copies, and nontrivial type casts. If a statement may
89 represent more than one expression of the forms of S1 above,
90 multiple "interpretations" are stored in the table and chained
91 together. Examples:
92
93 * An add of two SSA names may treat either operand as the base.
94 * A multiply of two SSA names, likewise.
95 * A copy or cast may be thought of as either a CAND_MULT with
96 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
97
98 Candidate records are allocated from an obstack. They are addressed
99 both from a hash table keyed on S1, and from a vector of candidate
100 pointers arranged in predominator order.
101
102 Opportunity note
103 ----------------
104 Currently we don't recognize:
105
106 S0: Y = (S * i') - B
107 S1: X = (S * i) - B
108
109 as a strength reduction opportunity, even though this S1 would
110 also be replaceable by the S1' above. This can be added if it
111 comes up in practice.
112
113 Strength reduction in addressing
114 --------------------------------
115 There is another kind of candidate known as CAND_REF. A CAND_REF
116 describes a statement containing a memory reference having
117 complex addressing that might benefit from strength reduction.
118 Specifically, we are interested in references for which
119 get_inner_reference returns a base address, offset, and bitpos as
120 follows:
121
122 base: MEM_REF (T1, C1)
123 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
124 bitpos: C4 * BITS_PER_UNIT
125
126 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
127 arbitrary integer constants. Note that C2 may be zero, in which
128 case the offset will be MULT_EXPR (T2, C3).
129
130 When this pattern is recognized, the original memory reference
131 can be replaced with:
132
133 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
134 C1 + (C2 * C3) + C4)
135
136 which distributes the multiply to allow constant folding. When
137 two or more addressing expressions can be represented by MEM_REFs
138 of this form, differing only in the constants C1, C2, and C4,
139 making this substitution produces more efficient addressing during
140 the RTL phases. When there are not at least two expressions with
141 the same values of T1, T2, and C3, there is nothing to be gained
142 by the replacement.
143
144 Strength reduction of CAND_REFs uses the same infrastructure as
145 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
146 field, MULT_EXPR (T2, C3) in the stride (S) field, and
147 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
148 is thus another CAND_REF with the same B and S values. When at
149 least two CAND_REFs are chained together using the basis relation,
150 each of them is replaced as above, resulting in improved code
151 generation for addressing.
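        A purely illustrative instance: for a reference whose base is
        MEM_REF (T1, 4), whose offset is MULT_EXPR (PLUS_EXPR (T2, 2), 8),
        and whose bitpos is 0, we record T1 in B, MULT_EXPR (T2, 8) in S,
        and 4 + (2 * 8) + 0 = 20 in i; the reference can then be rewritten
        as MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 20).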
152
153 Conditional candidates
154 ======================
155
156 Conditional candidates are best illustrated with an example.
157 Consider the code sequence:
158
159 (1) x_0 = ...;
160 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
161 if (...)
162 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
163 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
164 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
165 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
166
167 Here strength reduction is complicated by the uncertain value of x_2.
168 A legitimate transformation is:
169
170 (1) x_0 = ...;
171 (2) a_0 = x_0 * 5;
172 if (...)
173 {
174 (3) [x_1 = x_0 + 1;]
175 (3a) t_1 = a_0 + 5;
176 }
177 (4) [x_2 = PHI <x_0, x_1>;]
178 (4a) t_2 = PHI <a_0, t_1>;
179 (5) [x_3 = x_2 + 1;]
180 (6r) a_1 = t_2 + 5;
181
182 where the bracketed instructions may go dead.
183
184 To recognize this opportunity, we have to observe that statement (6)
185 has a "hidden basis" (2). The hidden basis is unlike a normal basis
186 in that the statement and the hidden basis have different base SSA
187 names (x_2 and x_0, respectively). The relationship is established
188 when a statement's base name (x_2) is defined by a phi statement (4),
189 each argument of which (x_0, x_1) has an identical "derived base name."
190 If the argument is defined by a candidate (as x_1 is by (3)) that is a
191 CAND_ADD having a stride of 1, the derived base name of the argument is
192 the base name of the candidate (x_0). Otherwise, the argument itself
193 is its derived base name (as is the case with argument x_0).
194
195 The hidden basis for statement (6) is the nearest dominating candidate
196 whose base name is the derived base name (x_0) of the feeding phi (4),
197 and whose stride is identical to that of the statement. We can then
198 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
199 allowing the final replacement of (6) by the strength-reduced (6r).
200
201 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
202 A CAND_PHI is not a candidate for replacement, but is maintained in the
203 candidate table to ease discovery of hidden bases. Any phi statement
204 whose arguments share a common derived base name is entered into the
205 table with the derived base name, an (arbitrary) index of zero, and a
206 stride of 1. A statement with a hidden basis can then be detected by
207 simply looking up its feeding phi definition in the candidate table,
208 extracting the derived base name, and searching for a basis in the
209 usual manner after substituting the derived base name.
210
211 Note that the transformation is only valid when the original phi and
212 the statements that define the phi's arguments are all at the same
213 position in the loop hierarchy. */
214
215
216 /* Index into the candidate vector, offset by 1. VECs are zero-based,
217 while cand_idx's are one-based, with zero indicating null. */
218 typedef unsigned cand_idx;
219
220 /* The kind of candidate. */
221 enum cand_kind
222 {
223 CAND_MULT,
224 CAND_ADD,
225 CAND_REF,
226 CAND_PHI
227 };
228
229 class slsr_cand_d
230 {
231 public:
232 /* The candidate statement S1. */
233 gimple *cand_stmt;
234
235 /* The base expression B: often an SSA name, but not always. */
236 tree base_expr;
237
238 /* The stride S. */
239 tree stride;
240
241 /* The index constant i. */
242 widest_int index;
243
244 /* The type of the candidate. This is normally the type of base_expr,
245 but casts may have occurred when combining feeding instructions.
246 A candidate can only be a basis for candidates of the same final type.
247 (For CAND_REFs, this is the type to be used for operand 1 of the
248 replacement MEM_REF.) */
249 tree cand_type;
250
251 /* The type to be used to interpret the stride field when the stride
252 is not a constant. Normally the same as the type of the recorded
253 stride, but when the stride has been cast we need to maintain that
254 knowledge in order to make legal substitutions without losing
255 precision. When the stride is a constant, this will be sizetype. */
256 tree stride_type;
257
258 /* The kind of candidate (CAND_MULT, etc.). */
259 enum cand_kind kind;
260
261 /* Index of this candidate in the candidate vector. */
262 cand_idx cand_num;
263
264 /* Index of the next candidate record for the same statement.
265 A statement may be useful in more than one way (e.g., due to
266 commutativity). So we can have multiple "interpretations"
267 of a statement. */
268 cand_idx next_interp;
269
270 /* Index of the first candidate record in a chain for the same
271 statement. */
272 cand_idx first_interp;
273
274 /* Index of the basis statement S0, if any, in the candidate vector. */
275 cand_idx basis;
276
277 /* First candidate for which this candidate is a basis, if one exists. */
278 cand_idx dependent;
279
280 /* Next candidate having the same basis as this one. */
281 cand_idx sibling;
282
283 /* If this is a conditional candidate, the CAND_PHI candidate
284 that defines the base SSA name B. */
285 cand_idx def_phi;
286
287 /* Savings that can be expected from eliminating dead code if this
288 candidate is replaced. */
289 int dead_savings;
290
291 /* For PHI candidates, use a visited flag to keep from processing the
292 same PHI twice from multiple paths. */
293 int visited;
294
295 /* We sometimes have to cache a phi basis with a phi candidate to
296 avoid processing it twice. Valid only if visited==1. */
297 tree cached_basis;
298 };
299
300 typedef class slsr_cand_d slsr_cand, *slsr_cand_t;
301 typedef const class slsr_cand_d *const_slsr_cand_t;
302
303 /* Pointers to candidates are chained together as part of a mapping
304 from base expressions to the candidates that use them. */
305
306 struct cand_chain_d
307 {
308 /* Base expression for the chain of candidates: often, but not
309 always, an SSA name. */
310 tree base_expr;
311
312 /* Pointer to a candidate. */
313 slsr_cand_t cand;
314
315 /* Chain pointer. */
316 struct cand_chain_d *next;
317
318 };
319
320 typedef struct cand_chain_d cand_chain, *cand_chain_t;
321 typedef const struct cand_chain_d *const_cand_chain_t;
322
323 /* Information about a unique "increment" associated with candidates
324 having an SSA name for a stride. An increment is the difference
325 between the index of the candidate and the index of its basis,
326 i.e., (i - i') as discussed in the module commentary.
327
328 When we are not going to generate address arithmetic we treat
329 increments that differ only in sign as the same, allowing sharing
330 of the cost of initializers. The absolute value of the increment
331 is stored in the incr_info. */
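        /* Illustrative example: a candidate with index 7 whose basis has
           index 3, both sharing SSA stride S, yields the increment 4; as
           described for the initializer field below, replacing such
           candidates then relies on an initializer T_0 = S * 4.  */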
332
333 class incr_info_d
334 {
335 public:
336 /* The increment that relates a candidate to its basis. */
337 widest_int incr;
338
339 /* How many times the increment occurs in the candidate tree. */
340 unsigned count;
341
342 /* Cost of replacing candidates using this increment. Negative and
343 zero costs indicate replacement should be performed. */
344 int cost;
345
346 /* If this increment is profitable but is not -1, 0, or 1, it requires
347 an initializer T_0 = stride * incr to be found or introduced in the
348 nearest common dominator of all candidates. This field holds T_0
349 for subsequent use. */
350 tree initializer;
351
352 /* If the initializer was found to already exist, this is the block
353 where it was found. */
354 basic_block init_bb;
355 };
356
357 typedef class incr_info_d incr_info, *incr_info_t;
358
359 /* Candidates are maintained in a vector. If candidate X dominates
360 candidate Y, then X appears before Y in the vector; but the
361 converse does not necessarily hold. */
362 static vec<slsr_cand_t> cand_vec;
363
364 enum cost_consts
365 {
366 COST_NEUTRAL = 0,
367 COST_INFINITE = 1000
368 };
369
370 enum stride_status
371 {
372 UNKNOWN_STRIDE = 0,
373 KNOWN_STRIDE = 1
374 };
375
376 enum phi_adjust_status
377 {
378 NOT_PHI_ADJUST = 0,
379 PHI_ADJUST = 1
380 };
381
382 enum count_phis_status
383 {
384 DONT_COUNT_PHIS = 0,
385 COUNT_PHIS = 1
386 };
387
388 /* Constrain how many PHI nodes we will visit for a conditional
389 candidate (depth and breadth). */
390 const int MAX_SPREAD = 16;
391
392 /* Pointer map embodying a mapping from statements to candidates. */
393 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
394
395 /* Obstack for candidates. */
396 static struct obstack cand_obstack;
397
398 /* Obstack for candidate chains. */
399 static struct obstack chain_obstack;
400
401 /* An array INCR_VEC of incr_infos is used during analysis of related
402 candidates having an SSA name for a stride. INCR_VEC_LEN describes
403 its current length. MAX_INCR_VEC_LEN is used to avoid costly
404 pathological cases. */
405 static incr_info_t incr_vec;
406 static unsigned incr_vec_len;
407 const int MAX_INCR_VEC_LEN = 16;
408
409 /* For a chain of candidates with unknown stride, indicates whether or not
410 we must generate pointer arithmetic when replacing statements. */
411 static bool address_arithmetic_p;
412
413 /* Forward function declarations. */
414 static slsr_cand_t base_cand_from_table (tree);
415 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
416 static bool legal_cast_p_1 (tree, tree);
417 \f
418 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
419
420 static slsr_cand_t
421 lookup_cand (cand_idx idx)
422 {
423 return cand_vec[idx];
424 }
425
426 /* Helper for hashing a candidate chain header. */
427
428 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
429 {
430 static inline hashval_t hash (const cand_chain *);
431 static inline bool equal (const cand_chain *, const cand_chain *);
432 };
433
434 inline hashval_t
435 cand_chain_hasher::hash (const cand_chain *p)
436 {
437 tree base_expr = p->base_expr;
438 return iterative_hash_expr (base_expr, 0);
439 }
440
441 inline bool
442 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
443 {
444 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
445 }
446
447 /* Hash table embodying a mapping from base exprs to chains of candidates. */
448 static hash_table<cand_chain_hasher> *base_cand_map;
449 \f
450 /* Pointer map used by tree_to_aff_combination_expand. */
451 static hash_map<tree, name_expansion *> *name_expansions;
452 /* Pointer map embodying a mapping from bases to alternative bases. */
453 static hash_map<tree, tree> *alt_base_map;
454
 455    /* Given BASE, use the tree affine combination facilities to
456 find the underlying tree expression for BASE, with any
457 immediate offset excluded.
458
459 N.B. we should eliminate this backtracking with better forward
460 analysis in a future release. */
461
462 static tree
463 get_alternative_base (tree base)
464 {
465 tree *result = alt_base_map->get (base);
466
467 if (result == NULL)
468 {
469 tree expr;
470 aff_tree aff;
471
472 tree_to_aff_combination_expand (base, TREE_TYPE (base),
473 &aff, &name_expansions);
474 aff.offset = 0;
475 expr = aff_combination_to_tree (&aff);
476
477 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
478
479 return expr == base ? NULL : expr;
480 }
481
482 return *result;
483 }
484
485 /* Look in the candidate table for a CAND_PHI that defines BASE and
486 return it if found; otherwise return NULL. */
487
488 static cand_idx
489 find_phi_def (tree base)
490 {
491 slsr_cand_t c;
492
493 if (TREE_CODE (base) != SSA_NAME)
494 return 0;
495
496 c = base_cand_from_table (base);
497
498 if (!c || c->kind != CAND_PHI
499 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (c->cand_stmt)))
500 return 0;
501
502 return c->cand_num;
503 }
504
505 /* Determine whether all uses of NAME are directly or indirectly
 506       used by STMT.  That is, we want to know whether, if STMT goes
 507       dead, the definition of NAME also goes dead.  */
508 static bool
509 uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
510 {
511 gimple *use_stmt;
512 imm_use_iterator iter;
513 bool retval = true;
514
515 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
516 {
517 if (use_stmt == stmt || is_gimple_debug (use_stmt))
518 continue;
519
520 if (!is_gimple_assign (use_stmt)
521 || !gimple_get_lhs (use_stmt)
522 || !is_gimple_reg (gimple_get_lhs (use_stmt))
523 || recurse >= 10
524 || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt), stmt,
525 recurse + 1))
526 {
527 retval = false;
528 BREAK_FROM_IMM_USE_STMT (iter);
529 }
530 }
531
532 return retval;
533 }
534
535 /* Helper routine for find_basis_for_candidate. May be called twice:
536 once for the candidate's base expr, and optionally again either for
537 the candidate's phi definition or for a CAND_REF's alternative base
538 expression. */
539
540 static slsr_cand_t
541 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
542 {
543 cand_chain mapping_key;
544 cand_chain_t chain;
545 slsr_cand_t basis = NULL;
546
547 // Limit potential of N^2 behavior for long candidate chains.
548 int iters = 0;
549 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
550
551 mapping_key.base_expr = base_expr;
552 chain = base_cand_map->find (&mapping_key);
553
554 for (; chain && iters < max_iters; chain = chain->next, ++iters)
555 {
556 slsr_cand_t one_basis = chain->cand;
557
558 if (one_basis->kind != c->kind
559 || one_basis->cand_stmt == c->cand_stmt
560 || !operand_equal_p (one_basis->stride, c->stride, 0)
561 || !types_compatible_p (one_basis->cand_type, c->cand_type)
562 || !types_compatible_p (one_basis->stride_type, c->stride_type)
563 || !dominated_by_p (CDI_DOMINATORS,
564 gimple_bb (c->cand_stmt),
565 gimple_bb (one_basis->cand_stmt)))
566 continue;
567
568 tree lhs = gimple_assign_lhs (one_basis->cand_stmt);
569 if (lhs && TREE_CODE (lhs) == SSA_NAME
570 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
571 continue;
572
573 if (!basis || basis->cand_num < one_basis->cand_num)
574 basis = one_basis;
575 }
576
577 return basis;
578 }
579
580 /* Use the base expr from candidate C to look for possible candidates
581 that can serve as a basis for C. Each potential basis must also
582 appear in a block that dominates the candidate statement and have
583 the same stride and type. If more than one possible basis exists,
584 the one with highest index in the vector is chosen; this will be
585 the most immediately dominating basis. */
586
587 static int
588 find_basis_for_candidate (slsr_cand_t c)
589 {
590 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
591
592 /* If a candidate doesn't have a basis using its base expression,
593 it may have a basis hidden by one or more intervening phis. */
594 if (!basis && c->def_phi)
595 {
596 basic_block basis_bb, phi_bb;
597 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
598 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
599
600 if (basis)
601 {
602 /* A hidden basis must dominate the phi-definition of the
603 candidate's base name. */
604 phi_bb = gimple_bb (phi_cand->cand_stmt);
605 basis_bb = gimple_bb (basis->cand_stmt);
606
607 if (phi_bb == basis_bb
608 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
609 {
610 basis = NULL;
611 c->basis = 0;
612 }
613
614 /* If we found a hidden basis, estimate additional dead-code
615 savings if the phi and its feeding statements can be removed. */
616 tree feeding_var = gimple_phi_result (phi_cand->cand_stmt);
617 if (basis && uses_consumed_by_stmt (feeding_var, c->cand_stmt))
618 c->dead_savings += phi_cand->dead_savings;
619 }
620 }
621
622 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
623 {
624 tree alt_base_expr = get_alternative_base (c->base_expr);
625 if (alt_base_expr)
626 basis = find_basis_for_base_expr (c, alt_base_expr);
627 }
628
629 if (basis)
630 {
631 c->sibling = basis->dependent;
632 basis->dependent = c->cand_num;
633 return basis->cand_num;
634 }
635
636 return 0;
637 }
638
639 /* Record a mapping from BASE to C, indicating that C may potentially serve
640 as a basis using that base expression. BASE may be the same as
 641       C->BASE_EXPR; alternatively BASE can be a different tree that shares
 642       the underlying expression of C->BASE_EXPR.  */
643
644 static void
645 record_potential_basis (slsr_cand_t c, tree base)
646 {
647 cand_chain_t node;
648 cand_chain **slot;
649
650 gcc_assert (base);
651
652 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
653 node->base_expr = base;
654 node->cand = c;
655 node->next = NULL;
656 slot = base_cand_map->find_slot (node, INSERT);
657
658 if (*slot)
659 {
660 cand_chain_t head = (cand_chain_t) (*slot);
661 node->next = head->next;
662 head->next = node;
663 }
664 else
665 *slot = node;
666 }
667
668 /* Allocate storage for a new candidate and initialize its fields.
669 Attempt to find a basis for the candidate.
670
671 For CAND_REF, an alternative base may also be recorded and used
672 to find a basis. This helps cases where the expression hidden
673 behind BASE (which is usually an SSA_NAME) has immediate offset,
674 e.g.
675
676 a2[i][j] = 1;
677 a2[i + 20][j] = 2; */
678
679 static slsr_cand_t
680 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
681 const widest_int &index, tree stride, tree ctype,
682 tree stype, unsigned savings)
683 {
684 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
685 sizeof (slsr_cand));
686 c->cand_stmt = gs;
687 c->base_expr = base;
688 c->stride = stride;
689 c->index = index;
690 c->cand_type = ctype;
691 c->stride_type = stype;
692 c->kind = kind;
693 c->cand_num = cand_vec.length ();
694 c->next_interp = 0;
695 c->first_interp = c->cand_num;
696 c->dependent = 0;
697 c->sibling = 0;
698 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
699 c->dead_savings = savings;
700 c->visited = 0;
701 c->cached_basis = NULL_TREE;
702
703 cand_vec.safe_push (c);
704
705 if (kind == CAND_PHI)
706 c->basis = 0;
707 else
708 c->basis = find_basis_for_candidate (c);
709
710 record_potential_basis (c, base);
711 if (flag_expensive_optimizations && kind == CAND_REF)
712 {
713 tree alt_base = get_alternative_base (base);
714 if (alt_base)
715 record_potential_basis (c, alt_base);
716 }
717
718 return c;
719 }
720
721 /* Determine the target cost of statement GS when compiling according
722 to SPEED. */
723
724 static int
725 stmt_cost (gimple *gs, bool speed)
726 {
727 tree lhs, rhs1, rhs2;
728 machine_mode lhs_mode;
729
730 gcc_assert (is_gimple_assign (gs));
731 lhs = gimple_assign_lhs (gs);
732 rhs1 = gimple_assign_rhs1 (gs);
733 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
734
735 switch (gimple_assign_rhs_code (gs))
736 {
737 case MULT_EXPR:
738 rhs2 = gimple_assign_rhs2 (gs);
739
740 if (tree_fits_shwi_p (rhs2))
741 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
742
743 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
744 return mul_cost (speed, lhs_mode);
745
746 case PLUS_EXPR:
747 case POINTER_PLUS_EXPR:
748 case MINUS_EXPR:
749 return add_cost (speed, lhs_mode);
750
751 case NEGATE_EXPR:
752 return neg_cost (speed, lhs_mode);
753
754 CASE_CONVERT:
755 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
756
757 /* Note that we don't assign costs to copies that in most cases
758 will go away. */
759 case SSA_NAME:
760 return 0;
761
762 default:
763 ;
764 }
765
766 gcc_unreachable ();
767 return 0;
768 }
769
770 /* Look up the defining statement for BASE_IN and return a pointer
771 to its candidate in the candidate table, if any; otherwise NULL.
772 Only CAND_ADD and CAND_MULT candidates are returned. */
773
774 static slsr_cand_t
775 base_cand_from_table (tree base_in)
776 {
777 slsr_cand_t *result;
778
779 gimple *def = SSA_NAME_DEF_STMT (base_in);
780 if (!def)
781 return (slsr_cand_t) NULL;
782
783 result = stmt_cand_map->get (def);
784
785 if (result && (*result)->kind != CAND_REF)
786 return *result;
787
788 return (slsr_cand_t) NULL;
789 }
790
791 /* Add an entry to the statement-to-candidate mapping. */
792
793 static void
794 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
795 {
796 gcc_assert (!stmt_cand_map->put (gs, c));
797 }
798 \f
799 /* Given PHI which contains a phi statement, determine whether it
800 satisfies all the requirements of a phi candidate. If so, create
801 a candidate. Note that a CAND_PHI never has a basis itself, but
802 is used to help find a basis for subsequent candidates. */
803
804 static void
805 slsr_process_phi (gphi *phi, bool speed)
806 {
807 unsigned i;
808 tree arg0_base = NULL_TREE, base_type;
809 slsr_cand_t c;
810 class loop *cand_loop = gimple_bb (phi)->loop_father;
811 unsigned savings = 0;
812
813 /* A CAND_PHI requires each of its arguments to have the same
814 derived base name. (See the module header commentary for a
815 definition of derived base names.) Furthermore, all feeding
816 definitions must be in the same position in the loop hierarchy
817 as PHI. */
818
819 for (i = 0; i < gimple_phi_num_args (phi); i++)
820 {
821 slsr_cand_t arg_cand;
822 tree arg = gimple_phi_arg_def (phi, i);
823 tree derived_base_name = NULL_TREE;
824 gimple *arg_stmt = NULL;
825 basic_block arg_bb = NULL;
826
827 if (TREE_CODE (arg) != SSA_NAME)
828 return;
829
830 arg_cand = base_cand_from_table (arg);
831
832 if (arg_cand)
833 {
834 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
835 {
836 if (!arg_cand->next_interp)
837 return;
838
839 arg_cand = lookup_cand (arg_cand->next_interp);
840 }
841
842 if (!integer_onep (arg_cand->stride))
843 return;
844
845 derived_base_name = arg_cand->base_expr;
846 arg_stmt = arg_cand->cand_stmt;
847 arg_bb = gimple_bb (arg_stmt);
848
849 /* Gather potential dead code savings if the phi statement
850 can be removed later on. */
851 if (uses_consumed_by_stmt (arg, phi))
852 {
853 if (gimple_code (arg_stmt) == GIMPLE_PHI)
854 savings += arg_cand->dead_savings;
855 else
856 savings += stmt_cost (arg_stmt, speed);
857 }
858 }
859 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
860 {
861 derived_base_name = arg;
862 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
863 }
864
865 if (!arg_bb || arg_bb->loop_father != cand_loop)
866 return;
867
868 if (i == 0)
869 arg0_base = derived_base_name;
870 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
871 return;
872 }
873
874 /* Create the candidate. "alloc_cand_and_find_basis" is named
875 misleadingly for this case, as no basis will be sought for a
876 CAND_PHI. */
877 base_type = TREE_TYPE (arg0_base);
878
879 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
880 0, integer_one_node, base_type,
881 sizetype, savings);
882
883 /* Add the candidate to the statement-candidate mapping. */
884 add_cand_for_stmt (phi, c);
885 }
886
887 /* Given PBASE which is a pointer to tree, look up the defining
888 statement for it and check whether the candidate is in the
889 form of:
890
891 X = B + (1 * S), S is integer constant
892 X = B + (i * S), S is integer one
893
 894       If so, set PBASE to the candidate's base_expr and return the
 895       widest_int value (i * S).
 896       Otherwise, just return a zero widest_int.  */
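        /* Illustrative example: if *PBASE is defined by X = B' + 4,
           recorded as a CAND_ADD with index 4 and stride 1, then *PBASE
           is set to B' and 4 is returned.  */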
897
898 static widest_int
899 backtrace_base_for_ref (tree *pbase)
900 {
901 tree base_in = *pbase;
902 slsr_cand_t base_cand;
903
904 STRIP_NOPS (base_in);
905
906 /* Strip off widening conversion(s) to handle cases where
907 e.g. 'B' is widened from an 'int' in order to calculate
908 a 64-bit address. */
909 if (CONVERT_EXPR_P (base_in)
910 && legal_cast_p_1 (TREE_TYPE (base_in),
911 TREE_TYPE (TREE_OPERAND (base_in, 0))))
912 base_in = get_unwidened (base_in, NULL_TREE);
913
914 if (TREE_CODE (base_in) != SSA_NAME)
915 return 0;
916
917 base_cand = base_cand_from_table (base_in);
918
919 while (base_cand && base_cand->kind != CAND_PHI)
920 {
921 if (base_cand->kind == CAND_ADD
922 && base_cand->index == 1
923 && TREE_CODE (base_cand->stride) == INTEGER_CST)
924 {
925 /* X = B + (1 * S), S is integer constant. */
926 *pbase = base_cand->base_expr;
927 return wi::to_widest (base_cand->stride);
928 }
929 else if (base_cand->kind == CAND_ADD
930 && TREE_CODE (base_cand->stride) == INTEGER_CST
931 && integer_onep (base_cand->stride))
932 {
933 /* X = B + (i * S), S is integer one. */
934 *pbase = base_cand->base_expr;
935 return base_cand->index;
936 }
937
938 base_cand = lookup_cand (base_cand->next_interp);
939 }
940
941 return 0;
942 }
943
944 /* Look for the following pattern:
945
946 *PBASE: MEM_REF (T1, C1)
947
948 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
949 or
950 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
951 or
952 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
953
954 *PINDEX: C4 * BITS_PER_UNIT
955
956 If not present, leave the input values unchanged and return FALSE.
957 Otherwise, modify the input values as follows and return TRUE:
958
959 *PBASE: T1
960 *POFFSET: MULT_EXPR (T2, C3)
961 *PINDEX: C1 + (C2 * C3) + C4
962
963 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
964 will be further restructured to:
965
966 *PBASE: T1
967 *POFFSET: MULT_EXPR (T2', C3)
968 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
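        /* Illustrative numbers for the last case: with C1 = 0, C2 = 0,
           C3 = 8, C4 = 0, and T2 = T2' + 5 (so C5 = 5), *POFFSET becomes
           MULT_EXPR (T2', 8) and *PINDEX becomes 5 * 8 = 40.  */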
969
970 static bool
971 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
972 tree *ptype)
973 {
974 tree base = *pbase, offset = *poffset;
975 widest_int index = *pindex;
976 tree mult_op0, t1, t2, type;
977 widest_int c1, c2, c3, c4, c5;
978 offset_int mem_offset;
979
980 if (!base
981 || !offset
982 || TREE_CODE (base) != MEM_REF
983 || !mem_ref_offset (base).is_constant (&mem_offset)
984 || TREE_CODE (offset) != MULT_EXPR
985 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
986 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
987 return false;
988
989 t1 = TREE_OPERAND (base, 0);
990 c1 = widest_int::from (mem_offset, SIGNED);
991 type = TREE_TYPE (TREE_OPERAND (base, 1));
992
993 mult_op0 = TREE_OPERAND (offset, 0);
994 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
995
996 if (TREE_CODE (mult_op0) == PLUS_EXPR)
997
998 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
999 {
1000 t2 = TREE_OPERAND (mult_op0, 0);
1001 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
1002 }
1003 else
1004 return false;
1005
1006 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
1007
1008 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
1009 {
1010 t2 = TREE_OPERAND (mult_op0, 0);
1011 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
1012 }
1013 else
1014 return false;
1015
1016 else
1017 {
1018 t2 = mult_op0;
1019 c2 = 0;
1020 }
1021
1022 c4 = index >> LOG2_BITS_PER_UNIT;
1023 c5 = backtrace_base_for_ref (&t2);
1024
1025 *pbase = t1;
1026 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
1027 wide_int_to_tree (sizetype, c3));
1028 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
1029 *ptype = type;
1030
1031 return true;
1032 }
1033
1034 /* Given GS which contains a data reference, create a CAND_REF entry in
1035 the candidate table and attempt to find a basis. */
1036
1037 static void
1038 slsr_process_ref (gimple *gs)
1039 {
1040 tree ref_expr, base, offset, type;
1041 poly_int64 bitsize, bitpos;
1042 machine_mode mode;
1043 int unsignedp, reversep, volatilep;
1044 slsr_cand_t c;
1045
1046 if (gimple_vdef (gs))
1047 ref_expr = gimple_assign_lhs (gs);
1048 else
1049 ref_expr = gimple_assign_rhs1 (gs);
1050
1051 if (!handled_component_p (ref_expr)
1052 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1053 || (TREE_CODE (ref_expr) == COMPONENT_REF
1054 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1055 return;
1056
1057 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1058 &unsignedp, &reversep, &volatilep);
1059 HOST_WIDE_INT cbitpos;
1060 if (reversep || !bitpos.is_constant (&cbitpos))
1061 return;
1062 widest_int index = cbitpos;
1063
1064 if (!restructure_reference (&base, &offset, &index, &type))
1065 return;
1066
1067 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1068 type, sizetype, 0);
1069
1070 /* Add the candidate to the statement-candidate mapping. */
1071 add_cand_for_stmt (gs, c);
1072 }
1073
1074 /* Create a candidate entry for a statement GS, where GS multiplies
1075 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1076 about the two SSA names into the new candidate. Return the new
1077 candidate. */
1078
1079 static slsr_cand_t
1080 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1081 {
1082 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1083 tree stype = NULL_TREE;
1084 widest_int index;
1085 unsigned savings = 0;
1086 slsr_cand_t c;
1087 slsr_cand_t base_cand = base_cand_from_table (base_in);
1088
1089 /* Look at all interpretations of the base candidate, if necessary,
1090 to find information to propagate into this candidate. */
1091 while (base_cand && !base && base_cand->kind != CAND_PHI)
1092 {
1093
1094 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1095 {
1096 /* Y = (B + i') * 1
1097 X = Y * Z
1098 ================
1099 X = (B + i') * Z */
1100 base = base_cand->base_expr;
1101 index = base_cand->index;
1102 stride = stride_in;
1103 ctype = base_cand->cand_type;
1104 stype = TREE_TYPE (stride_in);
1105 if (has_single_use (base_in))
1106 savings = (base_cand->dead_savings
1107 + stmt_cost (base_cand->cand_stmt, speed));
1108 }
1109 else if (base_cand->kind == CAND_ADD
1110 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1111 {
1112 /* Y = B + (i' * S), S constant
1113 X = Y * Z
1114 ============================
1115 X = B + ((i' * S) * Z) */
1116 base = base_cand->base_expr;
1117 index = base_cand->index * wi::to_widest (base_cand->stride);
1118 stride = stride_in;
1119 ctype = base_cand->cand_type;
1120 stype = TREE_TYPE (stride_in);
1121 if (has_single_use (base_in))
1122 savings = (base_cand->dead_savings
1123 + stmt_cost (base_cand->cand_stmt, speed));
1124 }
1125
1126 base_cand = lookup_cand (base_cand->next_interp);
1127 }
1128
1129 if (!base)
1130 {
1131 /* No interpretations had anything useful to propagate, so
1132 produce X = (Y + 0) * Z. */
1133 base = base_in;
1134 index = 0;
1135 stride = stride_in;
1136 ctype = TREE_TYPE (base_in);
1137 stype = TREE_TYPE (stride_in);
1138 }
1139
1140 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1141 ctype, stype, savings);
1142 return c;
1143 }
1144
1145 /* Create a candidate entry for a statement GS, where GS multiplies
1146 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1147 information about BASE_IN into the new candidate. Return the new
1148 candidate. */
1149
1150 static slsr_cand_t
1151 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1152 {
1153 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1154 widest_int index, temp;
1155 unsigned savings = 0;
1156 slsr_cand_t c;
1157 slsr_cand_t base_cand = base_cand_from_table (base_in);
1158
1159 /* Look at all interpretations of the base candidate, if necessary,
1160 to find information to propagate into this candidate. */
1161 while (base_cand && !base && base_cand->kind != CAND_PHI)
1162 {
1163 if (base_cand->kind == CAND_MULT
1164 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1165 {
1166 /* Y = (B + i') * S, S constant
1167 X = Y * c
1168 ============================
1169 X = (B + i') * (S * c) */
1170 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1171 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1172 {
1173 base = base_cand->base_expr;
1174 index = base_cand->index;
1175 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1176 ctype = base_cand->cand_type;
1177 if (has_single_use (base_in))
1178 savings = (base_cand->dead_savings
1179 + stmt_cost (base_cand->cand_stmt, speed));
1180 }
1181 }
1182 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1183 {
1184 /* Y = B + (i' * 1)
1185 X = Y * c
1186 ===========================
1187 X = (B + i') * c */
1188 base = base_cand->base_expr;
1189 index = base_cand->index;
1190 stride = stride_in;
1191 ctype = base_cand->cand_type;
1192 if (has_single_use (base_in))
1193 savings = (base_cand->dead_savings
1194 + stmt_cost (base_cand->cand_stmt, speed));
1195 }
1196 else if (base_cand->kind == CAND_ADD
1197 && base_cand->index == 1
1198 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1199 {
1200 /* Y = B + (1 * S), S constant
1201 X = Y * c
1202 ===========================
1203 X = (B + S) * c */
1204 base = base_cand->base_expr;
1205 index = wi::to_widest (base_cand->stride);
1206 stride = stride_in;
1207 ctype = base_cand->cand_type;
1208 if (has_single_use (base_in))
1209 savings = (base_cand->dead_savings
1210 + stmt_cost (base_cand->cand_stmt, speed));
1211 }
1212
1213 base_cand = lookup_cand (base_cand->next_interp);
1214 }
1215
1216 if (!base)
1217 {
1218 /* No interpretations had anything useful to propagate, so
1219 produce X = (Y + 0) * c. */
1220 base = base_in;
1221 index = 0;
1222 stride = stride_in;
1223 ctype = TREE_TYPE (base_in);
1224 }
1225
1226 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1227 ctype, sizetype, savings);
1228 return c;
1229 }
1230
1231 /* Given GS which is a multiply of scalar integers, make an appropriate
1232 entry in the candidate table. If this is a multiply of two SSA names,
1233 create two CAND_MULT interpretations and attempt to find a basis for
1234 each of them. Otherwise, create a single CAND_MULT and attempt to
1235 find a basis. */
1236
1237 static void
1238 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1239 {
1240 slsr_cand_t c, c2;
1241
1242 /* If this is a multiply of an SSA name with itself, it is highly
1243 unlikely that we will get a strength reduction opportunity, so
1244 don't record it as a candidate. This simplifies the logic for
1245 finding a basis, so if this is removed that must be considered. */
1246 if (rhs1 == rhs2)
1247 return;
1248
1249 if (TREE_CODE (rhs2) == SSA_NAME)
1250 {
1251 /* Record an interpretation of this statement in the candidate table
1252 assuming RHS1 is the base expression and RHS2 is the stride. */
1253 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1254
1255 /* Add the first interpretation to the statement-candidate mapping. */
1256 add_cand_for_stmt (gs, c);
1257
1258 /* Record another interpretation of this statement assuming RHS1
1259 is the stride and RHS2 is the base expression. */
1260 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1261 c->next_interp = c2->cand_num;
1262 c2->first_interp = c->cand_num;
1263 }
1264 else if (TREE_CODE (rhs2) == INTEGER_CST && !integer_zerop (rhs2))
1265 {
1266 /* Record an interpretation for the multiply-immediate. */
1267 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1268
1269 /* Add the interpretation to the statement-candidate mapping. */
1270 add_cand_for_stmt (gs, c);
1271 }
1272 }
1273
1274 /* Create a candidate entry for a statement GS, where GS adds two
1275 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1276 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1277 information about the two SSA names into the new candidate.
1278 Return the new candidate. */
1279
1280 static slsr_cand_t
1281 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1282 bool subtract_p, bool speed)
1283 {
1284 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1285 tree stype = NULL_TREE;
1286 widest_int index;
1287 unsigned savings = 0;
1288 slsr_cand_t c;
1289 slsr_cand_t base_cand = base_cand_from_table (base_in);
1290 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1291
1292 /* The most useful transformation is a multiply-immediate feeding
1293 an add or subtract. Look for that first. */
1294 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1295 {
1296 if (addend_cand->kind == CAND_MULT
1297 && addend_cand->index == 0
1298 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1299 {
1300 /* Z = (B + 0) * S, S constant
1301 X = Y +/- Z
1302 ===========================
1303 X = Y + ((+/-1 * S) * B) */
1304 base = base_in;
1305 index = wi::to_widest (addend_cand->stride);
1306 if (subtract_p)
1307 index = -index;
1308 stride = addend_cand->base_expr;
1309 ctype = TREE_TYPE (base_in);
1310 stype = addend_cand->cand_type;
1311 if (has_single_use (addend_in))
1312 savings = (addend_cand->dead_savings
1313 + stmt_cost (addend_cand->cand_stmt, speed));
1314 }
1315
1316 addend_cand = lookup_cand (addend_cand->next_interp);
1317 }
1318
1319 while (base_cand && !base && base_cand->kind != CAND_PHI)
1320 {
1321 if (base_cand->kind == CAND_ADD
1322 && (base_cand->index == 0
1323 || operand_equal_p (base_cand->stride,
1324 integer_zero_node, 0)))
1325 {
1326 /* Y = B + (i' * S), i' * S = 0
1327 X = Y +/- Z
1328 ============================
1329 X = B + (+/-1 * Z) */
1330 base = base_cand->base_expr;
1331 index = subtract_p ? -1 : 1;
1332 stride = addend_in;
1333 ctype = base_cand->cand_type;
1334 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1335 : TREE_TYPE (addend_in));
1336 if (has_single_use (base_in))
1337 savings = (base_cand->dead_savings
1338 + stmt_cost (base_cand->cand_stmt, speed));
1339 }
1340 else if (subtract_p)
1341 {
1342 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1343
1344 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1345 {
1346 if (subtrahend_cand->kind == CAND_MULT
1347 && subtrahend_cand->index == 0
1348 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1349 {
1350 /* Z = (B + 0) * S, S constant
1351 X = Y - Z
1352 ===========================
1353 Value: X = Y + ((-1 * S) * B) */
1354 base = base_in;
1355 index = wi::to_widest (subtrahend_cand->stride);
1356 index = -index;
1357 stride = subtrahend_cand->base_expr;
1358 ctype = TREE_TYPE (base_in);
1359 stype = subtrahend_cand->cand_type;
1360 if (has_single_use (addend_in))
1361 savings = (subtrahend_cand->dead_savings
1362 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1363 }
1364
1365 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1366 }
1367 }
1368
1369 base_cand = lookup_cand (base_cand->next_interp);
1370 }
1371
1372 if (!base)
1373 {
1374 /* No interpretations had anything useful to propagate, so
1375 produce X = Y + (1 * Z). */
1376 base = base_in;
1377 index = subtract_p ? -1 : 1;
1378 stride = addend_in;
1379 ctype = TREE_TYPE (base_in);
1380 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1381 : TREE_TYPE (addend_in));
1382 }
1383
1384 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1385 ctype, stype, savings);
1386 return c;
1387 }
1388
1389 /* Create a candidate entry for a statement GS, where GS adds SSA
1390 name BASE_IN to constant INDEX_IN. Propagate any known information
1391 about BASE_IN into the new candidate. Return the new candidate. */
1392
1393 static slsr_cand_t
1394 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1395 bool speed)
1396 {
1397 enum cand_kind kind = CAND_ADD;
1398 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1399 tree stype = NULL_TREE;
1400 widest_int index, multiple;
1401 unsigned savings = 0;
1402 slsr_cand_t c;
1403 slsr_cand_t base_cand = base_cand_from_table (base_in);
1404
1405 while (base_cand && !base && base_cand->kind != CAND_PHI)
1406 {
1407 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1408
1409 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1410 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1411 sign, &multiple))
1412 {
1413 /* Y = (B + i') * S, S constant, c = kS for some integer k
1414 X = Y + c
1415 ============================
1416 X = (B + (i'+ k)) * S
1417 OR
1418 Y = B + (i' * S), S constant, c = kS for some integer k
1419 X = Y + c
1420 ============================
1421 X = (B + (i'+ k)) * S */
1422 kind = base_cand->kind;
1423 base = base_cand->base_expr;
1424 index = base_cand->index + multiple;
1425 stride = base_cand->stride;
1426 ctype = base_cand->cand_type;
1427 stype = base_cand->stride_type;
1428 if (has_single_use (base_in))
1429 savings = (base_cand->dead_savings
1430 + stmt_cost (base_cand->cand_stmt, speed));
1431 }
1432
1433 base_cand = lookup_cand (base_cand->next_interp);
1434 }
1435
1436 if (!base)
1437 {
1438 /* No interpretations had anything useful to propagate, so
1439 produce X = Y + (c * 1). */
1440 kind = CAND_ADD;
1441 base = base_in;
1442 index = index_in;
1443 stride = integer_one_node;
1444 ctype = TREE_TYPE (base_in);
1445 stype = sizetype;
1446 }
1447
1448 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1449 ctype, stype, savings);
1450 return c;
1451 }
1452
1453 /* Given GS which is an add or subtract of scalar integers or pointers,
1454 make at least one appropriate entry in the candidate table. */
1455
1456 static void
1457 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1458 {
1459 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1460 slsr_cand_t c = NULL, c2;
1461
1462 if (TREE_CODE (rhs2) == SSA_NAME)
1463 {
1464 /* First record an interpretation assuming RHS1 is the base expression
1465 and RHS2 is the stride. But it doesn't make sense for the
1466 stride to be a pointer, so don't record a candidate in that case. */
1467 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1468 {
1469 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1470
1471 /* Add the first interpretation to the statement-candidate
1472 mapping. */
1473 add_cand_for_stmt (gs, c);
1474 }
1475
1476 /* If the two RHS operands are identical, or this is a subtract,
1477 we're done. */
1478 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1479 return;
1480
1481 /* Otherwise, record another interpretation assuming RHS2 is the
1482 base expression and RHS1 is the stride, again provided that the
1483 stride is not a pointer. */
1484 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1485 {
1486 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1487 if (c)
1488 {
1489 c->next_interp = c2->cand_num;
1490 c2->first_interp = c->cand_num;
1491 }
1492 else
1493 add_cand_for_stmt (gs, c2);
1494 }
1495 }
1496 else if (TREE_CODE (rhs2) == INTEGER_CST)
1497 {
1498 /* Record an interpretation for the add-immediate. */
1499 widest_int index = wi::to_widest (rhs2);
1500 if (subtract_p)
1501 index = -index;
1502
1503 c = create_add_imm_cand (gs, rhs1, index, speed);
1504
1505 /* Add the interpretation to the statement-candidate mapping. */
1506 add_cand_for_stmt (gs, c);
1507 }
1508 }
1509
1510 /* Given GS which is a negate of a scalar integer, make an appropriate
1511 entry in the candidate table. A negate is equivalent to a multiply
1512 by -1. */
1513
1514 static void
1515 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1516 {
1517 /* Record a CAND_MULT interpretation for the multiply by -1. */
1518 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1519
1520 /* Add the interpretation to the statement-candidate mapping. */
1521 add_cand_for_stmt (gs, c);
1522 }
1523
 1524   /* Helper function for legal_cast_p, operating on two trees.  Checks
1525 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1526 for more details. */
1527
1528 static bool
1529 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1530 {
1531 unsigned lhs_size, rhs_size;
1532 bool lhs_wraps, rhs_wraps;
1533
1534 lhs_size = TYPE_PRECISION (lhs_type);
1535 rhs_size = TYPE_PRECISION (rhs_type);
1536 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1537 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1538
1539 if (lhs_size < rhs_size
1540 || (rhs_wraps && !lhs_wraps)
1541 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1542 return false;
1543
1544 return true;
1545 }
1546
1547 /* Return TRUE if GS is a statement that defines an SSA name from
1548 a conversion and is legal for us to combine with an add and multiply
1549 in the candidate table. For example, suppose we have:
1550
1551 A = B + i;
1552 C = (type) A;
1553 D = C * S;
1554
1555 Without the type-cast, we would create a CAND_MULT for D with base B,
1556 index i, and stride S. We want to record this candidate only if it
 1557      is equivalent to applying the type cast following the multiply:
1558
1559 A = B + i;
1560 E = A * S;
1561 D = (type) E;
1562
1563 We will record the type with the candidate for D. This allows us
1564 to use a similar previous candidate as a basis. If we have earlier seen
1565
1566 A' = B + i';
1567 C' = (type) A';
1568 D' = C' * S;
1569
1570 we can replace D with
1571
1572 D = D' + (i - i') * S;
1573
1574 But if moving the type-cast would change semantics, we mustn't do this.
1575
1576 This is legitimate for casts from a non-wrapping integral type to
1577 any integral type of the same or larger size. It is not legitimate
1578 to convert a wrapping type to a non-wrapping type, or to a wrapping
1579 type of a different size. I.e., with a wrapping type, we must
1580 assume that the addition B + i could wrap, in which case performing
1581 the multiply before or after one of the "illegal" type casts will
1582 have different semantics. */
1583
1584 static bool
1585 legal_cast_p (gimple *gs, tree rhs)
1586 {
1587 if (!is_gimple_assign (gs)
1588 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1589 return false;
1590
1591 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
1592 }
1593
1594 /* Given GS which is a cast to a scalar integer type, determine whether
1595 the cast is legal for strength reduction. If so, make at least one
1596 appropriate entry in the candidate table. */
1597
1598 static void
1599 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1600 {
1601 tree lhs, ctype;
1602 slsr_cand_t base_cand, c = NULL, c2;
1603 unsigned savings = 0;
1604
1605 if (!legal_cast_p (gs, rhs1))
1606 return;
1607
1608 lhs = gimple_assign_lhs (gs);
1609 base_cand = base_cand_from_table (rhs1);
1610 ctype = TREE_TYPE (lhs);
1611
1612 if (base_cand && base_cand->kind != CAND_PHI)
1613 {
1614 slsr_cand_t first_cand = NULL;
1615
1616 while (base_cand)
1617 {
1618 /* Propagate all data from the base candidate except the type,
1619 which comes from the cast, and the base candidate's cast,
1620 which is no longer applicable. */
1621 if (has_single_use (rhs1))
1622 savings = (base_cand->dead_savings
1623 + stmt_cost (base_cand->cand_stmt, speed));
1624
1625 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1626 base_cand->base_expr,
1627 base_cand->index, base_cand->stride,
1628 ctype, base_cand->stride_type,
1629 savings);
1630 if (!first_cand)
1631 first_cand = c;
1632
1633 if (first_cand != c)
1634 c->first_interp = first_cand->cand_num;
1635
1636 base_cand = lookup_cand (base_cand->next_interp);
1637 }
1638 }
1639 else
1640 {
1641 /* If nothing is known about the RHS, create fresh CAND_ADD and
1642 CAND_MULT interpretations:
1643
1644 X = Y + (0 * 1)
1645 X = (Y + 0) * 1
1646
1647 The first of these is somewhat arbitrary, but the choice of
1648 1 for the stride simplifies the logic for propagating casts
1649 into their uses. */
1650 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1651 integer_one_node, ctype, sizetype, 0);
1652 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1653 integer_one_node, ctype, sizetype, 0);
1654 c->next_interp = c2->cand_num;
1655 c2->first_interp = c->cand_num;
1656 }
1657
1658 /* Add the first (or only) interpretation to the statement-candidate
1659 mapping. */
1660 add_cand_for_stmt (gs, c);
1661 }
1662
1663 /* Given GS which is a copy of a scalar integer type, make at least one
1664 appropriate entry in the candidate table.
1665
1666 This interface is included for completeness, but is unnecessary
1667 if this pass immediately follows a pass that performs copy
1668 propagation, such as DOM. */
1669
1670 static void
1671 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1672 {
1673 slsr_cand_t base_cand, c = NULL, c2;
1674 unsigned savings = 0;
1675
1676 base_cand = base_cand_from_table (rhs1);
1677
1678 if (base_cand && base_cand->kind != CAND_PHI)
1679 {
1680 slsr_cand_t first_cand = NULL;
1681
1682 while (base_cand)
1683 {
1684 /* Propagate all data from the base candidate. */
1685 if (has_single_use (rhs1))
1686 savings = (base_cand->dead_savings
1687 + stmt_cost (base_cand->cand_stmt, speed));
1688
1689 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1690 base_cand->base_expr,
1691 base_cand->index, base_cand->stride,
1692 base_cand->cand_type,
1693 base_cand->stride_type, savings);
1694 if (!first_cand)
1695 first_cand = c;
1696
1697 if (first_cand != c)
1698 c->first_interp = first_cand->cand_num;
1699
1700 base_cand = lookup_cand (base_cand->next_interp);
1701 }
1702 }
1703 else
1704 {
1705 /* If nothing is known about the RHS, create fresh CAND_ADD and
1706 CAND_MULT interpretations:
1707
1708 X = Y + (0 * 1)
1709 X = (Y + 0) * 1
1710
1711 The first of these is somewhat arbitrary, but the choice of
1712 1 for the stride simplifies the logic for propagating casts
1713 into their uses. */
1714 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1715 integer_one_node, TREE_TYPE (rhs1),
1716 sizetype, 0);
1717 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1718 integer_one_node, TREE_TYPE (rhs1),
1719 sizetype, 0);
1720 c->next_interp = c2->cand_num;
1721 c2->first_interp = c->cand_num;
1722 }
1723
1724 /* Add the first (or only) interpretation to the statement-candidate
1725 mapping. */
1726 add_cand_for_stmt (gs, c);
1727 }
1728 \f
1729 class find_candidates_dom_walker : public dom_walker
1730 {
1731 public:
1732 find_candidates_dom_walker (cdi_direction direction)
1733 : dom_walker (direction) {}
1734 virtual edge before_dom_children (basic_block);
1735 };
1736
1737 /* Find strength-reduction candidates in block BB. */
1738
1739 edge
1740 find_candidates_dom_walker::before_dom_children (basic_block bb)
1741 {
1742 bool speed = optimize_bb_for_speed_p (bb);
1743
1744 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1745 gsi_next (&gsi))
1746 slsr_process_phi (gsi.phi (), speed);
1747
1748 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1749 gsi_next (&gsi))
1750 {
1751 gimple *gs = gsi_stmt (gsi);
1752
1753 if (stmt_could_throw_p (cfun, gs))
1754 continue;
1755
1756 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1757 slsr_process_ref (gs);
1758
1759 else if (is_gimple_assign (gs)
1760 && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))
1761 || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))))
1762 {
1763 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1764
1765 switch (gimple_assign_rhs_code (gs))
1766 {
1767 case MULT_EXPR:
1768 case PLUS_EXPR:
1769 rhs1 = gimple_assign_rhs1 (gs);
1770 rhs2 = gimple_assign_rhs2 (gs);
1771 /* Should never happen, but currently some buggy situations
1772 in earlier phases put constants in rhs1. */
1773 if (TREE_CODE (rhs1) != SSA_NAME)
1774 continue;
1775 break;
1776
1777 /* Possible future opportunity: rhs1 of a ptr+ can be
1778 an ADDR_EXPR. */
1779 case POINTER_PLUS_EXPR:
1780 case MINUS_EXPR:
1781 rhs2 = gimple_assign_rhs2 (gs);
1782 gcc_fallthrough ();
1783
1784 CASE_CONVERT:
1785 case SSA_NAME:
1786 case NEGATE_EXPR:
1787 rhs1 = gimple_assign_rhs1 (gs);
1788 if (TREE_CODE (rhs1) != SSA_NAME)
1789 continue;
1790 break;
1791
1792 default:
1793 ;
1794 }
1795
1796 switch (gimple_assign_rhs_code (gs))
1797 {
1798 case MULT_EXPR:
1799 slsr_process_mul (gs, rhs1, rhs2, speed);
1800 break;
1801
1802 case PLUS_EXPR:
1803 case POINTER_PLUS_EXPR:
1804 case MINUS_EXPR:
1805 slsr_process_add (gs, rhs1, rhs2, speed);
1806 break;
1807
1808 case NEGATE_EXPR:
1809 slsr_process_neg (gs, rhs1, speed);
1810 break;
1811
1812 CASE_CONVERT:
1813 slsr_process_cast (gs, rhs1, speed);
1814 break;
1815
1816 case SSA_NAME:
1817 slsr_process_copy (gs, rhs1, speed);
1818 break;
1819
1820 default:
1821 ;
1822 }
1823 }
1824 }
1825 return NULL;
1826 }
1827 \f
1828 /* Dump a candidate for debug. */
1829
1830 static void
1831 dump_candidate (slsr_cand_t c)
1832 {
1833 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1834 gimple_bb (c->cand_stmt)->index);
1835 print_gimple_stmt (dump_file, c->cand_stmt, 0);
1836 switch (c->kind)
1837 {
1838 case CAND_MULT:
1839 fputs (" MULT : (", dump_file);
1840 print_generic_expr (dump_file, c->base_expr);
1841 fputs (" + ", dump_file);
1842 print_decs (c->index, dump_file);
1843 fputs (") * ", dump_file);
1844 if (TREE_CODE (c->stride) != INTEGER_CST
1845 && c->stride_type != TREE_TYPE (c->stride))
1846 {
1847 fputs ("(", dump_file);
1848 print_generic_expr (dump_file, c->stride_type);
1849 fputs (")", dump_file);
1850 }
1851 print_generic_expr (dump_file, c->stride);
1852 fputs (" : ", dump_file);
1853 break;
1854 case CAND_ADD:
1855 fputs (" ADD : ", dump_file);
1856 print_generic_expr (dump_file, c->base_expr);
1857 fputs (" + (", dump_file);
1858 print_decs (c->index, dump_file);
1859 fputs (" * ", dump_file);
1860 if (TREE_CODE (c->stride) != INTEGER_CST
1861 && c->stride_type != TREE_TYPE (c->stride))
1862 {
1863 fputs ("(", dump_file);
1864 print_generic_expr (dump_file, c->stride_type);
1865 fputs (")", dump_file);
1866 }
1867 print_generic_expr (dump_file, c->stride);
1868 fputs (") : ", dump_file);
1869 break;
1870 case CAND_REF:
1871 fputs (" REF : ", dump_file);
1872 print_generic_expr (dump_file, c->base_expr);
1873 fputs (" + (", dump_file);
1874 print_generic_expr (dump_file, c->stride);
1875 fputs (") + ", dump_file);
1876 print_decs (c->index, dump_file);
1877 fputs (" : ", dump_file);
1878 break;
1879 case CAND_PHI:
1880 fputs (" PHI : ", dump_file);
1881 print_generic_expr (dump_file, c->base_expr);
1882 fputs (" + (unknown * ", dump_file);
1883 print_generic_expr (dump_file, c->stride);
1884 fputs (") : ", dump_file);
1885 break;
1886 default:
1887 gcc_unreachable ();
1888 }
1889 print_generic_expr (dump_file, c->cand_type);
1890 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1891 c->basis, c->dependent, c->sibling);
1892 fprintf (dump_file,
1893 " next-interp: %d first-interp: %d dead-savings: %d\n",
1894 c->next_interp, c->first_interp, c->dead_savings);
1895 if (c->def_phi)
1896 fprintf (dump_file, " phi: %d\n", c->def_phi);
1897 fputs ("\n", dump_file);
1898 }
1899
1900 /* Dump the candidate vector for debug. */
1901
1902 static void
1903 dump_cand_vec (void)
1904 {
1905 unsigned i;
1906 slsr_cand_t c;
1907
1908 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1909
1910 FOR_EACH_VEC_ELT (cand_vec, i, c)
1911 if (c != NULL)
1912 dump_candidate (c);
1913 }
1914
1915 /* Callback used to dump the candidate chains hash table. */
1916
1917 int
1918 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1919 {
1920 const_cand_chain_t chain = *slot;
1921 cand_chain_t p;
1922
1923 print_generic_expr (dump_file, chain->base_expr);
1924 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1925
1926 for (p = chain->next; p; p = p->next)
1927 fprintf (dump_file, " -> %d", p->cand->cand_num);
1928
1929 fputs ("\n", dump_file);
1930 return 1;
1931 }
1932
1933 /* Dump the candidate chains. */
1934
1935 static void
1936 dump_cand_chains (void)
1937 {
1938 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1939 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1940 (NULL);
1941 fputs ("\n", dump_file);
1942 }
1943
1944 /* Dump the increment vector for debug. */
1945
1946 static void
1947 dump_incr_vec (void)
1948 {
1949 if (dump_file && (dump_flags & TDF_DETAILS))
1950 {
1951 unsigned i;
1952
1953 fprintf (dump_file, "\nIncrement vector:\n\n");
1954
1955 for (i = 0; i < incr_vec_len; i++)
1956 {
1957 fprintf (dump_file, "%3d increment: ", i);
1958 print_decs (incr_vec[i].incr, dump_file);
1959 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1960 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1961 fputs ("\n initializer: ", dump_file);
1962 print_generic_expr (dump_file, incr_vec[i].initializer);
1963 fputs ("\n\n", dump_file);
1964 }
1965 }
1966 }
1967 \f
1968 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1969 data reference. */
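/* As a purely illustrative example (the SSA names are invented): a
   CAND_REF with base _3, stride _4, and index 16 causes *EXPR to be
   replaced by a MEM_REF whose address is the gimplified form of
   _3 + _4 and whose constant offset is 16.  */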
1970
1971 static void
1972 replace_ref (tree *expr, slsr_cand_t c)
1973 {
1974 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1975 unsigned HOST_WIDE_INT misalign;
1976 unsigned align;
1977
1978 /* Ensure the memory reference carries the minimum alignment
1979 requirement for the data type. See PR58041. */
1980 get_object_alignment_1 (*expr, &align, &misalign);
1981 if (misalign != 0)
1982 align = least_bit_hwi (misalign);
1983 if (align < TYPE_ALIGN (acc_type))
1984 acc_type = build_aligned_type (acc_type, align);
1985
1986 add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
1987 c->base_expr, c->stride);
1988 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1989 wide_int_to_tree (c->cand_type, c->index));
1990
1991 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1992 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1993 TREE_OPERAND (mem_ref, 0)
1994 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1995 /*simple_p=*/true, NULL,
1996 /*before=*/true, GSI_SAME_STMT);
1997 copy_ref_info (mem_ref, *expr);
1998 *expr = mem_ref;
1999 update_stmt (c->cand_stmt);
2000 }
2001
2002 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
2003 dependent of candidate C with an equivalent strength-reduced data
2004 reference. */
2005
2006 static void
2007 replace_refs (slsr_cand_t c)
2008 {
2009 if (dump_file && (dump_flags & TDF_DETAILS))
2010 {
2011 fputs ("Replacing reference: ", dump_file);
2012 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2013 }
2014
2015 if (gimple_vdef (c->cand_stmt))
2016 {
2017 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
2018 replace_ref (lhs, c);
2019 }
2020 else
2021 {
2022 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
2023 replace_ref (rhs, c);
2024 }
2025
2026 if (dump_file && (dump_flags & TDF_DETAILS))
2027 {
2028 fputs ("With: ", dump_file);
2029 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2030 fputs ("\n", dump_file);
2031 }
2032
2033 if (c->sibling)
2034 replace_refs (lookup_cand (c->sibling));
2035
2036 if (c->dependent)
2037 replace_refs (lookup_cand (c->dependent));
2038 }
2039
2040 /* Return TRUE if candidate C is dependent upon a PHI. */
2041
2042 static bool
2043 phi_dependent_cand_p (slsr_cand_t c)
2044 {
2045 /* A candidate is not necessarily dependent upon a PHI just because
2046 it has a phi definition for its base name. It may have a basis
2047 that relies upon the same phi definition, in which case the PHI
2048 is irrelevant to this candidate. */
2049 return (c->def_phi
2050 && c->basis
2051 && lookup_cand (c->basis)->def_phi != c->def_phi);
2052 }
2053
2054 /* Calculate the increment required for candidate C relative to
2055 its basis. */
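/* For example, given a basis Y = (B + 2) * S and a candidate
   X = (B + 7) * S, the increment computed for the candidate is
   7 - 2 = 5.  */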
2056
2057 static widest_int
2058 cand_increment (slsr_cand_t c)
2059 {
2060 slsr_cand_t basis;
2061
2062 /* If the candidate doesn't have a basis, just return its own
2063 index. This is useful in record_increments to help us find
2064 an existing initializer. Also, if the candidate's basis is
2065 hidden by a phi, then its own index will be the increment
2066 from the newly introduced phi basis. */
2067 if (!c->basis || phi_dependent_cand_p (c))
2068 return c->index;
2069
2070 basis = lookup_cand (c->basis);
2071 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2072 return c->index - basis->index;
2073 }
2074
2075 /* Calculate the increment required for candidate C relative to
2076 its basis. If we aren't going to generate pointer arithmetic
2077 for this candidate, return the absolute value of that increment
2078 instead. */
2079
2080 static inline widest_int
2081 cand_abs_increment (slsr_cand_t c)
2082 {
2083 widest_int increment = cand_increment (c);
2084
2085 if (!address_arithmetic_p && wi::neg_p (increment))
2086 increment = -increment;
2087
2088 return increment;
2089 }
2090
2091 /* Return TRUE iff candidate C has already been replaced under
2092 another interpretation. */
2093
2094 static inline bool
2095 cand_already_replaced (slsr_cand_t c)
2096 {
2097 return (gimple_bb (c->cand_stmt) == 0);
2098 }
2099
2100 /* Common logic used by replace_unconditional_candidate and
2101 replace_conditional_candidate. */
2102
2103 static void
2104 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2105 {
2106 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2107 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2108
2109 /* It is not useful to replace casts, copies, negates, or adds of
2110 an SSA name and a constant. */
2111 if (cand_code == SSA_NAME
2112 || CONVERT_EXPR_CODE_P (cand_code)
2113 || cand_code == PLUS_EXPR
2114 || cand_code == POINTER_PLUS_EXPR
2115 || cand_code == MINUS_EXPR
2116 || cand_code == NEGATE_EXPR)
2117 return;
2118
2119 enum tree_code code = PLUS_EXPR;
2120 tree bump_tree;
2121 gimple *stmt_to_print = NULL;
2122
2123 if (wi::neg_p (bump))
2124 {
2125 code = MINUS_EXPR;
2126 bump = -bump;
2127 }
2128
2129 /* It is possible that the resulting bump doesn't fit in target_type.
2130 Abandon the replacement in this case. This does not affect
2131 siblings or dependents of C. */
2132 if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
2133 TYPE_SIGN (target_type)))
2134 return;
2135
2136 bump_tree = wide_int_to_tree (target_type, bump);
2137
2138 /* If the basis name and the candidate's LHS have incompatible types,
2139 introduce a cast. */
2140 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2141 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2142
2143 if (dump_file && (dump_flags & TDF_DETAILS))
2144 {
2145 fputs ("Replacing: ", dump_file);
2146 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2147 }
2148
2149 if (bump == 0)
2150 {
2151 tree lhs = gimple_assign_lhs (c->cand_stmt);
2152 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2153 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2154 slsr_cand_t cc = lookup_cand (c->first_interp);
2155 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2156 gsi_replace (&gsi, copy_stmt, false);
2157 while (cc)
2158 {
2159 cc->cand_stmt = copy_stmt;
2160 cc = lookup_cand (cc->next_interp);
2161 }
2162 if (dump_file && (dump_flags & TDF_DETAILS))
2163 stmt_to_print = copy_stmt;
2164 }
2165 else
2166 {
2167 tree rhs1, rhs2;
2168 if (cand_code != NEGATE_EXPR) {
2169 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2170 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2171 }
2172 if (cand_code != NEGATE_EXPR
2173 && ((operand_equal_p (rhs1, basis_name, 0)
2174 && operand_equal_p (rhs2, bump_tree, 0))
2175 || (operand_equal_p (rhs1, bump_tree, 0)
2176 && operand_equal_p (rhs2, basis_name, 0))))
2177 {
2178 if (dump_file && (dump_flags & TDF_DETAILS))
2179 {
2180 fputs ("(duplicate, not actually replacing)", dump_file);
2181 stmt_to_print = c->cand_stmt;
2182 }
2183 }
2184 else
2185 {
2186 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2187 slsr_cand_t cc = lookup_cand (c->first_interp);
2188 gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
2189 update_stmt (gsi_stmt (gsi));
2190 while (cc)
2191 {
2192 cc->cand_stmt = gsi_stmt (gsi);
2193 cc = lookup_cand (cc->next_interp);
2194 }
2195 if (dump_file && (dump_flags & TDF_DETAILS))
2196 stmt_to_print = gsi_stmt (gsi);
2197 }
2198 }
2199
2200 if (dump_file && (dump_flags & TDF_DETAILS))
2201 {
2202 fputs ("With: ", dump_file);
2203 print_gimple_stmt (dump_file, stmt_to_print, 0);
2204 fputs ("\n", dump_file);
2205 }
2206 }
2207
2208 /* Replace candidate C with an add or subtract. Note that we only
2209 operate on CAND_MULTs with known strides, so we will never generate
2210 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2211 X = Y + ((i - i') * S), as described in the module commentary. The
2212 folded value ((i - i') * S) is referred to here as the "bump." */
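/* As an illustrative example with a constant stride of 4: for a basis
   Y = (B + 2) * 4 and a candidate X = (B + 5) * 4, the bump is
   (5 - 2) * 4 = 12, and the candidate's statement becomes
   X = Y + 12.  */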
2213
2214 static void
2215 replace_unconditional_candidate (slsr_cand_t c)
2216 {
2217 slsr_cand_t basis;
2218
2219 if (cand_already_replaced (c))
2220 return;
2221
2222 basis = lookup_cand (c->basis);
2223 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2224
2225 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2226 }
2227 \f
2228 /* Return the index in the increment vector of the given INCREMENT,
2229 or -1 if not found. The latter can occur if more than
2230 MAX_INCR_VEC_LEN increments have been found. */
2231
2232 static inline int
2233 incr_vec_index (const widest_int &increment)
2234 {
2235 unsigned i;
2236
2237 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2238 ;
2239
2240 if (i < incr_vec_len)
2241 return i;
2242 else
2243 return -1;
2244 }
2245
2246 /* Create a new statement along edge E to add BASIS_NAME to the product
2247 of INCREMENT and the stride of candidate C. Create and return a new
2248 SSA name to be used as the LHS of the new statement.
2249 KNOWN_STRIDE is true iff C's stride is a constant. */
2250
2251 static tree
2252 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2253 widest_int increment, edge e, location_t loc,
2254 bool known_stride)
2255 {
2256 tree lhs, basis_type;
2257 gassign *new_stmt, *cast_stmt = NULL;
2258
2259 /* If the add candidate along this incoming edge has the same
2260 index as C's hidden basis, the hidden basis represents this
2261 edge correctly. */
2262 if (increment == 0)
2263 return basis_name;
2264
2265 basis_type = TREE_TYPE (basis_name);
2266 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2267
2268 /* Occasionally people convert integers to pointers without a
2269 cast, leading us into trouble if we aren't careful. */
2270 enum tree_code plus_code
2271 = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2272
2273 if (known_stride)
2274 {
2275 tree bump_tree;
2276 enum tree_code code = plus_code;
2277 widest_int bump = increment * wi::to_widest (c->stride);
2278 if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2279 {
2280 code = MINUS_EXPR;
2281 bump = -bump;
2282 }
2283
2284 tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2285 bump_tree = wide_int_to_tree (stride_type, bump);
2286 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2287 }
2288 else
2289 {
2290 int i;
2291 bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2292 i = incr_vec_index (negate_incr ? -increment : increment);
2293 gcc_assert (i >= 0);
2294
2295 if (incr_vec[i].initializer)
2296 {
2297 enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2298 new_stmt = gimple_build_assign (lhs, code, basis_name,
2299 incr_vec[i].initializer);
2300 }
2301 else {
2302 tree stride;
2303
2304 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
2305 {
2306 tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
2307 "slsr");
2308 cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
2309 c->stride);
2310 stride = cast_stride;
2311 }
2312 else
2313 stride = c->stride;
2314
2315 if (increment == 1)
2316 new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
2317 else if (increment == -1)
2318 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
2319 else
2320 gcc_unreachable ();
2321 }
2322 }
2323
2324 if (cast_stmt)
2325 {
2326 gimple_set_location (cast_stmt, loc);
2327 gsi_insert_on_edge (e, cast_stmt);
2328 }
2329
2330 gimple_set_location (new_stmt, loc);
2331 gsi_insert_on_edge (e, new_stmt);
2332
2333 if (dump_file && (dump_flags & TDF_DETAILS))
2334 {
2335 if (cast_stmt)
2336 {
2337 fprintf (dump_file, "Inserting cast on edge %d->%d: ",
2338 e->src->index, e->dest->index);
2339 print_gimple_stmt (dump_file, cast_stmt, 0);
2340 }
2341 fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
2342 e->dest->index);
2343 print_gimple_stmt (dump_file, new_stmt, 0);
2344 }
2345
2346 return lhs;
2347 }
2348
2349 /* Clear the visited field for a tree of PHI candidates. */
2350
2351 static void
2352 clear_visited (gphi *phi)
2353 {
2354 unsigned i;
2355 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2356
2357 if (phi_cand->visited)
2358 {
2359 phi_cand->visited = 0;
2360
2361 for (i = 0; i < gimple_phi_num_args (phi); i++)
2362 {
2363 tree arg = gimple_phi_arg_def (phi, i);
2364 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2365 if (gimple_code (arg_def) == GIMPLE_PHI)
2366 clear_visited (as_a <gphi *> (arg_def));
2367 }
2368 }
2369 }
2370
2371 /* Recursive helper function for create_phi_basis. */
2372
2373 static tree
2374 create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
2375 location_t loc, bool known_stride)
2376 {
2377 int i;
2378 tree name, phi_arg;
2379 gphi *phi;
2380 slsr_cand_t basis = lookup_cand (c->basis);
2381 int nargs = gimple_phi_num_args (from_phi);
2382 basic_block phi_bb = gimple_bb (from_phi);
2383 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2384 auto_vec<tree> phi_args (nargs);
2385
2386 if (phi_cand->visited)
2387 return phi_cand->cached_basis;
2388 phi_cand->visited = 1;
2389
2390 /* Process each argument of the existing phi that represents
2391 conditionally-executed add candidates. */
2392 for (i = 0; i < nargs; i++)
2393 {
2394 edge e = (*phi_bb->preds)[i];
2395 tree arg = gimple_phi_arg_def (from_phi, i);
2396 tree feeding_def;
2397
2398 /* If the phi argument is the base name of the CAND_PHI, then
2399 this incoming arc should use the hidden basis. */
2400 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2401 if (basis->index == 0)
2402 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2403 else
2404 {
2405 widest_int incr = -basis->index;
2406 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2407 e, loc, known_stride);
2408 }
2409 else
2410 {
2411 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2412
2413 /* If there is another phi along this incoming edge, we must
2414 process it in the same fashion to ensure that all basis
2415 adjustments are made along its incoming edges. */
2416 if (gimple_code (arg_def) == GIMPLE_PHI)
2417 feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
2418 loc, known_stride);
2419 else
2420 {
2421 slsr_cand_t arg_cand = base_cand_from_table (arg);
2422 widest_int diff = arg_cand->index - basis->index;
2423 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2424 e, loc, known_stride);
2425 }
2426 }
2427
2428 /* Because of recursion, we need to save the arguments in a vector
2429 so we can create the PHI statement all at once. Otherwise the
2430 storage for the half-created PHI can be reclaimed. */
2431 phi_args.safe_push (feeding_def);
2432 }
2433
2434 /* Create the new phi basis. */
2435 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2436 phi = create_phi_node (name, phi_bb);
2437 SSA_NAME_DEF_STMT (name) = phi;
2438
2439 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2440 {
2441 edge e = (*phi_bb->preds)[i];
2442 add_phi_arg (phi, phi_arg, e, loc);
2443 }
2444
2445 update_stmt (phi);
2446
2447 if (dump_file && (dump_flags & TDF_DETAILS))
2448 {
2449 fputs ("Introducing new phi basis: ", dump_file);
2450 print_gimple_stmt (dump_file, phi, 0);
2451 }
2452
2453 phi_cand->cached_basis = name;
2454 return name;
2455 }
2456
2457 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2458 is hidden by the phi node FROM_PHI, create a new phi node in the same
2459 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2460 with its phi arguments representing conditional adjustments to the
2461 hidden basis along conditional incoming paths. Those adjustments are
2462 made by creating add statements (and sometimes recursively creating
2463 phis) along those incoming paths. LOC is the location to attach to
2464 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2465 constant. */
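/* A small illustrative sketch (names invented):  suppose the hidden
   basis is x_1 = y_2 * 4, and the candidate's phi merges a_5 = y_2 + 1
   from one incoming edge with y_2 itself from the other.  The new phi
   receives x_1 on the pass-through edge and a newly inserted x_1 + 4
   (the increment 1 scaled by the stride 4) on the other edge, and the
   candidate is then replaced relative to this new phi.  */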
2466
2467 static tree
2468 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2469 location_t loc, bool known_stride)
2470 {
2471 tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
2472 known_stride);
2473 gcc_assert (retval);
2474 clear_visited (as_a <gphi *> (from_phi));
2475 return retval;
2476 }
2477
2478 /* Given a candidate C whose basis is hidden by at least one intervening
2479 phi, introduce a matching number of new phis to represent its basis
2480 adjusted by conditional increments along possible incoming paths. Then
2481 replace C as though it were an unconditional candidate, using the new
2482 basis. */
2483
2484 static void
2485 replace_conditional_candidate (slsr_cand_t c)
2486 {
2487 tree basis_name, name;
2488 slsr_cand_t basis;
2489 location_t loc;
2490
2491 /* Look up the LHS SSA name from C's basis. This will be the
2492 RHS1 of the adds we will introduce to create new phi arguments. */
2493 basis = lookup_cand (c->basis);
2494 basis_name = gimple_assign_lhs (basis->cand_stmt);
2495
2496 /* Create a new phi statement which will represent C's true basis
2497 after the transformation is complete. */
2498 loc = gimple_location (c->cand_stmt);
2499 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2500 basis_name, loc, KNOWN_STRIDE);
2501
2502 /* Replace C with an add of the new basis phi and a constant. */
2503 widest_int bump = c->index * wi::to_widest (c->stride);
2504
2505 replace_mult_candidate (c, name, bump);
2506 }
2507
2508 /* Recursive helper function for phi_add_costs. SPREAD is a measure of
2509 how many PHI nodes we have visited at this point in the tree walk. */
2510
2511 static int
2512 phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
2513 {
2514 unsigned i;
2515 int cost = 0;
2516 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2517
2518 if (phi_cand->visited)
2519 return 0;
2520
2521 phi_cand->visited = 1;
2522 (*spread)++;
2523
2524 /* If we work our way back to a phi that isn't dominated by the hidden
2525 basis, this isn't a candidate for replacement. Indicate this by
2526 returning an unreasonably high cost. It's not easy to detect
2527 these situations when determining the basis, so we defer the
2528 decision until now. */
2529 basic_block phi_bb = gimple_bb (phi);
2530 slsr_cand_t basis = lookup_cand (c->basis);
2531 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2532
2533 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2534 return COST_INFINITE;
2535
2536 for (i = 0; i < gimple_phi_num_args (phi); i++)
2537 {
2538 tree arg = gimple_phi_arg_def (phi, i);
2539
2540 if (arg != phi_cand->base_expr)
2541 {
2542 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2543
2544 if (gimple_code (arg_def) == GIMPLE_PHI)
2545 {
2546 cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);
2547
2548 if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
2549 return COST_INFINITE;
2550 }
2551 else
2552 {
2553 slsr_cand_t arg_cand = base_cand_from_table (arg);
2554
2555 if (arg_cand->index != c->index)
2556 cost += one_add_cost;
2557 }
2558 }
2559 }
2560
2561 return cost;
2562 }
2563
2564 /* Compute the expected costs of inserting basis adjustments for
2565 candidate C with phi-definition PHI. The cost of inserting
2566 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2567 which are themselves phi results, recursively calculate costs
2568 for those phis as well. */
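/* For example, for a phi with two arguments, neither of which is a
   pass-through of the candidate's base expression or shares the
   candidate's index, the estimated cost is 2 * ONE_ADD_COST, plus any
   costs contributed recursively by feeding phis.  */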
2569
2570 static int
2571 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2572 {
2573 int spread = 0;
2574 int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
2575 clear_visited (as_a <gphi *> (phi));
2576 return retval;
2577 }

2578 /* For candidate C, each sibling of candidate C, and each dependent of
2579 candidate C, determine whether the candidate is dependent upon a
2580 phi that hides its basis. If not, replace the candidate unconditionally.
2581 Otherwise, determine whether the cost of introducing compensation code
2582 for the candidate is offset by the gains from strength reduction. If
2583 so, replace the candidate and introduce the compensation code. */
2584
2585 static void
2586 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2587 {
2588 if (phi_dependent_cand_p (c))
2589 {
2590 /* A multiply candidate with a stride of 1 is just an artifice
2591 of a copy or cast; there is no value in replacing it. */
2592 if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
2593 {
2594 /* A candidate dependent upon a phi will replace a multiply by
2595 a constant with an add, and will insert at most one add for
2596 each phi argument. Add these costs with the potential dead-code
2597 savings to determine profitability. */
2598 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2599 int mult_savings = stmt_cost (c->cand_stmt, speed);
2600 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2601 tree phi_result = gimple_phi_result (phi);
2602 int one_add_cost = add_cost (speed,
2603 TYPE_MODE (TREE_TYPE (phi_result)));
2604 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2605 int cost = add_costs - mult_savings - c->dead_savings;
2606
2607 if (dump_file && (dump_flags & TDF_DETAILS))
2608 {
2609 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2610 fprintf (dump_file, " add_costs = %d\n", add_costs);
2611 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2612 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2613 fprintf (dump_file, " cost = %d\n", cost);
2614 if (cost <= COST_NEUTRAL)
2615 fputs (" Replacing...\n", dump_file);
2616 else
2617 fputs (" Not replaced.\n", dump_file);
2618 }
2619
2620 if (cost <= COST_NEUTRAL)
2621 replace_conditional_candidate (c);
2622 }
2623 }
2624 else
2625 replace_unconditional_candidate (c);
2626
2627 if (c->sibling)
2628 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2629
2630 if (c->dependent)
2631 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2632 }
2633 \f
2634 /* Count the number of candidates in the tree rooted at C that have
2635 not already been replaced under other interpretations. */
2636
2637 static int
2638 count_candidates (slsr_cand_t c)
2639 {
2640 unsigned count = cand_already_replaced (c) ? 0 : 1;
2641
2642 if (c->sibling)
2643 count += count_candidates (lookup_cand (c->sibling));
2644
2645 if (c->dependent)
2646 count += count_candidates (lookup_cand (c->dependent));
2647
2648 return count;
2649 }
2650
2651 /* Increase the count of INCREMENT by one in the increment vector.
2652 INCREMENT is associated with candidate C. If INCREMENT is to be
2653 conditionally executed as part of a conditional candidate replacement,
2654 IS_PHI_ADJUST is true, otherwise false. If an initializer
2655 T_0 = stride * I is provided by a candidate that dominates all
2656 candidates with the same increment, also record T_0 for subsequent use. */
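/* For example, if a dominating CAND_ADD has the form X = B + T_0,
   where T_0 holds the value stride * 3, then T_0 is recorded as an
   existing initializer for the increment 3, and later replacements
   using that increment can reuse it instead of creating a new
   multiply.  */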
2657
2658 static void
2659 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2660 {
2661 bool found = false;
2662 unsigned i;
2663
2664 /* Treat increments that differ only in sign as identical so as to
2665 share initializers, unless we are generating pointer arithmetic. */
2666 if (!address_arithmetic_p && wi::neg_p (increment))
2667 increment = -increment;
2668
2669 for (i = 0; i < incr_vec_len; i++)
2670 {
2671 if (incr_vec[i].incr == increment)
2672 {
2673 incr_vec[i].count++;
2674 found = true;
2675
2676 /* If we previously recorded an initializer that doesn't
2677 dominate this candidate, it's not going to be useful to
2678 us after all. */
2679 if (incr_vec[i].initializer
2680 && !dominated_by_p (CDI_DOMINATORS,
2681 gimple_bb (c->cand_stmt),
2682 incr_vec[i].init_bb))
2683 {
2684 incr_vec[i].initializer = NULL_TREE;
2685 incr_vec[i].init_bb = NULL;
2686 }
2687
2688 break;
2689 }
2690 }
2691
2692 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2693 {
2694 /* The first time we see an increment, create the entry for it.
2695 If this is the root candidate which doesn't have a basis, set
2696 the count to zero. We're only processing it so it can possibly
2697 provide an initializer for other candidates. */
2698 incr_vec[incr_vec_len].incr = increment;
2699 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2700 incr_vec[incr_vec_len].cost = COST_INFINITE;
2701
2702 /* Optimistically record the first occurrence of this increment
2703 as providing an initializer (if it does); we will revise this
2704 opinion later if it doesn't dominate all other occurrences.
2705 Exception: increments of 0, 1 never need initializers;
2706 and phi adjustments don't ever provide initializers. */
2707 if (c->kind == CAND_ADD
2708 && !is_phi_adjust
2709 && c->index == increment
2710 && (increment > 1 || increment < 0)
2711 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2712 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2713 {
2714 tree t0 = NULL_TREE;
2715 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2716 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2717 if (operand_equal_p (rhs1, c->base_expr, 0))
2718 t0 = rhs2;
2719 else if (operand_equal_p (rhs2, c->base_expr, 0))
2720 t0 = rhs1;
2721 if (t0
2722 && SSA_NAME_DEF_STMT (t0)
2723 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2724 {
2725 incr_vec[incr_vec_len].initializer = t0;
2726 incr_vec[incr_vec_len++].init_bb
2727 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2728 }
2729 else
2730 {
2731 incr_vec[incr_vec_len].initializer = NULL_TREE;
2732 incr_vec[incr_vec_len++].init_bb = NULL;
2733 }
2734 }
2735 else
2736 {
2737 incr_vec[incr_vec_len].initializer = NULL_TREE;
2738 incr_vec[incr_vec_len++].init_bb = NULL;
2739 }
2740 }
2741 }
2742
2743 /* Recursive helper function for record_phi_increments. */
2744
2745 static void
2746 record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
2747 {
2748 unsigned i;
2749 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2750
2751 if (phi_cand->visited)
2752 return;
2753 phi_cand->visited = 1;
2754
2755 for (i = 0; i < gimple_phi_num_args (phi); i++)
2756 {
2757 tree arg = gimple_phi_arg_def (phi, i);
2758 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2759
2760 if (gimple_code (arg_def) == GIMPLE_PHI)
2761 record_phi_increments_1 (basis, arg_def);
2762 else
2763 {
2764 widest_int diff;
2765
2766 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2767 {
2768 diff = -basis->index;
2769 record_increment (phi_cand, diff, PHI_ADJUST);
2770 }
2771 else
2772 {
2773 slsr_cand_t arg_cand = base_cand_from_table (arg);
2774 diff = arg_cand->index - basis->index;
2775 record_increment (arg_cand, diff, PHI_ADJUST);
2776 }
2777 }
2778 }
2779 }
2780
2781 /* Given phi statement PHI that hides a candidate from its BASIS, find
2782 the increments along each incoming arc (recursively handling additional
2783 phis that may be present) and record them. These increments are the
2784 difference in index between the index-adjusting statements and the
2785 index of the basis. */
2786
2787 static void
2788 record_phi_increments (slsr_cand_t basis, gimple *phi)
2789 {
2790 record_phi_increments_1 (basis, phi);
2791 clear_visited (as_a <gphi *> (phi));
2792 }
2793
2794 /* Determine how many times each unique increment occurs in the set
2795 of candidates rooted at C's parent, recording the data in the
2796 increment vector. For each unique increment I, if an initializer
2797 T_0 = stride * I is provided by a candidate that dominates all
2798 candidates with the same increment, also record T_0 for subsequent
2799 use. */
2800
2801 static void
2802 record_increments (slsr_cand_t c)
2803 {
2804 if (!cand_already_replaced (c))
2805 {
2806 if (!phi_dependent_cand_p (c))
2807 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2808 else
2809 {
2810 /* A candidate with a basis hidden by a phi will have one
2811 increment for its relationship to the index represented by
2812 the phi, and potentially additional increments along each
2813 incoming edge. For the root of the dependency tree (which
2814 has no basis), process just the initial index in case it has
2815 an initializer that can be used by subsequent candidates. */
2816 record_increment (c, c->index, NOT_PHI_ADJUST);
2817
2818 if (c->basis)
2819 record_phi_increments (lookup_cand (c->basis),
2820 lookup_cand (c->def_phi)->cand_stmt);
2821 }
2822 }
2823
2824 if (c->sibling)
2825 record_increments (lookup_cand (c->sibling));
2826
2827 if (c->dependent)
2828 record_increments (lookup_cand (c->dependent));
2829 }
2830
2831 /* Recursive helper function for phi_incr_cost. */
2832
2833 static int
2834 phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
2835 int *savings)
2836 {
2837 unsigned i;
2838 int cost = 0;
2839 slsr_cand_t basis = lookup_cand (c->basis);
2840 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2841
2842 if (phi_cand->visited)
2843 return 0;
2844 phi_cand->visited = 1;
2845
2846 for (i = 0; i < gimple_phi_num_args (phi); i++)
2847 {
2848 tree arg = gimple_phi_arg_def (phi, i);
2849 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2850
2851 if (gimple_code (arg_def) == GIMPLE_PHI)
2852 {
2853 int feeding_savings = 0;
2854 tree feeding_var = gimple_phi_result (arg_def);
2855 cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
2856 if (uses_consumed_by_stmt (feeding_var, phi))
2857 *savings += feeding_savings;
2858 }
2859 else
2860 {
2861 widest_int diff;
2862 slsr_cand_t arg_cand;
2863
2864 /* When the PHI argument is just a pass-through to the base
2865 expression of the hidden basis, the difference is zero minus
2866 the index of the basis. There is no potential savings by
2867 eliminating a statement in this case. */
2868 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2869 {
2870 arg_cand = (slsr_cand_t)NULL;
2871 diff = -basis->index;
2872 }
2873 else
2874 {
2875 arg_cand = base_cand_from_table (arg);
2876 diff = arg_cand->index - basis->index;
2877 }
2878
2879 if (incr == diff)
2880 {
2881 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2882 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2883 if (arg_cand)
2884 {
2885 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2886 if (uses_consumed_by_stmt (lhs, phi))
2887 *savings += stmt_cost (arg_cand->cand_stmt, true);
2888 }
2889 }
2890 }
2891 }
2892
2893 return cost;
2894 }
2895
2896 /* Add up and return the costs of introducing add statements that
2897 require the increment INCR on behalf of candidate C and phi
2898 statement PHI. Accumulate into *SAVINGS the potential savings
2899 from removing existing statements that feed PHI and have no other
2900 uses. */
2901
2902 static int
2903 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2904 int *savings)
2905 {
2906 int retval = phi_incr_cost_1 (c, incr, phi, savings);
2907 clear_visited (as_a <gphi *> (phi));
2908 return retval;
2909 }
2910
2911 /* Return the first candidate in the tree rooted at C that has not
2912 already been replaced, favoring siblings over dependents. */
2913
2914 static slsr_cand_t
2915 unreplaced_cand_in_tree (slsr_cand_t c)
2916 {
2917 if (!cand_already_replaced (c))
2918 return c;
2919
2920 if (c->sibling)
2921 {
2922 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2923 if (sib)
2924 return sib;
2925 }
2926
2927 if (c->dependent)
2928 {
2929 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2930 if (dep)
2931 return dep;
2932 }
2933
2934 return NULL;
2935 }
2936
2937 /* Return TRUE if the candidates in the tree rooted at C should be
2938 optimized for speed, else FALSE. We estimate this based on the block
2939 containing the most dominant candidate in the tree that has not yet
2940 been replaced. */
2941
2942 static bool
2943 optimize_cands_for_speed_p (slsr_cand_t c)
2944 {
2945 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2946 gcc_assert (c2);
2947 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2948 }
2949
2950 /* Add COST_IN to the lowest cost of any dependent path starting at
2951 candidate C or any of its siblings, counting only candidates along
2952 such paths with increment INCR. Assume that replacing a candidate
2953 reduces cost by REPL_SAVINGS. Also account for savings from any
2954 statements that would go dead. If COUNT_PHIS is true, include
2955 costs of introducing feeding statements for conditional candidates. */
2956
2957 static int
2958 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2959 const widest_int &incr, bool count_phis)
2960 {
2961 int local_cost, sib_cost, savings = 0;
2962 widest_int cand_incr = cand_abs_increment (c);
2963
2964 if (cand_already_replaced (c))
2965 local_cost = cost_in;
2966 else if (incr == cand_incr)
2967 local_cost = cost_in - repl_savings - c->dead_savings;
2968 else
2969 local_cost = cost_in - c->dead_savings;
2970
2971 if (count_phis
2972 && phi_dependent_cand_p (c)
2973 && !cand_already_replaced (c))
2974 {
2975 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2976 local_cost += phi_incr_cost (c, incr, phi, &savings);
2977
2978 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2979 local_cost -= savings;
2980 }
2981
2982 if (c->dependent)
2983 local_cost = lowest_cost_path (local_cost, repl_savings,
2984 lookup_cand (c->dependent), incr,
2985 count_phis);
2986
2987 if (c->sibling)
2988 {
2989 sib_cost = lowest_cost_path (cost_in, repl_savings,
2990 lookup_cand (c->sibling), incr,
2991 count_phis);
2992 local_cost = MIN (local_cost, sib_cost);
2993 }
2994
2995 return local_cost;
2996 }
2997
2998 /* Compute the total savings that would accrue from all replacements
2999 in the candidate tree rooted at C, counting only candidates with
3000 increment INCR. Assume that replacing a candidate reduces cost
3001 by REPL_SAVINGS. Also account for savings from statements that
3002 would go dead. */
3003
3004 static int
3005 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
3006 bool count_phis)
3007 {
3008 int savings = 0;
3009 widest_int cand_incr = cand_abs_increment (c);
3010
3011 if (incr == cand_incr && !cand_already_replaced (c))
3012 savings += repl_savings + c->dead_savings;
3013
3014 if (count_phis
3015 && phi_dependent_cand_p (c)
3016 && !cand_already_replaced (c))
3017 {
3018 int phi_savings = 0;
3019 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3020 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
3021
3022 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
3023 savings += phi_savings;
3024 }
3025
3026 if (c->dependent)
3027 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
3028 count_phis);
3029
3030 if (c->sibling)
3031 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
3032 count_phis);
3033
3034 return savings;
3035 }
3036
3037 /* Use target-specific costs to determine and record which increments
3038 in the current candidate tree are profitable to replace, assuming
3039 MODE and SPEED. FIRST_DEP is the first dependent of the root of
3040 the candidate tree.
3041
3042 One slight limitation here is that we don't account for the possible
3043 introduction of casts in some cases. See replace_one_candidate for
3044 the cases where these are introduced. This should probably be cleaned
3045 up sometime. */
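/* As a purely illustrative calculation (the real numbers come from the
   target cost model): suppose inserting the initializer T_0 = stride * 5
   costs 16, and each replaced multiply saves 16 - 4 = 12 relative to
   the add that replaces it.  Ignoring any dead-code savings, a
   dependency path containing three candidates with increment 5 then
   yields a net cost of 16 - 3 * 12 = -20 when optimizing for speed,
   well below COST_NEUTRAL, so the increment is treated as profitable.  */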
3046
3047 static void
3048 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
3049 {
3050 unsigned i;
3051
3052 for (i = 0; i < incr_vec_len; i++)
3053 {
3054 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
3055
3056 /* If somehow this increment is bigger than a HWI, we won't
3057 be optimizing candidates that use it. And if the increment
3058 has a count of zero, nothing will be done with it. */
3059 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
3060 incr_vec[i].cost = COST_INFINITE;
3061
3062 /* Increments of 0, 1, and -1 are always profitable to replace,
3063 because they always replace a multiply or add with an add or
3064 copy, and may cause one or more existing instructions to go
3065 dead. Exception: -1 can't be assumed to be profitable for
3066 pointer addition. */
3067 else if (incr == 0
3068 || incr == 1
3069 || (incr == -1
3070 && !POINTER_TYPE_P (first_dep->cand_type)))
3071 incr_vec[i].cost = COST_NEUTRAL;
3072
3073 /* If we need to add an initializer, give up if a cast from the
3074 candidate's type to its stride's type can lose precision.
3075 Note that this already takes into account that the stride may
3076 have been cast to a wider type, in which case this test won't
3077 fire. Example:
3078
3079 short int _1;
3080 _2 = (int) _1;
3081 _3 = _2 * 10;
3082 _4 = x + _3; ADD: x + (10 * (int)_1) : int
3083 _5 = _2 * 15;
3084 _6 = x + _5; ADD: x + (15 * (int)_1) : int
3085
3086 Although the stride was a short int initially, the stride
3087 used in the analysis has been widened to an int, and such
3088 widening will be done in the initializer as well. */
3089 else if (!incr_vec[i].initializer
3090 && TREE_CODE (first_dep->stride) != INTEGER_CST
3091 && !legal_cast_p_1 (first_dep->stride_type,
3092 TREE_TYPE (gimple_assign_lhs
3093 (first_dep->cand_stmt))))
3094 incr_vec[i].cost = COST_INFINITE;
3095
3096 /* If we need to add an initializer, make sure we don't introduce
3097 a multiply by a pointer type, which can happen in certain cast
3098 scenarios. */
3099 else if (!incr_vec[i].initializer
3100 && TREE_CODE (first_dep->stride) != INTEGER_CST
3101 && POINTER_TYPE_P (first_dep->stride_type))
3102 incr_vec[i].cost = COST_INFINITE;
3103
3104 /* For any other increment, if this is a multiply candidate, we
3105 must introduce a temporary T and initialize it with
3106 T_0 = stride * increment. When optimizing for speed, walk the
3107 candidate tree to calculate the best cost reduction along any
3108 path; if it offsets the fixed cost of inserting the initializer,
3109 replacing the increment is profitable. When optimizing for
3110 size, instead calculate the total cost reduction from replacing
3111 all candidates with this increment. */
3112 else if (first_dep->kind == CAND_MULT)
3113 {
3114 int cost = mult_by_coeff_cost (incr, mode, speed);
3115 int repl_savings;
3116
3117 if (tree_fits_shwi_p (first_dep->stride))
3118 {
3119 HOST_WIDE_INT hwi_stride = tree_to_shwi (first_dep->stride);
3120 repl_savings = mult_by_coeff_cost (hwi_stride, mode, speed);
3121 }
3122 else
3123 repl_savings = mul_cost (speed, mode);
3124 repl_savings -= add_cost (speed, mode);
3125
3126 if (speed)
3127 cost = lowest_cost_path (cost, repl_savings, first_dep,
3128 incr_vec[i].incr, COUNT_PHIS);
3129 else
3130 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
3131 COUNT_PHIS);
3132
3133 incr_vec[i].cost = cost;
3134 }
3135
3136 /* If this is an add candidate, the initializer may already
3137 exist, so only calculate the cost of the initializer if it
3138 doesn't. We are replacing one add with another here, so the
3139 known replacement savings is zero. We will account for removal
3140 of dead instructions in lowest_cost_path or total_savings. */
3141 else
3142 {
3143 int cost = 0;
3144 if (!incr_vec[i].initializer)
3145 cost = mult_by_coeff_cost (incr, mode, speed);
3146
3147 if (speed)
3148 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
3149 DONT_COUNT_PHIS);
3150 else
3151 cost -= total_savings (0, first_dep, incr_vec[i].incr,
3152 DONT_COUNT_PHIS);
3153
3154 incr_vec[i].cost = cost;
3155 }
3156 }
3157 }
3158
3159 /* Return the nearest common dominator of BB1 and BB2. If the blocks
3160 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
3161 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
3162 return C2 in *WHERE; and if the NCD matches neither, return NULL in
3163 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
3164
3165 static basic_block
3166 ncd_for_two_cands (basic_block bb1, basic_block bb2,
3167 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
3168 {
3169 basic_block ncd;
3170
3171 if (!bb1)
3172 {
3173 *where = c2;
3174 return bb2;
3175 }
3176
3177 if (!bb2)
3178 {
3179 *where = c1;
3180 return bb1;
3181 }
3182
3183 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3184
3185 /* If both candidates are in the same block, the earlier
3186 candidate wins. */
3187 if (bb1 == ncd && bb2 == ncd)
3188 {
3189 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3190 *where = c2;
3191 else
3192 *where = c1;
3193 }
3194
3195 /* Otherwise, if one of them produced a candidate in the
3196 dominator, that one wins. */
3197 else if (bb1 == ncd)
3198 *where = c1;
3199
3200 else if (bb2 == ncd)
3201 *where = c2;
3202
3203 /* If neither matches the dominator, neither wins. */
3204 else
3205 *where = NULL;
3206
3207 return ncd;
3208 }
3209
3210 /* Consider all candidates that feed PHI. Find the nearest common
3211 dominator of those candidates requiring the given increment INCR.
3212 Further find and return the nearest common dominator of this result
3213 with block NCD. If the returned block contains one or more of the
3214 candidates, return the earliest candidate in the block in *WHERE. */
3215
3216 static basic_block
3217 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3218 basic_block ncd, slsr_cand_t *where)
3219 {
3220 unsigned i;
3221 slsr_cand_t basis = lookup_cand (c->basis);
3222 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3223
3224 for (i = 0; i < gimple_phi_num_args (phi); i++)
3225 {
3226 tree arg = gimple_phi_arg_def (phi, i);
3227 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3228
3229 if (gimple_code (arg_def) == GIMPLE_PHI)
3230 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd, where);
3231 else
3232 {
3233 widest_int diff;
3234
3235 if (operand_equal_p (arg, phi_cand->base_expr, 0))
3236 diff = -basis->index;
3237 else
3238 {
3239 slsr_cand_t arg_cand = base_cand_from_table (arg);
3240 diff = arg_cand->index - basis->index;
3241 }
3242
3243 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3244
3245 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3246 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3247 }
3248 }
3249
3250 return ncd;
3251 }
3252
3253 /* Consider the candidate C together with any candidates that feed
3254 C's phi dependence (if any). Find and return the nearest common
3255 dominator of those candidates requiring the given increment INCR.
3256 If the returned block contains one or more of the candidates,
3257 return the earliest candidate in the block in *WHERE. */
3258
3259 static basic_block
3260 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3261 {
3262 basic_block ncd = NULL;
3263
3264 if (cand_abs_increment (c) == incr)
3265 {
3266 ncd = gimple_bb (c->cand_stmt);
3267 *where = c;
3268 }
3269
3270 if (phi_dependent_cand_p (c))
3271 ncd = ncd_with_phi (c, incr,
3272 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3273 ncd, where);
3274
3275 return ncd;
3276 }
3277
3278 /* Consider all candidates in the tree rooted at C for which INCR
3279 represents the required increment of C relative to its basis.
3280 Find and return the basic block that most nearly dominates all
3281 such candidates. If the returned block contains one or more of
3282 the candidates, return the earliest candidate in the block in
3283 *WHERE. */
3284
3285 static basic_block
3286 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3287 slsr_cand_t *where)
3288 {
3289 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3290 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3291
3292 /* First find the NCD of all siblings and dependents. */
3293 if (c->sibling)
3294 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3295 incr, &sib_where);
3296 if (c->dependent)
3297 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3298 incr, &dep_where);
3299 if (!sib_ncd && !dep_ncd)
3300 {
3301 new_where = NULL;
3302 ncd = NULL;
3303 }
3304 else if (sib_ncd && !dep_ncd)
3305 {
3306 new_where = sib_where;
3307 ncd = sib_ncd;
3308 }
3309 else if (dep_ncd && !sib_ncd)
3310 {
3311 new_where = dep_where;
3312 ncd = dep_ncd;
3313 }
3314 else
3315 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3316 dep_where, &new_where);
3317
3318 /* If the candidate's increment doesn't match the one we're interested
3319 in (and neither do any increments for feeding defs of a phi-dependence),
3320 then the result depends only on siblings and dependents. */
3321 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3322
3323 if (!this_ncd || cand_already_replaced (c))
3324 {
3325 *where = new_where;
3326 return ncd;
3327 }
3328
3329 /* Otherwise, compare this candidate with the result from all siblings
3330 and dependents. */
3331 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3332
3333 return ncd;
3334 }
3335
3336 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3337
3338 static inline bool
3339 profitable_increment_p (unsigned index)
3340 {
3341 return (incr_vec[index].cost <= COST_NEUTRAL);
3342 }
3343
3344 /* For each profitable increment in the increment vector not equal to
3345 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3346 dominator of all statements in the candidate chain rooted at C
3347 that require that increment, and insert an initializer
3348 T_0 = stride * increment at that location. Record T_0 with the
3349 increment record. */
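/* For instance (an illustrative sketch), if increment 5 is profitable
   for several candidates whose recorded stride is _6, an initializer
   of the form

       slsr_7 = _6 * 5;

   is inserted at the nearest common dominator of those candidates
   (unless a suitable existing initializer was already recorded), and
   is then available as the addend when those candidates are
   replaced.  */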
3350
3351 static void
3352 insert_initializers (slsr_cand_t c)
3353 {
3354 unsigned i;
3355
3356 for (i = 0; i < incr_vec_len; i++)
3357 {
3358 basic_block bb;
3359 slsr_cand_t where = NULL;
3360 gassign *init_stmt;
3361 gassign *cast_stmt = NULL;
3362 tree new_name, incr_tree, init_stride;
3363 widest_int incr = incr_vec[i].incr;
3364
3365 if (!profitable_increment_p (i)
3366 || incr == 1
3367 || (incr == -1
3368 && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
3369 || incr == 0)
3370 continue;
3371
3372 /* We may have already identified an existing initializer that
3373 will suffice. */
3374 if (incr_vec[i].initializer)
3375 {
3376 if (dump_file && (dump_flags & TDF_DETAILS))
3377 {
3378 fputs ("Using existing initializer: ", dump_file);
3379 print_gimple_stmt (dump_file,
3380 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3381 0, TDF_NONE);
3382 }
3383 continue;
3384 }
3385
3386 /* Find the block that most closely dominates all candidates
3387 with this increment. If there is at least one candidate in
3388 that block, the earliest one will be returned in WHERE. */
3389 bb = nearest_common_dominator_for_cands (c, incr, &where);
3390
3391 /* If the NCD is not dominated by the block containing the
3392 definition of the stride, we can't legally insert a
3393 single initializer. Mark the increment as unprofitable
3394 so we don't make any replacements. FIXME: Multiple
3395 initializers could be placed with more analysis. */
3396 gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
3397 basic_block stride_bb = gimple_bb (stride_def);
3398
3399 if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
3400 {
3401 if (dump_file && (dump_flags & TDF_DETAILS))
3402 fprintf (dump_file,
3403 "Initializer #%d cannot be legally placed\n", i);
3404 incr_vec[i].cost = COST_INFINITE;
3405 continue;
3406 }
3407
3408 /* If the nominal stride has a different type than the recorded
3409 stride type, build a cast from the nominal stride to that type. */
3410 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
3411 {
3412 init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3413 cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
3414 }
3415 else
3416 init_stride = c->stride;
3417
3418 /* Create a new SSA name to hold the initializer's value. */
3419 new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3420 incr_vec[i].initializer = new_name;
3421
3422 /* Create the initializer and insert it in the latest possible
3423 dominating position. */
3424 incr_tree = wide_int_to_tree (c->stride_type, incr);
3425 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3426 init_stride, incr_tree);
3427 if (where)
3428 {
3429 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3430 location_t loc = gimple_location (where->cand_stmt);
3431
3432 if (cast_stmt)
3433 {
3434 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3435 gimple_set_location (cast_stmt, loc);
3436 }
3437
3438 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3439 gimple_set_location (init_stmt, loc);
3440 }
3441 else
3442 {
3443 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3444 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3445 location_t loc = gimple_location (basis_stmt);
3446
3447 if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
3448 {
3449 if (cast_stmt)
3450 {
3451 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3452 gimple_set_location (cast_stmt, loc);
3453 }
3454 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3455 }
3456 else
3457 {
3458 if (cast_stmt)
3459 {
3460 gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
3461 gimple_set_location (cast_stmt, loc);
3462 }
3463 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
3464 }
3465
3466 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3467 }
3468
3469 if (dump_file && (dump_flags & TDF_DETAILS))
3470 {
3471 if (cast_stmt)
3472 {
3473 fputs ("Inserting stride cast: ", dump_file);
3474 print_gimple_stmt (dump_file, cast_stmt, 0);
3475 }
3476 fputs ("Inserting initializer: ", dump_file);
3477 print_gimple_stmt (dump_file, init_stmt, 0);
3478 }
3479 }
3480 }
3481
3482 /* Recursive helper function for all_phi_incrs_profitable. */
3483
3484 static bool
3485 all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
3486 {
3487 unsigned i;
3488 slsr_cand_t basis = lookup_cand (c->basis);
3489 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3490
3491 if (phi_cand->visited)
3492 return true;
3493
3494 phi_cand->visited = 1;
3495 (*spread)++;
3496
3497 /* If the basis doesn't dominate the PHI (including when the PHI is
3498 in the same block as the basis), we won't be able to create a PHI
3499 using the basis here. */
3500 basic_block basis_bb = gimple_bb (basis->cand_stmt);
3501 basic_block phi_bb = gimple_bb (phi);
3502
3503 if (phi_bb == basis_bb
3504 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
3505 return false;
3506
3507 for (i = 0; i < gimple_phi_num_args (phi); i++)
3508 {
3509 /* If the PHI arg resides in a block not dominated by the basis,
3510 we won't be able to create a PHI using the basis here. */
3511 basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;
3512
3513 if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
3514 return false;
3515
3516 tree arg = gimple_phi_arg_def (phi, i);
3517 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3518
3519 if (gimple_code (arg_def) == GIMPLE_PHI)
3520 {
3521 if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def), spread)
3522 || *spread > MAX_SPREAD)
3523 return false;
3524 }
3525 else
3526 {
3527 int j;
3528 widest_int increment;
3529
3530 if (operand_equal_p (arg, phi_cand->base_expr, 0))
3531 increment = -basis->index;
3532 else
3533 {
3534 slsr_cand_t arg_cand = base_cand_from_table (arg);
3535 increment = arg_cand->index - basis->index;
3536 }
3537
3538 if (!address_arithmetic_p && wi::neg_p (increment))
3539 increment = -increment;
3540
3541 j = incr_vec_index (increment);
3542
3543 if (dump_file && (dump_flags & TDF_DETAILS))
3544 {
3545 fprintf (dump_file, " Conditional candidate %d, phi: ",
3546 c->cand_num);
3547 print_gimple_stmt (dump_file, phi, 0);
3548 fputs (" increment: ", dump_file);
3549 print_decs (increment, dump_file);
3550 if (j < 0)
3551 fprintf (dump_file,
3552 "\n Not replaced; incr_vec overflow.\n");
3553 else {
3554 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3555 if (profitable_increment_p (j))
3556 fputs (" Replacing...\n", dump_file);
3557 else
3558 fputs (" Not replaced.\n", dump_file);
3559 }
3560 }
3561
3562 if (j < 0 || !profitable_increment_p (j))
3563 return false;
3564 }
3565 }
3566
3567 return true;
3568 }
3569
3570 /* Return TRUE iff all required increments for candidates feeding PHI
3571 are profitable (and legal!) to replace on behalf of candidate C. */
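/* The helper above marks each PHI candidate it visits so that shared
   feeding PHIs are examined only once; the marks are cleared again here
   so a later query on behalf of another candidate starts fresh.  */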
3572
3573 static bool
3574 all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
3575 {
3576 int spread = 0;
3577 bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
3578 clear_visited (phi);
3579 return retval;
3580 }
3581
3582 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3583 type TO_TYPE, and insert it in front of the statement represented
3584 by candidate C. Return the new SSA name. */
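/* Illustrative sketch (SSA names are placeholders): with FROM_EXPR being
   an initializer T_0 whose type differs from the candidate's rhs2 type,
   this emits

       slsr_9 = (TO_TYPE) T_0;

   immediately before C's statement and returns slsr_9 for use in the
   rewritten candidate.  */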
3586
3587 static tree
3588 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3589 {
3590 tree cast_lhs;
3591 gassign *cast_stmt;
3592 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3593
3594 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3595 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3596 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3597 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3598
3599 if (dump_file && (dump_flags & TDF_DETAILS))
3600 {
3601 fputs (" Inserting: ", dump_file);
3602 print_gimple_stmt (dump_file, cast_stmt, 0);
3603 }
3604
3605 return cast_lhs;
3606 }
3607
3608 /* Replace the RHS of the statement represented by candidate C with
3609 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3610 leave C unchanged or just interchange its operands. The original
3611 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3612 If the replacement was made and we are doing a details dump,
3613 return the revised statement, else NULL. */
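/* For instance (names are placeholders), if the candidate statement is
   already

       x_5 = t_8 + y_2;

   then a request to install PLUS_EXPR with operands y_2 and t_8 merely
   interchanges the operands, so the statement is left alone and NULL is
   returned.  */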
3614
3615 static gimple *
3616 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3617 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3618 slsr_cand_t c)
3619 {
3620 if (new_code != old_code
3621 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3622 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3623 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3624 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3625 {
3626 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3627 slsr_cand_t cc = lookup_cand (c->first_interp);
3628 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3629 update_stmt (gsi_stmt (gsi));
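/* The statement may have several interpretations in the candidate table;
   point every entry in its interpretation chain at the updated statement.  */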
3630 while (cc)
3631 {
3632 cc->cand_stmt = gsi_stmt (gsi);
3633 cc = lookup_cand (cc->next_interp);
3634 }
3635
3636 if (dump_file && (dump_flags & TDF_DETAILS))
3637 return gsi_stmt (gsi);
3638 }
3639
3640 else if (dump_file && (dump_flags & TDF_DETAILS))
3641 fputs (" (duplicate, not actually replacing)\n", dump_file);
3642
3643 return NULL;
3644 }
3645
3646 /* Strength-reduce the statement represented by candidate C by replacing
3647 it with an equivalent addition or subtraction. I is the index into
3648 the increment vector identifying C's increment. BASIS_NAME is the
3649 rhs1 to use in creating the add/subtract. */
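/* Illustrative sketch (SSA names are placeholders): if the basis value
   for C is t_4 and the stride is s_5, then

       increment  1   rewrites C's statement as  x_1 = t_4 + s_5
       increment -1   rewrites it as             x_1 = t_4 - s_5
       increment  0   rewrites it as a copy      x_1 = t_4
                      (or a conversion when the types differ)

   and any other profitable increment uses its initializer T_0, giving
   x_1 = t_4 + T_0, or x_1 = t_4 - T_0 when C's true increment is the
   negation of the one recorded in incr_vec.  */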
3651
3652 static void
3653 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3654 {
3655 gimple *stmt_to_print = NULL;
3656 tree orig_rhs1, orig_rhs2;
3657 tree rhs2;
3658 enum tree_code orig_code, repl_code;
3659 widest_int cand_incr;
3660
3661 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3662 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3663 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3664 cand_incr = cand_increment (c);
3665
3666 /* If orig_rhs2 is NULL, we have already replaced this in situ with
3667 a copy statement under another interpretation. */
3668 if (!orig_rhs2)
3669 return;
3670
3671 if (dump_file && (dump_flags & TDF_DETAILS))
3672 {
3673 fputs ("Replacing: ", dump_file);
3674 print_gimple_stmt (dump_file, c->cand_stmt, 0);
3675 stmt_to_print = c->cand_stmt;
3676 }
3677
3678 if (address_arithmetic_p)
3679 repl_code = POINTER_PLUS_EXPR;
3680 else
3681 repl_code = PLUS_EXPR;
3682
3683 /* If the increment has an initializer T_0, replace the candidate
3684 statement with an add of the basis name and the initializer. */
3685 if (incr_vec[i].initializer)
3686 {
3687 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3688 tree orig_type = TREE_TYPE (orig_rhs2);
3689
3690 if (types_compatible_p (orig_type, init_type))
3691 rhs2 = incr_vec[i].initializer;
3692 else
3693 rhs2 = introduce_cast_before_cand (c, orig_type,
3694 incr_vec[i].initializer);
3695
3696 if (incr_vec[i].incr != cand_incr)
3697 {
3698 gcc_assert (repl_code == PLUS_EXPR);
3699 repl_code = MINUS_EXPR;
3700 }
3701
3702 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3703 orig_code, orig_rhs1, orig_rhs2,
3704 c);
3705 }
3706
3707 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3708 with a subtract of the stride from the basis name, a copy
3709 from the basis name, or an add of the stride to the basis
3710 name, respectively. It may be necessary to introduce a
3711 cast (or reuse an existing cast). */
3712 else if (cand_incr == 1)
3713 {
3714 tree stride_type = TREE_TYPE (c->stride);
3715 tree orig_type = TREE_TYPE (orig_rhs2);
3716
3717 if (types_compatible_p (orig_type, stride_type))
3718 rhs2 = c->stride;
3719 else
3720 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3721
3722 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3723 orig_code, orig_rhs1, orig_rhs2,
3724 c);
3725 }
3726
3727 else if (cand_incr == -1)
3728 {
3729 tree stride_type = TREE_TYPE (c->stride);
3730 tree orig_type = TREE_TYPE (orig_rhs2);
3731 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3732
3733 if (types_compatible_p (orig_type, stride_type))
3734 rhs2 = c->stride;
3735 else
3736 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3737
3738 if (orig_code != MINUS_EXPR
3739 || !operand_equal_p (basis_name, orig_rhs1, 0)
3740 || !operand_equal_p (rhs2, orig_rhs2, 0))
3741 {
3742 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3743 slsr_cand_t cc = lookup_cand (c->first_interp);
3744 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3745 update_stmt (gsi_stmt (gsi));
3746 while (cc)
3747 {
3748 cc->cand_stmt = gsi_stmt (gsi);
3749 cc = lookup_cand (cc->next_interp);
3750 }
3751
3752 if (dump_file && (dump_flags & TDF_DETAILS))
3753 stmt_to_print = gsi_stmt (gsi);
3754 }
3755 else if (dump_file && (dump_flags & TDF_DETAILS))
3756 fputs (" (duplicate, not actually replacing)\n", dump_file);
3757 }
3758
3759 else if (cand_incr == 0)
3760 {
3761 tree lhs = gimple_assign_lhs (c->cand_stmt);
3762 tree lhs_type = TREE_TYPE (lhs);
3763 tree basis_type = TREE_TYPE (basis_name);
3764
3765 if (types_compatible_p (lhs_type, basis_type))
3766 {
3767 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3768 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3769 slsr_cand_t cc = lookup_cand (c->first_interp);
3770 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3771 gsi_replace (&gsi, copy_stmt, false);
3772 while (cc)
3773 {
3774 cc->cand_stmt = copy_stmt;
3775 cc = lookup_cand (cc->next_interp);
3776 }
3777
3778 if (dump_file && (dump_flags & TDF_DETAILS))
3779 stmt_to_print = copy_stmt;
3780 }
3781 else
3782 {
3783 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3784 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3785 slsr_cand_t cc = lookup_cand (c->first_interp);
3786 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3787 gsi_replace (&gsi, cast_stmt, false);
3788 while (cc)
3789 {
3790 cc->cand_stmt = cast_stmt;
3791 cc = lookup_cand (cc->next_interp);
3792 }
3793
3794 if (dump_file && (dump_flags & TDF_DETAILS))
3795 stmt_to_print = cast_stmt;
3796 }
3797 }
3798 else
3799 gcc_unreachable ();
3800
3801 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3802 {
3803 fputs ("With: ", dump_file);
3804 print_gimple_stmt (dump_file, stmt_to_print, 0);
3805 fputs ("\n", dump_file);
3806 }
3807 }
3808
3809 /* For each candidate in the tree rooted at C, replace it with
3810 an increment if such has been shown to be profitable. */
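/* A phi-dependent candidate is handled, when all_phi_incrs_profitable
   agrees, by first building a new basis PHI with create_phi_basis, which
   places compensating adds on the incoming edges, and then replacing the
   candidate against that PHI's result; all other candidates are replaced
   directly against the LHS of their basis.  */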
3811
3812 static void
3813 replace_profitable_candidates (slsr_cand_t c)
3814 {
3815 if (!cand_already_replaced (c))
3816 {
3817 widest_int increment = cand_abs_increment (c);
3818 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3819 int i;
3820
3821 i = incr_vec_index (increment);
3822
3823 /* Only process profitable increments. Nothing useful can be done
3824 to a cast or copy. */
3825 if (i >= 0
3826 && profitable_increment_p (i)
3827 && orig_code != SSA_NAME
3828 && !CONVERT_EXPR_CODE_P (orig_code))
3829 {
3830 if (phi_dependent_cand_p (c))
3831 {
3832 gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);
3833
3834 if (all_phi_incrs_profitable (c, phi))
3835 {
3836 /* Look up the LHS SSA name from C's basis. This will be
3837 the RHS1 of the adds we will introduce to create new
3838 phi arguments. */
3839 slsr_cand_t basis = lookup_cand (c->basis);
3840 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3841
3842 /* Create a new phi statement that will represent C's true
3843 basis after the transformation is complete. */
3844 location_t loc = gimple_location (c->cand_stmt);
3845 tree name = create_phi_basis (c, phi, basis_name,
3846 loc, UNKNOWN_STRIDE);
3847
3848 /* Replace C with an add of the new basis phi and the
3849 increment. */
3850 replace_one_candidate (c, i, name);
3851 }
3852 }
3853 else
3854 {
3855 slsr_cand_t basis = lookup_cand (c->basis);
3856 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3857 replace_one_candidate (c, i, basis_name);
3858 }
3859 }
3860 }
3861
3862 if (c->sibling)
3863 replace_profitable_candidates (lookup_cand (c->sibling));
3864
3865 if (c->dependent)
3866 replace_profitable_candidates (lookup_cand (c->dependent));
3867 }
3868 \f
3869 /* Analyze costs of related candidates in the candidate vector,
3870 and make beneficial replacements. */
3871
3872 static void
3873 analyze_candidates_and_replace (void)
3874 {
3875 unsigned i;
3876 slsr_cand_t c;
3877
3878 /* Each candidate that has a null basis and a non-null
3879 dependent is the root of a tree of related statements.
3880 Analyze each tree to determine a subset of those
3881 statements that can be replaced with maximum benefit.
3882
3883 Note the first NULL element is skipped. */
3884 FOR_EACH_VEC_ELT_FROM (cand_vec, i, c, 1)
3885 {
3886 slsr_cand_t first_dep;
3887
3888 if (c->basis != 0 || c->dependent == 0)
3889 continue;
3890
3891 if (dump_file && (dump_flags & TDF_DETAILS))
3892 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3893 c->cand_num);
3894
3895 first_dep = lookup_cand (c->dependent);
3896
3897 /* If this is a chain of CAND_REFs, unconditionally replace
3898 each of them with a strength-reduced data reference. */
3899 if (c->kind == CAND_REF)
3900 replace_refs (c);
3901
3902 /* If the common stride of all related candidates is a known
3903 constant, each candidate without a phi-dependence can be
3904 profitably replaced. Each replaces a multiply by a single
3905 add, with the possibility that a feeding add also goes dead.
3906 A candidate with a phi-dependence is replaced only if the
3907 compensation code it requires is offset by the strength
3908 reduction savings. */
3909 else if (TREE_CODE (c->stride) == INTEGER_CST)
3910 replace_uncond_cands_and_profitable_phis (first_dep);
3911
3912 /* When the stride is an SSA name, it may still be profitable
3913 to replace some or all of the dependent candidates, depending
3914 on whether the introduced increments can be reused, or are
3915 less expensive to calculate than the replaced statements. */
3916 else
3917 {
3918 machine_mode mode;
3919 bool speed;
3920
3921 /* Determine whether we'll be generating pointer arithmetic
3922 when replacing candidates. */
3923 address_arithmetic_p = (c->kind == CAND_ADD
3924 && POINTER_TYPE_P (c->cand_type));
3925
3926 /* If all candidates have already been replaced under other
3927 interpretations, nothing remains to be done. */
3928 if (!count_candidates (c))
3929 continue;
3930
3931 /* Construct an array of increments for this candidate chain. */
3932 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3933 incr_vec_len = 0;
3934 record_increments (c);
3935
3936 /* Determine which increments are profitable to replace. */
3937 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3938 speed = optimize_cands_for_speed_p (c);
3939 analyze_increments (first_dep, mode, speed);
3940
3941 /* Insert initializers of the form T_0 = stride * increment
3942 for use in profitable replacements. */
3943 insert_initializers (first_dep);
3944 dump_incr_vec ();
3945
3946 /* Perform the replacements. */
3947 replace_profitable_candidates (first_dep);
3948 free (incr_vec);
3949 }
3950 }
3951
3952 /* For conditional candidates, we may have uncommitted insertions
3953 on edges to clean up. */
3954 gsi_commit_edge_inserts ();
3955 }
3956
3957 namespace {
3958
3959 const pass_data pass_data_strength_reduction =
3960 {
3961 GIMPLE_PASS, /* type */
3962 "slsr", /* name */
3963 OPTGROUP_NONE, /* optinfo_flags */
3964 TV_GIMPLE_SLSR, /* tv_id */
3965 ( PROP_cfg | PROP_ssa ), /* properties_required */
3966 0, /* properties_provided */
3967 0, /* properties_destroyed */
3968 0, /* todo_flags_start */
3969 0, /* todo_flags_finish */
3970 };
3971
3972 class pass_strength_reduction : public gimple_opt_pass
3973 {
3974 public:
3975 pass_strength_reduction (gcc::context *ctxt)
3976 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3977 {}
3978
3979 /* opt_pass methods: */
3980 virtual bool gate (function *) { return flag_tree_slsr; }
3981 virtual unsigned int execute (function *);
3982
3983 }; // class pass_strength_reduction
3984
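/* Entry point for the pass: set up the candidate obstacks and maps, walk
   the CFG in dominator order collecting candidates, analyze and replace
   the profitable ones, and release the data structures again.  */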
3985 unsigned
3986 pass_strength_reduction::execute (function *fun)
3987 {
3988 /* Create the obstack where candidates will reside. */
3989 gcc_obstack_init (&cand_obstack);
3990
3991 /* Allocate the candidate vector and initialize the first NULL element. */
3992 cand_vec.create (128);
3993 cand_vec.safe_push (NULL);
3994
3995 /* Allocate the mapping from statements to candidate indices. */
3996 stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
3997
3998 /* Create the obstack where candidate chains will reside. */
3999 gcc_obstack_init (&chain_obstack);
4000
4001 /* Allocate the mapping from base expressions to candidate chains. */
4002 base_cand_map = new hash_table<cand_chain_hasher> (500);
4003
4004 /* Allocate the mapping from bases to alternative bases. */
4005 alt_base_map = new hash_map<tree, tree>;
4006
4007 /* Initialize the loop optimizer. We need to detect flow across
4008 back edges, and this gives us dominator information as well. */
4009 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
4010
4011 /* Walk the CFG in predominator order looking for strength reduction
4012 candidates. */
4013 find_candidates_dom_walker (CDI_DOMINATORS)
4014 .walk (fun->cfg->x_entry_block_ptr);
4015
4016 if (dump_file && (dump_flags & TDF_DETAILS))
4017 {
4018 dump_cand_vec ();
4019 dump_cand_chains ();
4020 }
4021
4022 delete alt_base_map;
4023 free_affine_expand_cache (&name_expansions);
4024
4025 /* Analyze costs and make appropriate replacements. */
4026 analyze_candidates_and_replace ();
4027
4028 loop_optimizer_finalize ();
4029 delete base_cand_map;
4030 base_cand_map = NULL;
4031 obstack_free (&chain_obstack, NULL);
4032 delete stmt_cand_map;
4033 cand_vec.release ();
4034 obstack_free (&cand_obstack, NULL);
4035
4036 return 0;
4037 }
4038
4039 } // anon namespace
4040
4041 gimple_opt_pass *
4042 make_pass_strength_reduction (gcc::context *ctxt)
4043 {
4044 return new pass_strength_reduction (ctxt);
4045 }