1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
26
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
31
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "gimple-iterator.h"
42 #include "gimplify-me.h"
43 #include "stor-layout.h"
44 #include "expr.h"
45 #include "basic-block.h"
46 #include "tree-pass.h"
47 #include "cfgloop.h"
48 #include "gimple-pretty-print.h"
49 #include "gimple-ssa.h"
50 #include "tree-cfg.h"
51 #include "tree-phinodes.h"
52 #include "ssa-iterators.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "domwalk.h"
56 #include "pointer-set.h"
57 #include "expmed.h"
58 #include "params.h"
59 #include "hash-table.h"
60 #include "tree-ssa-address.h"
61 #include "wide-int-print.h"
62 \f
63 /* Information about a strength reduction candidate. Each statement
64 in the candidate table represents an expression of one of the
65 following forms (the special case of CAND_REF will be described
66 later):
67
68 (CAND_MULT) S1: X = (B + i) * S
69 (CAND_ADD) S1: X = B + (i * S)
70
71 Here X and B are SSA names, i is an integer constant, and S is
72 either an SSA name or a constant. We call B the "base," i the
73 "index", and S the "stride."
74
75 Any statement S0 that dominates S1 and is of the form:
76
77 (CAND_MULT) S0: Y = (B + i') * S
78 (CAND_ADD) S0: Y = B + (i' * S)
79
80 is called a "basis" for S1. In both cases, S1 may be replaced by
81
82 S1': X = Y + (i - i') * S,
83
84 where (i - i') * S is folded to the extent possible.
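
      For example, with the hypothetical concrete values i' = 2 and
      i = 7, the pair

        S0: Y = (B + 2) * S
        S1: X = (B + 7) * S

      allows S1 to be replaced by

        S1': X = Y + 5 * S.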
85
86 All gimple statements are visited in dominator order, and each
87 statement that may contribute to one of the forms of S1 above is
88 given at least one entry in the candidate table. Such statements
89 include addition, pointer addition, subtraction, multiplication,
90 negation, copies, and nontrivial type casts. If a statement may
91 represent more than one expression of the forms of S1 above,
92 multiple "interpretations" are stored in the table and chained
93 together. Examples:
94
95 * An add of two SSA names may treat either operand as the base.
96 * A multiply of two SSA names, likewise.
97 * A copy or cast may be thought of as either a CAND_MULT with
98 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
99
100 Candidate records are allocated from an obstack. They are addressed
101 both from a hash table keyed on S1, and from a vector of candidate
102 pointers arranged in predominator order.
103
104 Opportunity note
105 ----------------
106 Currently we don't recognize:
107
108 S0: Y = (S * i') - B
109 S1: X = (S * i) - B
110
111 as a strength reduction opportunity, even though this S1 would
112 also be replaceable by the S1' above. This can be added if it
113 comes up in practice.
114
115 Strength reduction in addressing
116 --------------------------------
117 There is another kind of candidate known as CAND_REF. A CAND_REF
118 describes a statement containing a memory reference having
119 complex addressing that might benefit from strength reduction.
120 Specifically, we are interested in references for which
121 get_inner_reference returns a base address, offset, and bitpos as
122 follows:
123
124 base: MEM_REF (T1, C1)
125 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
126 bitpos: C4 * BITS_PER_UNIT
127
128 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
129 arbitrary integer constants. Note that C2 may be zero, in which
130 case the offset will be MULT_EXPR (T2, C3).
131
132 When this pattern is recognized, the original memory reference
133 can be replaced with:
134
135 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
136 C1 + (C2 * C3) + C4)
137
138 which distributes the multiply to allow constant folding. When
139 two or more addressing expressions can be represented by MEM_REFs
140 of this form, differing only in the constants C1, C2, and C4,
141 making this substitution produces more efficient addressing during
142 the RTL phases. When there are not at least two expressions with
143 the same values of T1, T2, and C3, there is nothing to be gained
144 by the replacement.
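
      As a concrete (hypothetical) instance, with C1 = 4, C2 = 1,
      C3 = 8, and C4 = 0, the reference described by

        base:   MEM_REF (T1, 4)
        offset: MULT_EXPR (PLUS_EXPR (T2, 1), 8)

      is rewritten as

        MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 12)

      since C1 + (C2 * C3) + C4 = 4 + 8 + 0 = 12.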
145
146 Strength reduction of CAND_REFs uses the same infrastructure as
147 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
148 field, MULT_EXPR (T2, C3) in the stride (S) field, and
149 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
150 is thus another CAND_REF with the same B and S values. When at
151 least two CAND_REFs are chained together using the basis relation,
152 each of them is replaced as above, resulting in improved code
153 generation for addressing.
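
      A typical (illustrative) source of such chains is a pair of
      accesses to different fields of the same array element, such as
      p->a[i].f and p->a[i].g: both produce CAND_REFs with the same
      T1 (p), T2 (i), and C3 (the element size), differing only in the
      constant field offset folded into the index, so the first can
      serve as a basis for the second.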
154
155 Conditional candidates
156 ======================
157
158 Conditional candidates are best illustrated with an example.
159 Consider the code sequence:
160
161 (1) x_0 = ...;
162 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
163 if (...)
164 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
165 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
166 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
167 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
168
169 Here strength reduction is complicated by the uncertain value of x_2.
170 A legitimate transformation is:
171
172 (1) x_0 = ...;
173 (2) a_0 = x_0 * 5;
174 if (...)
175 {
176 (3) [x_1 = x_0 + 1;]
177 (3a) t_1 = a_0 + 5;
178 }
179 (4) [x_2 = PHI <x_0, x_1>;]
180 (4a) t_2 = PHI <a_0, t_1>;
181 (5) [x_3 = x_2 + 1;]
182 (6r) a_1 = t_2 + 5;
183
184 where the bracketed instructions may go dead.
185
186 To recognize this opportunity, we have to observe that statement (6)
187 has a "hidden basis" (2). The hidden basis is unlike a normal basis
188 in that the statement and the hidden basis have different base SSA
189 names (x_2 and x_0, respectively). The relationship is established
190 when a statement's base name (x_2) is defined by a phi statement (4),
191 each argument of which (x_0, x_1) has an identical "derived base name."
192 If the argument is defined by a candidate (as x_1 is by (3)) that is a
193 CAND_ADD having a stride of 1, the derived base name of the argument is
194 the base name of the candidate (x_0). Otherwise, the argument itself
195 is its derived base name (as is the case with argument x_0).
196
197 The hidden basis for statement (6) is the nearest dominating candidate
198 whose base name is the derived base name (x_0) of the feeding phi (4),
199 and whose stride is identical to that of the statement. We can then
200 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
201 allowing the final replacement of (6) by the strength-reduced (6r).
202
203 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
204 A CAND_PHI is not a candidate for replacement, but is maintained in the
205 candidate table to ease discovery of hidden bases. Any phi statement
206 whose arguments share a common derived base name is entered into the
207 table with the derived base name, an (arbitrary) index of zero, and a
208 stride of 1. A statement with a hidden basis can then be detected by
209 simply looking up its feeding phi definition in the candidate table,
210 extracting the derived base name, and searching for a basis in the
211 usual manner after substituting the derived base name.
212
213 Note that the transformation is only valid when the original phi and
214 the statements that define the phi's arguments are all at the same
215 position in the loop hierarchy. */
216
217
218 /* Index into the candidate vector, offset by 1. VECs are zero-based,
219 while cand_idx's are one-based, with zero indicating null. */
220 typedef unsigned cand_idx;
221
222 /* The kind of candidate. */
223 enum cand_kind
224 {
225 CAND_MULT,
226 CAND_ADD,
227 CAND_REF,
228 CAND_PHI
229 };
230
231 struct slsr_cand_d
232 {
233 /* The candidate statement S1. */
234 gimple cand_stmt;
235
236 /* The base expression B: often an SSA name, but not always. */
237 tree base_expr;
238
239 /* The stride S. */
240 tree stride;
241
242 /* The index constant i. */
243 widest_int index;
244
245 /* The type of the candidate. This is normally the type of base_expr,
246 but casts may have occurred when combining feeding instructions.
247 A candidate can only be a basis for candidates of the same final type.
248 (For CAND_REFs, this is the type to be used for operand 1 of the
249 replacement MEM_REF.) */
250 tree cand_type;
251
252 /* The kind of candidate (CAND_MULT, etc.). */
253 enum cand_kind kind;
254
255 /* Index of this candidate in the candidate vector. */
256 cand_idx cand_num;
257
258 /* Index of the next candidate record for the same statement.
259 A statement may be useful in more than one way (e.g., due to
260 commutativity). So we can have multiple "interpretations"
261 of a statement. */
262 cand_idx next_interp;
263
264 /* Index of the basis statement S0, if any, in the candidate vector. */
265 cand_idx basis;
266
267 /* First candidate for which this candidate is a basis, if one exists. */
268 cand_idx dependent;
269
270 /* Next candidate having the same basis as this one. */
271 cand_idx sibling;
272
273 /* If this is a conditional candidate, the CAND_PHI candidate
274 that defines the base SSA name B. */
275 cand_idx def_phi;
276
277 /* Savings that can be expected from eliminating dead code if this
278 candidate is replaced. */
279 int dead_savings;
280 };
281
282 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
283 typedef const struct slsr_cand_d *const_slsr_cand_t;
284
285 /* Pointers to candidates are chained together as part of a mapping
286 from base expressions to the candidates that use them. */
287
288 struct cand_chain_d
289 {
290 /* Base expression for the chain of candidates: often, but not
291 always, an SSA name. */
292 tree base_expr;
293
294 /* Pointer to a candidate. */
295 slsr_cand_t cand;
296
297 /* Chain pointer. */
298 struct cand_chain_d *next;
299
300 };
301
302 typedef struct cand_chain_d cand_chain, *cand_chain_t;
303 typedef const struct cand_chain_d *const_cand_chain_t;
304
305 /* Information about a unique "increment" associated with candidates
306 having an SSA name for a stride. An increment is the difference
307 between the index of the candidate and the index of its basis,
308 i.e., (i - i') as discussed in the module commentary.
309
310 When we are not going to generate address arithmetic we treat
311 increments that differ only in sign as the same, allowing sharing
312 of the cost of initializers. The absolute value of the increment
313 is stored in the incr_info. */
314
315 struct incr_info_d
316 {
317 /* The increment that relates a candidate to its basis. */
318 widest_int incr;
319
320 /* How many times the increment occurs in the candidate tree. */
321 unsigned count;
322
323 /* Cost of replacing candidates using this increment. Negative and
324 zero costs indicate replacement should be performed. */
325 int cost;
326
327 /* If this increment is profitable but is not -1, 0, or 1, it requires
328 an initializer T_0 = stride * incr to be found or introduced in the
329 nearest common dominator of all candidates. This field holds T_0
330 for subsequent use. */
331 tree initializer;
332
333 /* If the initializer was found to already exist, this is the block
334 where it was found. */
335 basic_block init_bb;
336 };
337
338 typedef struct incr_info_d incr_info, *incr_info_t;
339
340 /* Candidates are maintained in a vector. If candidate X dominates
341 candidate Y, then X appears before Y in the vector; but the
342 converse does not necessarily hold. */
343 static vec<slsr_cand_t> cand_vec;
344
345 enum cost_consts
346 {
347 COST_NEUTRAL = 0,
348 COST_INFINITE = 1000
349 };
350
351 enum stride_status
352 {
353 UNKNOWN_STRIDE = 0,
354 KNOWN_STRIDE = 1
355 };
356
357 enum phi_adjust_status
358 {
359 NOT_PHI_ADJUST = 0,
360 PHI_ADJUST = 1
361 };
362
363 enum count_phis_status
364 {
365 DONT_COUNT_PHIS = 0,
366 COUNT_PHIS = 1
367 };
368
369 /* Pointer map embodying a mapping from statements to candidates. */
370 static struct pointer_map_t *stmt_cand_map;
371
372 /* Obstack for candidates. */
373 static struct obstack cand_obstack;
374
375 /* Obstack for candidate chains. */
376 static struct obstack chain_obstack;
377
378 /* An array INCR_VEC of incr_infos is used during analysis of related
379 candidates having an SSA name for a stride. INCR_VEC_LEN describes
380 its current length. MAX_INCR_VEC_LEN is used to avoid costly
381 pathological cases. */
382 static incr_info_t incr_vec;
383 static unsigned incr_vec_len;
384 const int MAX_INCR_VEC_LEN = 16;
385
386 /* For a chain of candidates with unknown stride, indicates whether or not
387 we must generate pointer arithmetic when replacing statements. */
388 static bool address_arithmetic_p;
389
390 /* Forward function declarations. */
391 static slsr_cand_t base_cand_from_table (tree);
392 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
393 static bool legal_cast_p_1 (tree, tree);
394 \f
395 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
396
397 static slsr_cand_t
398 lookup_cand (cand_idx idx)
399 {
400 return cand_vec[idx - 1];
401 }
402
403 /* Helper for hashing a candidate chain header. */
404
405 struct cand_chain_hasher : typed_noop_remove <cand_chain>
406 {
407 typedef cand_chain value_type;
408 typedef cand_chain compare_type;
409 static inline hashval_t hash (const value_type *);
410 static inline bool equal (const value_type *, const compare_type *);
411 };
412
413 inline hashval_t
414 cand_chain_hasher::hash (const value_type *p)
415 {
416 tree base_expr = p->base_expr;
417 return iterative_hash_expr (base_expr, 0);
418 }
419
420 inline bool
421 cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
422 {
423 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
424 }
425
426 /* Hash table embodying a mapping from base exprs to chains of candidates. */
427 static hash_table <cand_chain_hasher> base_cand_map;
428 \f
429 /* Look in the candidate table for a CAND_PHI that defines BASE and
430 return it if found; otherwise return NULL. */
431
432 static cand_idx
433 find_phi_def (tree base)
434 {
435 slsr_cand_t c;
436
437 if (TREE_CODE (base) != SSA_NAME)
438 return 0;
439
440 c = base_cand_from_table (base);
441
442 if (!c || c->kind != CAND_PHI)
443 return 0;
444
445 return c->cand_num;
446 }
447
448 /* Helper routine for find_basis_for_candidate. May be called twice:
449 once for the candidate's base expr, and optionally again for the
450 candidate's phi definition. */
451
452 static slsr_cand_t
453 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
454 {
455 cand_chain mapping_key;
456 cand_chain_t chain;
457 slsr_cand_t basis = NULL;
458
459 // Limit potential of N^2 behavior for long candidate chains.
460 int iters = 0;
461 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
462
463 mapping_key.base_expr = base_expr;
464 chain = base_cand_map.find (&mapping_key);
465
466 for (; chain && iters < max_iters; chain = chain->next, ++iters)
467 {
468 slsr_cand_t one_basis = chain->cand;
469
470 if (one_basis->kind != c->kind
471 || one_basis->cand_stmt == c->cand_stmt
472 || !operand_equal_p (one_basis->stride, c->stride, 0)
473 || !types_compatible_p (one_basis->cand_type, c->cand_type)
474 || !dominated_by_p (CDI_DOMINATORS,
475 gimple_bb (c->cand_stmt),
476 gimple_bb (one_basis->cand_stmt)))
477 continue;
478
479 if (!basis || basis->cand_num < one_basis->cand_num)
480 basis = one_basis;
481 }
482
483 return basis;
484 }
485
486 /* Use the base expr from candidate C to look for possible candidates
487 that can serve as a basis for C. Each potential basis must also
488 appear in a block that dominates the candidate statement and have
489 the same stride and type. If more than one possible basis exists,
490 the one with highest index in the vector is chosen; this will be
491 the most immediately dominating basis. */
492
493 static int
494 find_basis_for_candidate (slsr_cand_t c)
495 {
496 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
497
498 /* If a candidate doesn't have a basis using its base expression,
499 it may have a basis hidden by one or more intervening phis. */
500 if (!basis && c->def_phi)
501 {
502 basic_block basis_bb, phi_bb;
503 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
504 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
505
506 if (basis)
507 {
508 /* A hidden basis must dominate the phi-definition of the
509 candidate's base name. */
510 phi_bb = gimple_bb (phi_cand->cand_stmt);
511 basis_bb = gimple_bb (basis->cand_stmt);
512
513 if (phi_bb == basis_bb
514 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
515 {
516 basis = NULL;
517 c->basis = 0;
518 }
519
520 /* If we found a hidden basis, estimate additional dead-code
521 savings if the phi and its feeding statements can be removed. */
522 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
523 c->dead_savings += phi_cand->dead_savings;
524 }
525 }
526
527 if (basis)
528 {
529 c->sibling = basis->dependent;
530 basis->dependent = c->cand_num;
531 return basis->cand_num;
532 }
533
534 return 0;
535 }
536
537 /* Record a mapping from the base expression of C to C itself, indicating that
538 C may potentially serve as a basis using that base expression. */
539
540 static void
541 record_potential_basis (slsr_cand_t c)
542 {
543 cand_chain_t node;
544 cand_chain **slot;
545
546 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
547 node->base_expr = c->base_expr;
548 node->cand = c;
549 node->next = NULL;
550 slot = base_cand_map.find_slot (node, INSERT);
551
552 if (*slot)
553 {
554 cand_chain_t head = (cand_chain_t) (*slot);
555 node->next = head->next;
556 head->next = node;
557 }
558 else
559 *slot = node;
560 }
561
562 /* Allocate storage for a new candidate and initialize its fields.
563 Attempt to find a basis for the candidate. */
564
565 static slsr_cand_t
566 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
567 const widest_int &index, tree stride, tree ctype,
568 unsigned savings)
569 {
570 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
571 sizeof (slsr_cand));
572 c->cand_stmt = gs;
573 c->base_expr = base;
574 c->stride = stride;
575 c->index = index;
576 c->cand_type = ctype;
577 c->kind = kind;
578 c->cand_num = cand_vec.length () + 1;
579 c->next_interp = 0;
580 c->dependent = 0;
581 c->sibling = 0;
582 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
583 c->dead_savings = savings;
584
585 cand_vec.safe_push (c);
586
587 if (kind == CAND_PHI)
588 c->basis = 0;
589 else
590 c->basis = find_basis_for_candidate (c);
591
592 record_potential_basis (c);
593
594 return c;
595 }
596
597 /* Determine the target cost of statement GS when compiling according
598 to SPEED. */
599
600 static int
601 stmt_cost (gimple gs, bool speed)
602 {
603 tree lhs, rhs1, rhs2;
604 enum machine_mode lhs_mode;
605
606 gcc_assert (is_gimple_assign (gs));
607 lhs = gimple_assign_lhs (gs);
608 rhs1 = gimple_assign_rhs1 (gs);
609 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
610
611 switch (gimple_assign_rhs_code (gs))
612 {
613 case MULT_EXPR:
614 rhs2 = gimple_assign_rhs2 (gs);
615
616 if (tree_fits_shwi_p (rhs2))
617 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
618
619 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
620 return mul_cost (speed, lhs_mode);
621
622 case PLUS_EXPR:
623 case POINTER_PLUS_EXPR:
624 case MINUS_EXPR:
625 return add_cost (speed, lhs_mode);
626
627 case NEGATE_EXPR:
628 return neg_cost (speed, lhs_mode);
629
630 case NOP_EXPR:
631 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
632
633 /* Note that we don't assign costs to copies that in most cases
634 will go away. */
635 default:
636 ;
637 }
638
639 gcc_unreachable ();
640 return 0;
641 }
642
643 /* Look up the defining statement for BASE_IN and return a pointer
644 to its candidate in the candidate table, if any; otherwise NULL.
 645       CAND_REF candidates are never returned.  */
646
647 static slsr_cand_t
648 base_cand_from_table (tree base_in)
649 {
650 slsr_cand_t *result;
651
652 gimple def = SSA_NAME_DEF_STMT (base_in);
653 if (!def)
654 return (slsr_cand_t) NULL;
655
656 result = (slsr_cand_t *) pointer_map_contains (stmt_cand_map, def);
657
658 if (result && (*result)->kind != CAND_REF)
659 return *result;
660
661 return (slsr_cand_t) NULL;
662 }
663
664 /* Add an entry to the statement-to-candidate mapping. */
665
666 static void
667 add_cand_for_stmt (gimple gs, slsr_cand_t c)
668 {
669 void **slot = pointer_map_insert (stmt_cand_map, gs);
670 gcc_assert (!*slot);
671 *slot = c;
672 }
673 \f
674 /* Given PHI which contains a phi statement, determine whether it
675 satisfies all the requirements of a phi candidate. If so, create
676 a candidate. Note that a CAND_PHI never has a basis itself, but
677 is used to help find a basis for subsequent candidates. */
678
679 static void
680 slsr_process_phi (gimple phi, bool speed)
681 {
682 unsigned i;
683 tree arg0_base = NULL_TREE, base_type;
684 slsr_cand_t c;
685 struct loop *cand_loop = gimple_bb (phi)->loop_father;
686 unsigned savings = 0;
687
688 /* A CAND_PHI requires each of its arguments to have the same
689 derived base name. (See the module header commentary for a
690 definition of derived base names.) Furthermore, all feeding
691 definitions must be in the same position in the loop hierarchy
692 as PHI. */
693
694 for (i = 0; i < gimple_phi_num_args (phi); i++)
695 {
696 slsr_cand_t arg_cand;
697 tree arg = gimple_phi_arg_def (phi, i);
698 tree derived_base_name = NULL_TREE;
699 gimple arg_stmt = NULL;
700 basic_block arg_bb = NULL;
701
702 if (TREE_CODE (arg) != SSA_NAME)
703 return;
704
705 arg_cand = base_cand_from_table (arg);
706
707 if (arg_cand)
708 {
709 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
710 {
711 if (!arg_cand->next_interp)
712 return;
713
714 arg_cand = lookup_cand (arg_cand->next_interp);
715 }
716
717 if (!integer_onep (arg_cand->stride))
718 return;
719
720 derived_base_name = arg_cand->base_expr;
721 arg_stmt = arg_cand->cand_stmt;
722 arg_bb = gimple_bb (arg_stmt);
723
724 /* Gather potential dead code savings if the phi statement
725 can be removed later on. */
726 if (has_single_use (arg))
727 {
728 if (gimple_code (arg_stmt) == GIMPLE_PHI)
729 savings += arg_cand->dead_savings;
730 else
731 savings += stmt_cost (arg_stmt, speed);
732 }
733 }
734 else
735 {
736 derived_base_name = arg;
737
738 if (SSA_NAME_IS_DEFAULT_DEF (arg))
739 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
740 else
 741             arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
742 }
743
744 if (!arg_bb || arg_bb->loop_father != cand_loop)
745 return;
746
747 if (i == 0)
748 arg0_base = derived_base_name;
749 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
750 return;
751 }
752
753 /* Create the candidate. "alloc_cand_and_find_basis" is named
754 misleadingly for this case, as no basis will be sought for a
755 CAND_PHI. */
756 base_type = TREE_TYPE (arg0_base);
757
758 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
759 0, integer_one_node, base_type, savings);
760
761 /* Add the candidate to the statement-candidate mapping. */
762 add_cand_for_stmt (phi, c);
763 }
764
765 /* Given PBASE which is a pointer to tree, look up the defining
766 statement for it and check whether the candidate is in the
767 form of:
768
769 X = B + (1 * S), S is integer constant
770 X = B + (i * S), S is integer one
771
 772    If so, set PBASE to the candidate's base_expr and return the
 773    widest_int (i * S).
 774    Otherwise, just return a widest_int of zero.  */
775
776 static widest_int
777 backtrace_base_for_ref (tree *pbase)
778 {
779 tree base_in = *pbase;
780 slsr_cand_t base_cand;
781
782 STRIP_NOPS (base_in);
783
784 /* Strip off widening conversion(s) to handle cases where
785 e.g. 'B' is widened from an 'int' in order to calculate
786 a 64-bit address. */
787 if (CONVERT_EXPR_P (base_in)
788 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
789 base_in = get_unwidened (base_in, NULL_TREE);
790
791 if (TREE_CODE (base_in) != SSA_NAME)
792 return 0;
793
794 base_cand = base_cand_from_table (base_in);
795
796 while (base_cand && base_cand->kind != CAND_PHI)
797 {
798 if (base_cand->kind == CAND_ADD
799 && base_cand->index == 1
800 && TREE_CODE (base_cand->stride) == INTEGER_CST)
801 {
802 /* X = B + (1 * S), S is integer constant. */
803 *pbase = base_cand->base_expr;
804 return wi::to_widest (base_cand->stride);
805 }
806 else if (base_cand->kind == CAND_ADD
807 && TREE_CODE (base_cand->stride) == INTEGER_CST
808 && integer_onep (base_cand->stride))
809 {
810 /* X = B + (i * S), S is integer one. */
811 *pbase = base_cand->base_expr;
812 return base_cand->index;
813 }
814
815 if (base_cand->next_interp)
816 base_cand = lookup_cand (base_cand->next_interp);
817 else
818 base_cand = NULL;
819 }
820
821 return 0;
822 }
823
824 /* Look for the following pattern:
825
826 *PBASE: MEM_REF (T1, C1)
827
828 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
829 or
830 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
831 or
832 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
833
834 *PINDEX: C4 * BITS_PER_UNIT
835
836 If not present, leave the input values unchanged and return FALSE.
837 Otherwise, modify the input values as follows and return TRUE:
838
839 *PBASE: T1
840 *POFFSET: MULT_EXPR (T2, C3)
841 *PINDEX: C1 + (C2 * C3) + C4
842
843 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
844 will be further restructured to:
845
846 *PBASE: T1
847 *POFFSET: MULT_EXPR (T2', C3)
848 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
849
850 static bool
851 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
852 tree *ptype)
853 {
854 tree base = *pbase, offset = *poffset;
855 widest_int index = *pindex;
856 tree mult_op0, t1, t2, type;
857 widest_int c1, c2, c3, c4, c5;
858
859 if (!base
860 || !offset
861 || TREE_CODE (base) != MEM_REF
862 || TREE_CODE (offset) != MULT_EXPR
863 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
864 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
865 return false;
866
867 t1 = TREE_OPERAND (base, 0);
868 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
869 type = TREE_TYPE (TREE_OPERAND (base, 1));
870
871 mult_op0 = TREE_OPERAND (offset, 0);
872 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
873
874 if (TREE_CODE (mult_op0) == PLUS_EXPR)
875
876 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
877 {
878 t2 = TREE_OPERAND (mult_op0, 0);
879 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
880 }
881 else
882 return false;
883
884 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
885
886 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
887 {
888 t2 = TREE_OPERAND (mult_op0, 0);
889 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
890 }
891 else
892 return false;
893
894 else
895 {
896 t2 = mult_op0;
897 c2 = 0;
898 }
899
900 c4 = wi::udiv_floor (index, BITS_PER_UNIT);
901 c5 = backtrace_base_for_ref (&t2);
902
903 *pbase = t1;
904 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
905 wide_int_to_tree (sizetype, c3));
906 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
907 *ptype = type;
908
909 return true;
910 }
911
912 /* Given GS which contains a data reference, create a CAND_REF entry in
913 the candidate table and attempt to find a basis. */
914
915 static void
916 slsr_process_ref (gimple gs)
917 {
918 tree ref_expr, base, offset, type;
919 HOST_WIDE_INT bitsize, bitpos;
920 enum machine_mode mode;
921 int unsignedp, volatilep;
922 slsr_cand_t c;
923
924 if (gimple_vdef (gs))
925 ref_expr = gimple_assign_lhs (gs);
926 else
927 ref_expr = gimple_assign_rhs1 (gs);
928
929 if (!handled_component_p (ref_expr)
930 || TREE_CODE (ref_expr) == BIT_FIELD_REF
931 || (TREE_CODE (ref_expr) == COMPONENT_REF
932 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
933 return;
934
935 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
936 &unsignedp, &volatilep, false);
937 widest_int index = bitpos;
938
939 if (!restructure_reference (&base, &offset, &index, &type))
940 return;
941
942 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
943 type, 0);
944
945 /* Add the candidate to the statement-candidate mapping. */
946 add_cand_for_stmt (gs, c);
947 }
948
949 /* Create a candidate entry for a statement GS, where GS multiplies
950 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
951 about the two SSA names into the new candidate. Return the new
952 candidate. */
953
954 static slsr_cand_t
955 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
956 {
957 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
958 widest_int index;
959 unsigned savings = 0;
960 slsr_cand_t c;
961 slsr_cand_t base_cand = base_cand_from_table (base_in);
962
963 /* Look at all interpretations of the base candidate, if necessary,
964 to find information to propagate into this candidate. */
965 while (base_cand && !base && base_cand->kind != CAND_PHI)
966 {
967
968 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
969 {
970 /* Y = (B + i') * 1
971 X = Y * Z
972 ================
973 X = (B + i') * Z */
974 base = base_cand->base_expr;
975 index = base_cand->index;
976 stride = stride_in;
977 ctype = base_cand->cand_type;
978 if (has_single_use (base_in))
979 savings = (base_cand->dead_savings
980 + stmt_cost (base_cand->cand_stmt, speed));
981 }
982 else if (base_cand->kind == CAND_ADD
983 && TREE_CODE (base_cand->stride) == INTEGER_CST)
984 {
985 /* Y = B + (i' * S), S constant
986 X = Y * Z
987 ============================
988 X = B + ((i' * S) * Z) */
989 base = base_cand->base_expr;
990 index = base_cand->index * wi::to_widest (base_cand->stride);
991 stride = stride_in;
992 ctype = base_cand->cand_type;
993 if (has_single_use (base_in))
994 savings = (base_cand->dead_savings
995 + stmt_cost (base_cand->cand_stmt, speed));
996 }
997
998 if (base_cand->next_interp)
999 base_cand = lookup_cand (base_cand->next_interp);
1000 else
1001 base_cand = NULL;
1002 }
1003
1004 if (!base)
1005 {
1006 /* No interpretations had anything useful to propagate, so
1007 produce X = (Y + 0) * Z. */
1008 base = base_in;
1009 index = 0;
1010 stride = stride_in;
1011 ctype = TREE_TYPE (base_in);
1012 }
1013
1014 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1015 ctype, savings);
1016 return c;
1017 }
1018
1019 /* Create a candidate entry for a statement GS, where GS multiplies
1020 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1021 information about BASE_IN into the new candidate. Return the new
1022 candidate. */
1023
1024 static slsr_cand_t
1025 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1026 {
1027 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1028 widest_int index, temp;
1029 unsigned savings = 0;
1030 slsr_cand_t c;
1031 slsr_cand_t base_cand = base_cand_from_table (base_in);
1032
1033 /* Look at all interpretations of the base candidate, if necessary,
1034 to find information to propagate into this candidate. */
1035 while (base_cand && !base && base_cand->kind != CAND_PHI)
1036 {
1037 if (base_cand->kind == CAND_MULT
1038 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1039 {
1040 /* Y = (B + i') * S, S constant
1041 X = Y * c
1042 ============================
1043 X = (B + i') * (S * c) */
1044 base = base_cand->base_expr;
1045 index = base_cand->index;
1046 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1047 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1048 ctype = base_cand->cand_type;
1049 if (has_single_use (base_in))
1050 savings = (base_cand->dead_savings
1051 + stmt_cost (base_cand->cand_stmt, speed));
1052 }
1053 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1054 {
1055 /* Y = B + (i' * 1)
1056 X = Y * c
1057 ===========================
1058 X = (B + i') * c */
1059 base = base_cand->base_expr;
1060 index = base_cand->index;
1061 stride = stride_in;
1062 ctype = base_cand->cand_type;
1063 if (has_single_use (base_in))
1064 savings = (base_cand->dead_savings
1065 + stmt_cost (base_cand->cand_stmt, speed));
1066 }
1067 else if (base_cand->kind == CAND_ADD
1068 && base_cand->index == 1
1069 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1070 {
1071 /* Y = B + (1 * S), S constant
1072 X = Y * c
1073 ===========================
1074 X = (B + S) * c */
1075 base = base_cand->base_expr;
1076 index = wi::to_widest (base_cand->stride);
1077 stride = stride_in;
1078 ctype = base_cand->cand_type;
1079 if (has_single_use (base_in))
1080 savings = (base_cand->dead_savings
1081 + stmt_cost (base_cand->cand_stmt, speed));
1082 }
1083
1084 if (base_cand->next_interp)
1085 base_cand = lookup_cand (base_cand->next_interp);
1086 else
1087 base_cand = NULL;
1088 }
1089
1090 if (!base)
1091 {
1092 /* No interpretations had anything useful to propagate, so
1093 produce X = (Y + 0) * c. */
1094 base = base_in;
1095 index = 0;
1096 stride = stride_in;
1097 ctype = TREE_TYPE (base_in);
1098 }
1099
1100 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1101 ctype, savings);
1102 return c;
1103 }
1104
1105 /* Given GS which is a multiply of scalar integers, make an appropriate
1106 entry in the candidate table. If this is a multiply of two SSA names,
1107 create two CAND_MULT interpretations and attempt to find a basis for
1108 each of them. Otherwise, create a single CAND_MULT and attempt to
1109 find a basis. */
1110
1111 static void
1112 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1113 {
1114 slsr_cand_t c, c2;
1115
1116 /* If this is a multiply of an SSA name with itself, it is highly
1117 unlikely that we will get a strength reduction opportunity, so
1118 don't record it as a candidate. This simplifies the logic for
1119 finding a basis, so if this is removed that must be considered. */
1120 if (rhs1 == rhs2)
1121 return;
1122
1123 if (TREE_CODE (rhs2) == SSA_NAME)
1124 {
1125 /* Record an interpretation of this statement in the candidate table
1126 assuming RHS1 is the base expression and RHS2 is the stride. */
1127 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1128
1129 /* Add the first interpretation to the statement-candidate mapping. */
1130 add_cand_for_stmt (gs, c);
1131
1132 /* Record another interpretation of this statement assuming RHS1
1133 is the stride and RHS2 is the base expression. */
1134 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1135 c->next_interp = c2->cand_num;
1136 }
1137 else
1138 {
1139 /* Record an interpretation for the multiply-immediate. */
1140 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1141
1142 /* Add the interpretation to the statement-candidate mapping. */
1143 add_cand_for_stmt (gs, c);
1144 }
1145 }
1146
1147 /* Create a candidate entry for a statement GS, where GS adds two
1148 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1149 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1150 information about the two SSA names into the new candidate.
1151 Return the new candidate. */
1152
1153 static slsr_cand_t
1154 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1155 bool subtract_p, bool speed)
1156 {
1157 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1158 widest_int index;
1159 unsigned savings = 0;
1160 slsr_cand_t c;
1161 slsr_cand_t base_cand = base_cand_from_table (base_in);
1162 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1163
1164 /* The most useful transformation is a multiply-immediate feeding
1165 an add or subtract. Look for that first. */
1166 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1167 {
1168 if (addend_cand->kind == CAND_MULT
1169 && addend_cand->index == 0
1170 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1171 {
1172 /* Z = (B + 0) * S, S constant
1173 X = Y +/- Z
1174 ===========================
1175 X = Y + ((+/-1 * S) * B) */
1176 base = base_in;
1177 index = wi::to_widest (addend_cand->stride);
1178 if (subtract_p)
1179 index = -index;
1180 stride = addend_cand->base_expr;
1181 ctype = TREE_TYPE (base_in);
1182 if (has_single_use (addend_in))
1183 savings = (addend_cand->dead_savings
1184 + stmt_cost (addend_cand->cand_stmt, speed));
1185 }
1186
1187 if (addend_cand->next_interp)
1188 addend_cand = lookup_cand (addend_cand->next_interp);
1189 else
1190 addend_cand = NULL;
1191 }
1192
1193 while (base_cand && !base && base_cand->kind != CAND_PHI)
1194 {
1195 if (base_cand->kind == CAND_ADD
1196 && (base_cand->index == 0
1197 || operand_equal_p (base_cand->stride,
1198 integer_zero_node, 0)))
1199 {
1200 /* Y = B + (i' * S), i' * S = 0
1201 X = Y +/- Z
1202 ============================
1203 X = B + (+/-1 * Z) */
1204 base = base_cand->base_expr;
1205 index = subtract_p ? -1 : 1;
1206 stride = addend_in;
1207 ctype = base_cand->cand_type;
1208 if (has_single_use (base_in))
1209 savings = (base_cand->dead_savings
1210 + stmt_cost (base_cand->cand_stmt, speed));
1211 }
1212 else if (subtract_p)
1213 {
1214 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1215
1216 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1217 {
1218 if (subtrahend_cand->kind == CAND_MULT
1219 && subtrahend_cand->index == 0
1220 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1221 {
1222 /* Z = (B + 0) * S, S constant
1223 X = Y - Z
1224 ===========================
1225 Value: X = Y + ((-1 * S) * B) */
1226 base = base_in;
1227 index = wi::to_widest (subtrahend_cand->stride);
1228 index = -index;
1229 stride = subtrahend_cand->base_expr;
1230 ctype = TREE_TYPE (base_in);
1231 if (has_single_use (addend_in))
1232 savings = (subtrahend_cand->dead_savings
1233 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1234 }
1235
1236 if (subtrahend_cand->next_interp)
1237 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1238 else
1239 subtrahend_cand = NULL;
1240 }
1241 }
1242
1243 if (base_cand->next_interp)
1244 base_cand = lookup_cand (base_cand->next_interp);
1245 else
1246 base_cand = NULL;
1247 }
1248
1249 if (!base)
1250 {
1251 /* No interpretations had anything useful to propagate, so
1252 produce X = Y + (1 * Z). */
1253 base = base_in;
1254 index = subtract_p ? -1 : 1;
1255 stride = addend_in;
1256 ctype = TREE_TYPE (base_in);
1257 }
1258
1259 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1260 ctype, savings);
1261 return c;
1262 }
1263
1264 /* Create a candidate entry for a statement GS, where GS adds SSA
1265 name BASE_IN to constant INDEX_IN. Propagate any known information
1266 about BASE_IN into the new candidate. Return the new candidate. */
1267
1268 static slsr_cand_t
1269 create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
1270 bool speed)
1271 {
1272 enum cand_kind kind = CAND_ADD;
1273 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1274 widest_int index, multiple;
1275 unsigned savings = 0;
1276 slsr_cand_t c;
1277 slsr_cand_t base_cand = base_cand_from_table (base_in);
1278
1279 while (base_cand && !base && base_cand->kind != CAND_PHI)
1280 {
1281 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1282
1283 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1284 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1285 sign, &multiple))
1286 {
1287 /* Y = (B + i') * S, S constant, c = kS for some integer k
1288 X = Y + c
1289 ============================
1290 X = (B + (i'+ k)) * S
1291 OR
1292 Y = B + (i' * S), S constant, c = kS for some integer k
1293 X = Y + c
1294 ============================
1295 X = (B + (i'+ k)) * S */
1296 kind = base_cand->kind;
1297 base = base_cand->base_expr;
1298 index = base_cand->index + multiple;
1299 stride = base_cand->stride;
1300 ctype = base_cand->cand_type;
1301 if (has_single_use (base_in))
1302 savings = (base_cand->dead_savings
1303 + stmt_cost (base_cand->cand_stmt, speed));
1304 }
1305
1306 if (base_cand->next_interp)
1307 base_cand = lookup_cand (base_cand->next_interp);
1308 else
1309 base_cand = NULL;
1310 }
1311
1312 if (!base)
1313 {
1314 /* No interpretations had anything useful to propagate, so
1315 produce X = Y + (c * 1). */
1316 kind = CAND_ADD;
1317 base = base_in;
1318 index = index_in;
1319 stride = integer_one_node;
1320 ctype = TREE_TYPE (base_in);
1321 }
1322
1323 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1324 ctype, savings);
1325 return c;
1326 }
1327
1328 /* Given GS which is an add or subtract of scalar integers or pointers,
1329 make at least one appropriate entry in the candidate table. */
1330
1331 static void
1332 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1333 {
1334 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1335 slsr_cand_t c = NULL, c2;
1336
1337 if (TREE_CODE (rhs2) == SSA_NAME)
1338 {
1339 /* First record an interpretation assuming RHS1 is the base expression
1340 and RHS2 is the stride. But it doesn't make sense for the
1341 stride to be a pointer, so don't record a candidate in that case. */
1342 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1343 {
1344 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1345
1346 /* Add the first interpretation to the statement-candidate
1347 mapping. */
1348 add_cand_for_stmt (gs, c);
1349 }
1350
1351 /* If the two RHS operands are identical, or this is a subtract,
1352 we're done. */
1353 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1354 return;
1355
1356 /* Otherwise, record another interpretation assuming RHS2 is the
1357 base expression and RHS1 is the stride, again provided that the
1358 stride is not a pointer. */
1359 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1360 {
1361 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1362 if (c)
1363 c->next_interp = c2->cand_num;
1364 else
1365 add_cand_for_stmt (gs, c2);
1366 }
1367 }
1368 else
1369 {
1370 /* Record an interpretation for the add-immediate. */
1371 widest_int index = wi::to_widest (rhs2);
1372 if (subtract_p)
1373 index = -index;
1374
1375 c = create_add_imm_cand (gs, rhs1, index, speed);
1376
1377 /* Add the interpretation to the statement-candidate mapping. */
1378 add_cand_for_stmt (gs, c);
1379 }
1380 }
1381
1382 /* Given GS which is a negate of a scalar integer, make an appropriate
1383 entry in the candidate table. A negate is equivalent to a multiply
1384 by -1. */
1385
1386 static void
1387 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1388 {
1389 /* Record a CAND_MULT interpretation for the multiply by -1. */
1390 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1391
1392 /* Add the interpretation to the statement-candidate mapping. */
1393 add_cand_for_stmt (gs, c);
1394 }
1395
1396 /* Help function for legal_cast_p, operating on two trees. Checks
1397 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1398 for more details. */
1399
1400 static bool
1401 legal_cast_p_1 (tree lhs, tree rhs)
1402 {
1403 tree lhs_type, rhs_type;
1404 unsigned lhs_size, rhs_size;
1405 bool lhs_wraps, rhs_wraps;
1406
1407 lhs_type = TREE_TYPE (lhs);
1408 rhs_type = TREE_TYPE (rhs);
1409 lhs_size = TYPE_PRECISION (lhs_type);
1410 rhs_size = TYPE_PRECISION (rhs_type);
1411 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1412 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1413
1414 if (lhs_size < rhs_size
1415 || (rhs_wraps && !lhs_wraps)
1416 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1417 return false;
1418
1419 return true;
1420 }
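
/* As an illustrative reading of the rules above, assuming the usual
   case in which unsigned types wrap on overflow and signed types do
   not: widening a 32-bit signed int to a 64-bit signed long is
   allowed, while widening a 32-bit unsigned int to a 64-bit unsigned
   long is not, because the source and destination are wrapping types
   of different sizes.  */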
1421
1422 /* Return TRUE if GS is a statement that defines an SSA name from
1423 a conversion and is legal for us to combine with an add and multiply
1424 in the candidate table. For example, suppose we have:
1425
1426 A = B + i;
1427 C = (type) A;
1428 D = C * S;
1429
1430 Without the type-cast, we would create a CAND_MULT for D with base B,
1431 index i, and stride S. We want to record this candidate only if it
1432 is equivalent to apply the type cast following the multiply:
1433
1434 A = B + i;
1435 E = A * S;
1436 D = (type) E;
1437
1438 We will record the type with the candidate for D. This allows us
1439 to use a similar previous candidate as a basis. If we have earlier seen
1440
1441 A' = B + i';
1442 C' = (type) A';
1443 D' = C' * S;
1444
1445 we can replace D with
1446
1447 D = D' + (i - i') * S;
1448
1449 But if moving the type-cast would change semantics, we mustn't do this.
1450
1451 This is legitimate for casts from a non-wrapping integral type to
1452 any integral type of the same or larger size. It is not legitimate
1453 to convert a wrapping type to a non-wrapping type, or to a wrapping
1454 type of a different size. I.e., with a wrapping type, we must
1455 assume that the addition B + i could wrap, in which case performing
1456 the multiply before or after one of the "illegal" type casts will
1457 have different semantics. */
1458
1459 static bool
1460 legal_cast_p (gimple gs, tree rhs)
1461 {
1462 if (!is_gimple_assign (gs)
1463 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1464 return false;
1465
1466 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1467 }
1468
1469 /* Given GS which is a cast to a scalar integer type, determine whether
1470 the cast is legal for strength reduction. If so, make at least one
1471 appropriate entry in the candidate table. */
1472
1473 static void
1474 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1475 {
1476 tree lhs, ctype;
1477 slsr_cand_t base_cand, c, c2;
1478 unsigned savings = 0;
1479
1480 if (!legal_cast_p (gs, rhs1))
1481 return;
1482
1483 lhs = gimple_assign_lhs (gs);
1484 base_cand = base_cand_from_table (rhs1);
1485 ctype = TREE_TYPE (lhs);
1486
1487 if (base_cand && base_cand->kind != CAND_PHI)
1488 {
1489 while (base_cand)
1490 {
1491 /* Propagate all data from the base candidate except the type,
1492 which comes from the cast, and the base candidate's cast,
1493 which is no longer applicable. */
1494 if (has_single_use (rhs1))
1495 savings = (base_cand->dead_savings
1496 + stmt_cost (base_cand->cand_stmt, speed));
1497
1498 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1499 base_cand->base_expr,
1500 base_cand->index, base_cand->stride,
1501 ctype, savings);
1502 if (base_cand->next_interp)
1503 base_cand = lookup_cand (base_cand->next_interp);
1504 else
1505 base_cand = NULL;
1506 }
1507 }
1508 else
1509 {
1510 /* If nothing is known about the RHS, create fresh CAND_ADD and
1511 CAND_MULT interpretations:
1512
1513 X = Y + (0 * 1)
1514 X = (Y + 0) * 1
1515
1516 The first of these is somewhat arbitrary, but the choice of
1517 1 for the stride simplifies the logic for propagating casts
1518 into their uses. */
1519 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1520 0, integer_one_node, ctype, 0);
1521 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1522 0, integer_one_node, ctype, 0);
1523 c->next_interp = c2->cand_num;
1524 }
1525
1526 /* Add the first (or only) interpretation to the statement-candidate
1527 mapping. */
1528 add_cand_for_stmt (gs, c);
1529 }
1530
1531 /* Given GS which is a copy of a scalar integer type, make at least one
1532 appropriate entry in the candidate table.
1533
1534 This interface is included for completeness, but is unnecessary
1535 if this pass immediately follows a pass that performs copy
1536 propagation, such as DOM. */
1537
1538 static void
1539 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1540 {
1541 slsr_cand_t base_cand, c, c2;
1542 unsigned savings = 0;
1543
1544 base_cand = base_cand_from_table (rhs1);
1545
1546 if (base_cand && base_cand->kind != CAND_PHI)
1547 {
1548 while (base_cand)
1549 {
1550 /* Propagate all data from the base candidate. */
1551 if (has_single_use (rhs1))
1552 savings = (base_cand->dead_savings
1553 + stmt_cost (base_cand->cand_stmt, speed));
1554
1555 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1556 base_cand->base_expr,
1557 base_cand->index, base_cand->stride,
1558 base_cand->cand_type, savings);
1559 if (base_cand->next_interp)
1560 base_cand = lookup_cand (base_cand->next_interp);
1561 else
1562 base_cand = NULL;
1563 }
1564 }
1565 else
1566 {
1567 /* If nothing is known about the RHS, create fresh CAND_ADD and
1568 CAND_MULT interpretations:
1569
1570 X = Y + (0 * 1)
1571 X = (Y + 0) * 1
1572
1573 The first of these is somewhat arbitrary, but the choice of
1574 1 for the stride simplifies the logic for propagating casts
1575 into their uses. */
1576 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1577 0, integer_one_node, TREE_TYPE (rhs1), 0);
1578 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1579 0, integer_one_node, TREE_TYPE (rhs1), 0);
1580 c->next_interp = c2->cand_num;
1581 }
1582
1583 /* Add the first (or only) interpretation to the statement-candidate
1584 mapping. */
1585 add_cand_for_stmt (gs, c);
1586 }
1587 \f
1588 class find_candidates_dom_walker : public dom_walker
1589 {
1590 public:
1591 find_candidates_dom_walker (cdi_direction direction)
1592 : dom_walker (direction) {}
1593 virtual void before_dom_children (basic_block);
1594 };
1595
1596 /* Find strength-reduction candidates in block BB. */
1597
1598 void
1599 find_candidates_dom_walker::before_dom_children (basic_block bb)
1600 {
1601 bool speed = optimize_bb_for_speed_p (bb);
1602 gimple_stmt_iterator gsi;
1603
1604 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1605 slsr_process_phi (gsi_stmt (gsi), speed);
1606
1607 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1608 {
1609 gimple gs = gsi_stmt (gsi);
1610
1611 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1612 slsr_process_ref (gs);
1613
1614 else if (is_gimple_assign (gs)
1615 && SCALAR_INT_MODE_P
1616 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1617 {
1618 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1619
1620 switch (gimple_assign_rhs_code (gs))
1621 {
1622 case MULT_EXPR:
1623 case PLUS_EXPR:
1624 rhs1 = gimple_assign_rhs1 (gs);
1625 rhs2 = gimple_assign_rhs2 (gs);
1626 /* Should never happen, but currently some buggy situations
1627 in earlier phases put constants in rhs1. */
1628 if (TREE_CODE (rhs1) != SSA_NAME)
1629 continue;
1630 break;
1631
1632 /* Possible future opportunity: rhs1 of a ptr+ can be
1633 an ADDR_EXPR. */
1634 case POINTER_PLUS_EXPR:
1635 case MINUS_EXPR:
1636 rhs2 = gimple_assign_rhs2 (gs);
1637 /* Fall-through. */
1638
1639 case NOP_EXPR:
1640 case MODIFY_EXPR:
1641 case NEGATE_EXPR:
1642 rhs1 = gimple_assign_rhs1 (gs);
1643 if (TREE_CODE (rhs1) != SSA_NAME)
1644 continue;
1645 break;
1646
1647 default:
1648 ;
1649 }
1650
1651 switch (gimple_assign_rhs_code (gs))
1652 {
1653 case MULT_EXPR:
1654 slsr_process_mul (gs, rhs1, rhs2, speed);
1655 break;
1656
1657 case PLUS_EXPR:
1658 case POINTER_PLUS_EXPR:
1659 case MINUS_EXPR:
1660 slsr_process_add (gs, rhs1, rhs2, speed);
1661 break;
1662
1663 case NEGATE_EXPR:
1664 slsr_process_neg (gs, rhs1, speed);
1665 break;
1666
1667 case NOP_EXPR:
1668 slsr_process_cast (gs, rhs1, speed);
1669 break;
1670
1671 case MODIFY_EXPR:
1672 slsr_process_copy (gs, rhs1, speed);
1673 break;
1674
1675 default:
1676 ;
1677 }
1678 }
1679 }
1680 }
1681 \f
1682 /* Dump a candidate for debug. */
1683
1684 static void
1685 dump_candidate (slsr_cand_t c)
1686 {
1687 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1688 gimple_bb (c->cand_stmt)->index);
1689 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1690 switch (c->kind)
1691 {
1692 case CAND_MULT:
1693 fputs (" MULT : (", dump_file);
1694 print_generic_expr (dump_file, c->base_expr, 0);
1695 fputs (" + ", dump_file);
1696 print_decs (c->index, dump_file);
1697 fputs (") * ", dump_file);
1698 print_generic_expr (dump_file, c->stride, 0);
1699 fputs (" : ", dump_file);
1700 break;
1701 case CAND_ADD:
1702 fputs (" ADD : ", dump_file);
1703 print_generic_expr (dump_file, c->base_expr, 0);
1704 fputs (" + (", dump_file);
1705 print_decs (c->index, dump_file);
1706 fputs (" * ", dump_file);
1707 print_generic_expr (dump_file, c->stride, 0);
1708 fputs (") : ", dump_file);
1709 break;
1710 case CAND_REF:
1711 fputs (" REF : ", dump_file);
1712 print_generic_expr (dump_file, c->base_expr, 0);
1713 fputs (" + (", dump_file);
1714 print_generic_expr (dump_file, c->stride, 0);
1715 fputs (") + ", dump_file);
1716 print_decs (c->index, dump_file);
1717 fputs (" : ", dump_file);
1718 break;
1719 case CAND_PHI:
1720 fputs (" PHI : ", dump_file);
1721 print_generic_expr (dump_file, c->base_expr, 0);
1722 fputs (" + (unknown * ", dump_file);
1723 print_generic_expr (dump_file, c->stride, 0);
1724 fputs (") : ", dump_file);
1725 break;
1726 default:
1727 gcc_unreachable ();
1728 }
1729 print_generic_expr (dump_file, c->cand_type, 0);
1730 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1731 c->basis, c->dependent, c->sibling);
1732 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1733 c->next_interp, c->dead_savings);
1734 if (c->def_phi)
1735 fprintf (dump_file, " phi: %d\n", c->def_phi);
1736 fputs ("\n", dump_file);
1737 }
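
/* As a rough illustration of the format produced above, the MULT
   interpretation of statement (6) in the conditional-candidate example
   from the header comment (B: x_2, i: 1, S: 5) would dump along the
   lines of the following, where the candidate, block, and related
   candidate numbers are hypothetical:

       6 [3]  a_1 = x_3 * 5;
            MULT : (x_2 + 1) * 5 : int
            basis: 2  dependent: 0  sibling: 0
            next-interp: 0  dead-savings: 0
            phi: 4                                                     */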
1738
1739 /* Dump the candidate vector for debug. */
1740
1741 static void
1742 dump_cand_vec (void)
1743 {
1744 unsigned i;
1745 slsr_cand_t c;
1746
1747 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1748
1749 FOR_EACH_VEC_ELT (cand_vec, i, c)
1750 dump_candidate (c);
1751 }
1752
1753 /* Callback used to dump the candidate chains hash table. */
1754
1755 int
1756 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1757 {
1758 const_cand_chain_t chain = *slot;
1759 cand_chain_t p;
1760
1761 print_generic_expr (dump_file, chain->base_expr, 0);
1762 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1763
1764 for (p = chain->next; p; p = p->next)
1765 fprintf (dump_file, " -> %d", p->cand->cand_num);
1766
1767 fputs ("\n", dump_file);
1768 return 1;
1769 }
1770
1771 /* Dump the candidate chains. */
1772
1773 static void
1774 dump_cand_chains (void)
1775 {
1776 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1777 base_cand_map.traverse_noresize <void *, ssa_base_cand_dump_callback> (NULL);
1778 fputs ("\n", dump_file);
1779 }
1780
1781 /* Dump the increment vector for debug. */
1782
1783 static void
1784 dump_incr_vec (void)
1785 {
1786 if (dump_file && (dump_flags & TDF_DETAILS))
1787 {
1788 unsigned i;
1789
1790 fprintf (dump_file, "\nIncrement vector:\n\n");
1791
1792 for (i = 0; i < incr_vec_len; i++)
1793 {
1794 fprintf (dump_file, "%3d increment: ", i);
1795 print_decs (incr_vec[i].incr, dump_file);
1796 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1797 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1798 fputs ("\n initializer: ", dump_file);
1799 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1800 fputs ("\n\n", dump_file);
1801 }
1802 }
1803 }
1804 \f
1805 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1806 data reference. */
1807
1808 static void
1809 replace_ref (tree *expr, slsr_cand_t c)
1810 {
1811 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1812 unsigned HOST_WIDE_INT misalign;
1813 unsigned align;
1814
1815 /* Ensure the memory reference carries the minimum alignment
1816 requirement for the data type. See PR58041. */
1817 get_object_alignment_1 (*expr, &align, &misalign);
1818 if (misalign != 0)
1819 align = (misalign & -misalign);
1820 if (align < TYPE_ALIGN (acc_type))
1821 acc_type = build_aligned_type (acc_type, align);
1822
1823 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1824 c->base_expr, c->stride);
1825 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1826 wide_int_to_tree (c->cand_type, c->index));
1827
1828 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1829 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1830 TREE_OPERAND (mem_ref, 0)
1831 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1832 /*simple_p=*/true, NULL,
1833 /*before=*/true, GSI_SAME_STMT);
1834 copy_ref_info (mem_ref, *expr);
1835 *expr = mem_ref;
1836 update_stmt (c->cand_stmt);
1837 }
1838
1839 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1840 dependent of candidate C with an equivalent strength-reduced data
1841 reference. */
1842
1843 static void
1844 replace_refs (slsr_cand_t c)
1845 {
1846 if (gimple_vdef (c->cand_stmt))
1847 {
1848 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1849 replace_ref (lhs, c);
1850 }
1851 else
1852 {
1853 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1854 replace_ref (rhs, c);
1855 }
1856
1857 if (c->sibling)
1858 replace_refs (lookup_cand (c->sibling));
1859
1860 if (c->dependent)
1861 replace_refs (lookup_cand (c->dependent));
1862 }
1863
1864 /* Return TRUE if candidate C is dependent upon a PHI. */
1865
1866 static bool
1867 phi_dependent_cand_p (slsr_cand_t c)
1868 {
1869 /* A candidate is not necessarily dependent upon a PHI just because
1870 it has a phi definition for its base name. It may have a basis
1871 that relies upon the same phi definition, in which case the PHI
1872 is irrelevant to this candidate. */
1873 return (c->def_phi
1874 && c->basis
1875 && lookup_cand (c->basis)->def_phi != c->def_phi);
1876 }
1877
1878 /* Calculate the increment required for candidate C relative to
1879 its basis. */
1880
1881 static widest_int
1882 cand_increment (slsr_cand_t c)
1883 {
1884 slsr_cand_t basis;
1885
1886 /* If the candidate doesn't have a basis, just return its own
1887 index. This is useful in record_increments to help us find
1888 an existing initializer. Also, if the candidate's basis is
1889 hidden by a phi, then its own index will be the increment
1890 from the newly introduced phi basis. */
1891 if (!c->basis || phi_dependent_cand_p (c))
1892 return c->index;
1893
1894 basis = lookup_cand (c->basis);
1895 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1896 return c->index - basis->index;
1897 }
1898
1899 /* Calculate the increment required for candidate C relative to
1900 its basis. If we aren't going to generate pointer arithmetic
1901 for this candidate, return the absolute value of that increment
1902 instead. */
1903
1904 static inline widest_int
1905 cand_abs_increment (slsr_cand_t c)
1906 {
1907 widest_int increment = cand_increment (c);
1908
1909 if (!address_arithmetic_p && wi::neg_p (increment))
1910 increment = -increment;
1911
1912 return increment;
1913 }
1914
1915 /* Return TRUE iff candidate C has already been replaced under
1916 another interpretation. */
1917
1918 static inline bool
1919 cand_already_replaced (slsr_cand_t c)
1920 {
1921 return (gimple_bb (c->cand_stmt) == 0);
1922 }
1923
1924 /* Common logic used by replace_unconditional_candidate and
1925 replace_conditional_candidate. */
1926
1927 static void
1928 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
1929 {
1930 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
1931 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
1932
1933 /* It is highly unlikely, but possible, that the resulting
1934 bump doesn't fit in a HWI. Abandon the replacement
1935 in this case. This does not affect siblings or dependents
1936 of C. Restriction to signed HWI is conservative for unsigned
1937 types but allows for safe negation without twisted logic. */
1938 if (wi::fits_shwi_p (bump)
1939 && bump.to_shwi () != HOST_WIDE_INT_MIN
1940 /* It is not useful to replace casts, copies, or adds of
1941 an SSA name and a constant. */
1942 && cand_code != MODIFY_EXPR
1943 && cand_code != NOP_EXPR
1944 && cand_code != PLUS_EXPR
1945 && cand_code != POINTER_PLUS_EXPR
1946 && cand_code != MINUS_EXPR)
1947 {
1948 enum tree_code code = PLUS_EXPR;
1949 tree bump_tree;
1950 gimple stmt_to_print = NULL;
1951
1952 /* If the basis name and the candidate's LHS have incompatible
1953 types, introduce a cast. */
1954 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
1955 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
1956 if (wi::neg_p (bump))
1957 {
1958 code = MINUS_EXPR;
1959 bump = -bump;
1960 }
1961
1962 bump_tree = wide_int_to_tree (target_type, bump);
1963
1964 if (dump_file && (dump_flags & TDF_DETAILS))
1965 {
1966 fputs ("Replacing: ", dump_file);
1967 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1968 }
1969
1970 if (bump == 0)
1971 {
1972 tree lhs = gimple_assign_lhs (c->cand_stmt);
1973 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
1974 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1975 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
1976 gsi_replace (&gsi, copy_stmt, false);
1977 c->cand_stmt = copy_stmt;
1978 if (dump_file && (dump_flags & TDF_DETAILS))
1979 stmt_to_print = copy_stmt;
1980 }
1981 else
1982 {
1983 tree rhs1, rhs2;
1984 if (cand_code != NEGATE_EXPR) {
1985 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
1986 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
1987 }
1988 if (cand_code != NEGATE_EXPR
1989 && ((operand_equal_p (rhs1, basis_name, 0)
1990 && operand_equal_p (rhs2, bump_tree, 0))
1991 || (operand_equal_p (rhs1, bump_tree, 0)
1992 && operand_equal_p (rhs2, basis_name, 0))))
1993 {
1994 if (dump_file && (dump_flags & TDF_DETAILS))
1995 {
1996 fputs ("(duplicate, not actually replacing)", dump_file);
1997 stmt_to_print = c->cand_stmt;
1998 }
1999 }
2000 else
2001 {
2002 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2003 gimple_assign_set_rhs_with_ops (&gsi, code,
2004 basis_name, bump_tree);
2005 update_stmt (gsi_stmt (gsi));
2006 c->cand_stmt = gsi_stmt (gsi);
2007 if (dump_file && (dump_flags & TDF_DETAILS))
2008 stmt_to_print = gsi_stmt (gsi);
2009 }
2010 }
2011
2012 if (dump_file && (dump_flags & TDF_DETAILS))
2013 {
2014 fputs ("With: ", dump_file);
2015 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2016 fputs ("\n", dump_file);
2017 }
2018 }
2019 }
2020
2021 /* Replace candidate C with an add or subtract. Note that we only
2022 operate on CAND_MULTs with known strides, so we will never generate
2023 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2024 X = Y + ((i - i') * S), as described in the module commentary. The
2025 folded value ((i - i') * S) is referred to here as the "bump." */
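/* Worked example with illustrative values:  if the basis is
   Y = (B + 2) * S and the candidate is X = (B + 7) * S with known
   stride S = 4, the increment is 7 - 2 = 5 and the bump is 5 * 4 = 20,
   so the multiply is rewritten as X = Y + 20.  A negative bump (say
   -20) is emitted as X = Y - 20 instead, since replace_mult_candidate
   negates the bump and switches to MINUS_EXPR.  */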
2026
2027 static void
2028 replace_unconditional_candidate (slsr_cand_t c)
2029 {
2030 slsr_cand_t basis;
2031
2032 if (cand_already_replaced (c))
2033 return;
2034
2035 basis = lookup_cand (c->basis);
2036 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2037
2038 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2039 }
2040 \f
2041 /* Return the index in the increment vector of the given INCREMENT,
2042 or -1 if not found. The latter can occur if more than
2043 MAX_INCR_VEC_LEN increments have been found. */
2044
2045 static inline int
2046 incr_vec_index (widest_int increment)
2047 {
2048 unsigned i;
2049
2050 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2051 ;
2052
2053 if (i < incr_vec_len)
2054 return i;
2055 else
2056 return -1;
2057 }
2058
2059 /* Create a new statement along edge E to add BASIS_NAME to the product
2060 of INCREMENT and the stride of candidate C. Create and return a new
2061 SSA name to be used as the LHS of the new statement.
2062 KNOWN_STRIDE is true iff C's stride is a constant. */
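/* For illustration (hypothetical SSA names):  with a known stride of 4
   and an increment of 3, the statement created on edge E (or in a block
   split from E) is

     slsr_7 = basis_5 + 12;

   With an unknown stride, a recorded initializer T_0 = stride * |incr|
   is reused when available:

     slsr_7 = basis_5 + T_0;   (or basis_5 - T_0 for a negated increment)

   and increments of 1 and -1 simply add or subtract the stride.  */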
2063
2064 static tree
2065 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2066 widest_int increment, edge e, location_t loc,
2067 bool known_stride)
2068 {
2069 basic_block insert_bb;
2070 gimple_stmt_iterator gsi;
2071 tree lhs, basis_type;
2072 gimple new_stmt;
2073
2074 /* If the add candidate along this incoming edge has the same
2075 index as C's hidden basis, the hidden basis represents this
2076 edge correctly. */
2077 if (increment == 0)
2078 return basis_name;
2079
2080 basis_type = TREE_TYPE (basis_name);
2081 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2082
2083 if (known_stride)
2084 {
2085 tree bump_tree;
2086 enum tree_code code = PLUS_EXPR;
2087 widest_int bump = increment * wi::to_widest (c->stride);
2088 if (wi::neg_p (bump))
2089 {
2090 code = MINUS_EXPR;
2091 bump = -bump;
2092 }
2093
2094 bump_tree = wide_int_to_tree (basis_type, bump);
2095 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2096 bump_tree);
2097 }
2098 else
2099 {
2100 int i;
2101 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2102 i = incr_vec_index (negate_incr ? -increment : increment);
2103 gcc_assert (i >= 0);
2104
2105 if (incr_vec[i].initializer)
2106 {
2107 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2108 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2109 incr_vec[i].initializer);
2110 }
2111 else if (increment == 1)
2112 new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
2113 c->stride);
2114 else if (increment == -1)
2115 new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
2116 c->stride);
2117 else
2118 gcc_unreachable ();
2119 }
2120
2121 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2122 gsi = gsi_last_bb (insert_bb);
2123
2124 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2125 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2126 else
2127 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2128
2129 gimple_set_location (new_stmt, loc);
2130
2131 if (dump_file && (dump_flags & TDF_DETAILS))
2132 {
2133 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2134 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2135 }
2136
2137 return lhs;
2138 }
2139
2140 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2141 is hidden by the phi node FROM_PHI, create a new phi node in the same
2142 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2143 with its phi arguments representing conditional adjustments to the
2144 hidden basis along conditional incoming paths. Those adjustments are
2145 made by creating add statements (and sometimes recursively creating
2146 phis) along those incoming paths. LOC is the location to attach to
2147 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2148 constant. */
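/* Sketch of the effect (hypothetical gimple):  suppose C's basis is
   hidden by

     x_9 = PHI <x_3(4), x_6(5)>

   where x_3 feeds the same index as the basis and x_6 differs from it
   by 2.  A compensating add is created along the second arc and a new
   phi becomes C's effective basis:

     slsr_12 = y_basis + <2 * stride>;   (on the edge from block 5)
     slsr_13 = PHI <y_basis(4), slsr_12(5)>

   C is subsequently replaced relative to slsr_13.  */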
2149
2150 static tree
2151 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2152 location_t loc, bool known_stride)
2153 {
2154 int i;
2155 tree name, phi_arg;
2156 gimple phi;
2157 vec<tree> phi_args;
2158 slsr_cand_t basis = lookup_cand (c->basis);
2159 int nargs = gimple_phi_num_args (from_phi);
2160 basic_block phi_bb = gimple_bb (from_phi);
2161 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2162 phi_args.create (nargs);
2163
2164 /* Process each argument of the existing phi that represents
2165 conditionally-executed add candidates. */
2166 for (i = 0; i < nargs; i++)
2167 {
2168 edge e = (*phi_bb->preds)[i];
2169 tree arg = gimple_phi_arg_def (from_phi, i);
2170 tree feeding_def;
2171
2172 /* If the phi argument is the base name of the CAND_PHI, then
2173 this incoming arc should use the hidden basis. */
2174 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2175 if (basis->index == 0)
2176 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2177 else
2178 {
2179 widest_int incr = -basis->index;
2180 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2181 e, loc, known_stride);
2182 }
2183 else
2184 {
2185 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2186
2187 /* If there is another phi along this incoming edge, we must
2188 process it in the same fashion to ensure that all basis
2189 adjustments are made along its incoming edges. */
2190 if (gimple_code (arg_def) == GIMPLE_PHI)
2191 feeding_def = create_phi_basis (c, arg_def, basis_name,
2192 loc, known_stride);
2193 else
2194 {
2195 slsr_cand_t arg_cand = base_cand_from_table (arg);
2196 widest_int diff = arg_cand->index - basis->index;
2197 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2198 e, loc, known_stride);
2199 }
2200 }
2201
2202 /* Because of recursion, we need to save the arguments in a vector
2203 so we can create the PHI statement all at once. Otherwise the
2204 storage for the half-created PHI can be reclaimed. */
2205 phi_args.safe_push (feeding_def);
2206 }
2207
2208 /* Create the new phi basis. */
2209 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2210 phi = create_phi_node (name, phi_bb);
2211 SSA_NAME_DEF_STMT (name) = phi;
2212
2213 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2214 {
2215 edge e = (*phi_bb->preds)[i];
2216 add_phi_arg (phi, phi_arg, e, loc);
2217 }
2218
2219 update_stmt (phi);
2220
2221 if (dump_file && (dump_flags & TDF_DETAILS))
2222 {
2223 fputs ("Introducing new phi basis: ", dump_file);
2224 print_gimple_stmt (dump_file, phi, 0, 0);
2225 }
2226
2227 return name;
2228 }
2229
2230 /* Given a candidate C whose basis is hidden by at least one intervening
2231 phi, introduce a matching number of new phis to represent its basis
2232 adjusted by conditional increments along possible incoming paths. Then
2233 replace C as though it were an unconditional candidate, using the new
2234 basis. */
2235
2236 static void
2237 replace_conditional_candidate (slsr_cand_t c)
2238 {
2239 tree basis_name, name;
2240 slsr_cand_t basis;
2241 location_t loc;
2242
2243 /* Look up the LHS SSA name from C's basis. This will be the
2244 RHS1 of the adds we will introduce to create new phi arguments. */
2245 basis = lookup_cand (c->basis);
2246 basis_name = gimple_assign_lhs (basis->cand_stmt);
2247
2248 /* Create a new phi statement which will represent C's true basis
2249 after the transformation is complete. */
2250 loc = gimple_location (c->cand_stmt);
2251 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2252 basis_name, loc, KNOWN_STRIDE);
2253 /* Replace C with an add of the new basis phi and a constant. */
2254 widest_int bump = c->index * wi::to_widest (c->stride);
2255
2256 replace_mult_candidate (c, name, bump);
2257 }
2258
2259 /* Compute the expected costs of inserting basis adjustments for
2260 candidate C with phi-definition PHI. The cost of inserting
2261 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2262 which are themselves phi results, recursively calculate costs
2263 for those phis as well. */
2264
2265 static int
2266 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2267 {
2268 unsigned i;
2269 int cost = 0;
2270 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2271
2272 /* If we work our way back to a phi that isn't dominated by the hidden
2273 basis, this isn't a candidate for replacement. Indicate this by
2274 returning an unreasonably high cost. It's not easy to detect
2275 these situations when determining the basis, so we defer the
2276 decision until now. */
2277 basic_block phi_bb = gimple_bb (phi);
2278 slsr_cand_t basis = lookup_cand (c->basis);
2279 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2280
2281 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2282 return COST_INFINITE;
2283
2284 for (i = 0; i < gimple_phi_num_args (phi); i++)
2285 {
2286 tree arg = gimple_phi_arg_def (phi, i);
2287
2288 if (arg != phi_cand->base_expr)
2289 {
2290 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2291
2292 if (gimple_code (arg_def) == GIMPLE_PHI)
2293 cost += phi_add_costs (arg_def, c, one_add_cost);
2294 else
2295 {
2296 slsr_cand_t arg_cand = base_cand_from_table (arg);
2297
2298 if (arg_cand->index != c->index)
2299 cost += one_add_cost;
2300 }
2301 }
2302 }
2303
2304 return cost;
2305 }
2306
2307 /* For candidate C, each sibling of candidate C, and each dependent of
2308 candidate C, determine whether the candidate is dependent upon a
2309 phi that hides its basis. If not, replace the candidate unconditionally.
2310 Otherwise, determine whether the cost of introducing compensation code
2311 for the candidate is offset by the gains from strength reduction. If
2312 so, replace the candidate and introduce the compensation code. */
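/* Worked cost example (illustrative numbers):  if the multiply being
   replaced costs 4, one unconditional add and two compensating phi
   adds cost 1 each, and nothing else goes dead, then add_costs = 3,
   mult_savings = 4, dead_savings = 0, and cost = 3 - 4 - 0 = -1, which
   is <= COST_NEUTRAL, so the conditional candidate is replaced.  */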
2313
2314 static void
2315 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2316 {
2317 if (phi_dependent_cand_p (c))
2318 {
2319 if (c->kind == CAND_MULT)
2320 {
2321 /* A candidate dependent upon a phi will replace a multiply by
2322 a constant with an add, and will insert at most one add for
2323 each phi argument. Add these costs with the potential dead-code
2324 savings to determine profitability. */
2325 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2326 int mult_savings = stmt_cost (c->cand_stmt, speed);
2327 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2328 tree phi_result = gimple_phi_result (phi);
2329 int one_add_cost = add_cost (speed,
2330 TYPE_MODE (TREE_TYPE (phi_result)));
2331 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2332 int cost = add_costs - mult_savings - c->dead_savings;
2333
2334 if (dump_file && (dump_flags & TDF_DETAILS))
2335 {
2336 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2337 fprintf (dump_file, " add_costs = %d\n", add_costs);
2338 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2339 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2340 fprintf (dump_file, " cost = %d\n", cost);
2341 if (cost <= COST_NEUTRAL)
2342 fputs (" Replacing...\n", dump_file);
2343 else
2344 fputs (" Not replaced.\n", dump_file);
2345 }
2346
2347 if (cost <= COST_NEUTRAL)
2348 replace_conditional_candidate (c);
2349 }
2350 }
2351 else
2352 replace_unconditional_candidate (c);
2353
2354 if (c->sibling)
2355 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2356
2357 if (c->dependent)
2358 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2359 }
2360 \f
2361 /* Count the number of candidates in the tree rooted at C that have
2362 not already been replaced under other interpretations. */
2363
2364 static int
2365 count_candidates (slsr_cand_t c)
2366 {
2367 unsigned count = cand_already_replaced (c) ? 0 : 1;
2368
2369 if (c->sibling)
2370 count += count_candidates (lookup_cand (c->sibling));
2371
2372 if (c->dependent)
2373 count += count_candidates (lookup_cand (c->dependent));
2374
2375 return count;
2376 }
2377
2378 /* Increase the count of INCREMENT by one in the increment vector.
2379 INCREMENT is associated with candidate C. If INCREMENT is to be
2380 conditionally executed as part of a conditional candidate replacement,
2381 IS_PHI_ADJUST is true, otherwise false. If an initializer
2382 T_0 = stride * I is provided by a candidate that dominates all
2383 candidates with the same increment, also record T_0 for subsequent use. */
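/* For example (hypothetical candidates):  three non-pointer candidates
   with increments 4, -4, and 4 all share a single entry with incr = 4
   and count = 3, because increments differing only in sign are folded
   together here.  If the first of them is a CAND_ADD of the form
   x = base + T_0 whose index equals the increment, T_0 may also be
   recorded as that entry's initializer, subject to the dominance check
   performed when later occurrences are seen.  */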
2384
2385 static void
2386 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2387 {
2388 bool found = false;
2389 unsigned i;
2390
2391 /* Treat increments that differ only in sign as identical so as to
2392 share initializers, unless we are generating pointer arithmetic. */
2393 if (!address_arithmetic_p && wi::neg_p (increment))
2394 increment = -increment;
2395
2396 for (i = 0; i < incr_vec_len; i++)
2397 {
2398 if (incr_vec[i].incr == increment)
2399 {
2400 incr_vec[i].count++;
2401 found = true;
2402
2403 /* If we previously recorded an initializer that doesn't
2404 dominate this candidate, it's not going to be useful to
2405 us after all. */
2406 if (incr_vec[i].initializer
2407 && !dominated_by_p (CDI_DOMINATORS,
2408 gimple_bb (c->cand_stmt),
2409 incr_vec[i].init_bb))
2410 {
2411 incr_vec[i].initializer = NULL_TREE;
2412 incr_vec[i].init_bb = NULL;
2413 }
2414
2415 break;
2416 }
2417 }
2418
2419 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2420 {
2421 /* The first time we see an increment, create the entry for it.
2422 If this is the root candidate which doesn't have a basis, set
2423 the count to zero. We're only processing it so it can possibly
2424 provide an initializer for other candidates. */
2425 incr_vec[incr_vec_len].incr = increment;
2426 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2427 incr_vec[incr_vec_len].cost = COST_INFINITE;
2428
2429 /* Optimistically record the first occurrence of this increment
2430 as providing an initializer (if it does); we will revise this
2431 opinion later if it doesn't dominate all other occurrences.
2432 Exception: increments of -1, 0, 1 never need initializers;
2433 and phi adjustments don't ever provide initializers. */
2434 if (c->kind == CAND_ADD
2435 && !is_phi_adjust
2436 && c->index == increment
2437 && (wi::gts_p (increment, 1)
2438 || wi::lts_p (increment, -1))
2439 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2440 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2441 {
2442 tree t0 = NULL_TREE;
2443 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2444 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2445 if (operand_equal_p (rhs1, c->base_expr, 0))
2446 t0 = rhs2;
2447 else if (operand_equal_p (rhs2, c->base_expr, 0))
2448 t0 = rhs1;
2449 if (t0
2450 && SSA_NAME_DEF_STMT (t0)
2451 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2452 {
2453 incr_vec[incr_vec_len].initializer = t0;
2454 incr_vec[incr_vec_len++].init_bb
2455 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2456 }
2457 else
2458 {
2459 incr_vec[incr_vec_len].initializer = NULL_TREE;
2460 incr_vec[incr_vec_len++].init_bb = NULL;
2461 }
2462 }
2463 else
2464 {
2465 incr_vec[incr_vec_len].initializer = NULL_TREE;
2466 incr_vec[incr_vec_len++].init_bb = NULL;
2467 }
2468 }
2469 }
2470
2471 /* Given phi statement PHI that hides a candidate from its BASIS, find
2472 the increments along each incoming arc (recursively handling additional
2473 phis that may be present) and record them. These increments are the
2474 difference in index between the index-adjusting statements and the
2475 index of the basis. */
2476
2477 static void
2478 record_phi_increments (slsr_cand_t basis, gimple phi)
2479 {
2480 unsigned i;
2481 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2482
2483 for (i = 0; i < gimple_phi_num_args (phi); i++)
2484 {
2485 tree arg = gimple_phi_arg_def (phi, i);
2486
2487 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2488 {
2489 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2490
2491 if (gimple_code (arg_def) == GIMPLE_PHI)
2492 record_phi_increments (basis, arg_def);
2493 else
2494 {
2495 slsr_cand_t arg_cand = base_cand_from_table (arg);
2496 widest_int diff = arg_cand->index - basis->index;
2497 record_increment (arg_cand, diff, PHI_ADJUST);
2498 }
2499 }
2500 }
2501 }
2502
2503 /* Determine how many times each unique increment occurs in the set
2504 of candidates rooted at C's parent, recording the data in the
2505 increment vector. For each unique increment I, if an initializer
2506 T_0 = stride * I is provided by a candidate that dominates all
2507 candidates with the same increment, also record T_0 for subsequent
2508 use. */
2509
2510 static void
2511 record_increments (slsr_cand_t c)
2512 {
2513 if (!cand_already_replaced (c))
2514 {
2515 if (!phi_dependent_cand_p (c))
2516 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2517 else
2518 {
2519 /* A candidate with a basis hidden by a phi will have one
2520 increment for its relationship to the index represented by
2521 the phi, and potentially additional increments along each
2522 incoming edge. For the root of the dependency tree (which
2523 has no basis), process just the initial index in case it has
2524 an initializer that can be used by subsequent candidates. */
2525 record_increment (c, c->index, NOT_PHI_ADJUST);
2526
2527 if (c->basis)
2528 record_phi_increments (lookup_cand (c->basis),
2529 lookup_cand (c->def_phi)->cand_stmt);
2530 }
2531 }
2532
2533 if (c->sibling)
2534 record_increments (lookup_cand (c->sibling));
2535
2536 if (c->dependent)
2537 record_increments (lookup_cand (c->dependent));
2538 }
2539
2540 /* Add up and return the costs of introducing add statements that
2541 require the increment INCR on behalf of candidate C and phi
2542 statement PHI. Accumulate into *SAVINGS the potential savings
2543 from removing existing statements that feed PHI and have no other
2544 uses. */
2545
2546 static int
2547 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
2548 {
2549 unsigned i;
2550 int cost = 0;
2551 slsr_cand_t basis = lookup_cand (c->basis);
2552 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2553
2554 for (i = 0; i < gimple_phi_num_args (phi); i++)
2555 {
2556 tree arg = gimple_phi_arg_def (phi, i);
2557
2558 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2559 {
2560 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2561
2562 if (gimple_code (arg_def) == GIMPLE_PHI)
2563 {
2564 int feeding_savings = 0;
2565 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2566 if (has_single_use (gimple_phi_result (arg_def)))
2567 *savings += feeding_savings;
2568 }
2569 else
2570 {
2571 slsr_cand_t arg_cand = base_cand_from_table (arg);
2572 widest_int diff = arg_cand->index - basis->index;
2573
2574 if (incr == diff)
2575 {
2576 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2577 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2578 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2579 if (has_single_use (lhs))
2580 *savings += stmt_cost (arg_cand->cand_stmt, true);
2581 }
2582 }
2583 }
2584 }
2585
2586 return cost;
2587 }
2588
2589 /* Return the first candidate in the tree rooted at C that has not
2590 already been replaced, favoring siblings over dependents. */
2591
2592 static slsr_cand_t
2593 unreplaced_cand_in_tree (slsr_cand_t c)
2594 {
2595 if (!cand_already_replaced (c))
2596 return c;
2597
2598 if (c->sibling)
2599 {
2600 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2601 if (sib)
2602 return sib;
2603 }
2604
2605 if (c->dependent)
2606 {
2607 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2608 if (dep)
2609 return dep;
2610 }
2611
2612 return NULL;
2613 }
2614
2615 /* Return TRUE if the candidates in the tree rooted at C should be
2616 optimized for speed, else FALSE. We estimate this based on the block
2617 containing the most dominant candidate in the tree that has not yet
2618 been replaced. */
2619
2620 static bool
2621 optimize_cands_for_speed_p (slsr_cand_t c)
2622 {
2623 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2624 gcc_assert (c2);
2625 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2626 }
2627
2628 /* Add COST_IN to the lowest cost of any dependent path starting at
2629 candidate C or any of its siblings, counting only candidates along
2630 such paths with increment INCR. Assume that replacing a candidate
2631 reduces cost by REPL_SAVINGS. Also account for savings from any
2632 statements that would go dead. If COUNT_PHIS is true, include
2633 costs of introducing feeding statements for conditional candidates. */
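/* Small worked example (illustrative numbers):  called with cost_in = 6
   and repl_savings = 3 on a candidate whose increment matches INCR and
   which frees one dead statement (dead_savings = 1), the running cost
   becomes 6 - 3 - 1 = 2; a matching dependent lowers it to
   2 - 3 - 1 = -2.  If the best sibling path only reaches 1, the
   minimum, -2, is returned.  */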
2634
2635 static int
2636 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2637 const widest_int &incr, bool count_phis)
2638 {
2639 int local_cost, sib_cost, savings = 0;
2640 widest_int cand_incr = cand_abs_increment (c);
2641
2642 if (cand_already_replaced (c))
2643 local_cost = cost_in;
2644 else if (incr == cand_incr)
2645 local_cost = cost_in - repl_savings - c->dead_savings;
2646 else
2647 local_cost = cost_in - c->dead_savings;
2648
2649 if (count_phis
2650 && phi_dependent_cand_p (c)
2651 && !cand_already_replaced (c))
2652 {
2653 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2654 local_cost += phi_incr_cost (c, incr, phi, &savings);
2655
2656 if (has_single_use (gimple_phi_result (phi)))
2657 local_cost -= savings;
2658 }
2659
2660 if (c->dependent)
2661 local_cost = lowest_cost_path (local_cost, repl_savings,
2662 lookup_cand (c->dependent), incr,
2663 count_phis);
2664
2665 if (c->sibling)
2666 {
2667 sib_cost = lowest_cost_path (cost_in, repl_savings,
2668 lookup_cand (c->sibling), incr,
2669 count_phis);
2670 local_cost = MIN (local_cost, sib_cost);
2671 }
2672
2673 return local_cost;
2674 }
2675
2676 /* Compute the total savings that would accrue from all replacements
2677 in the candidate tree rooted at C, counting only candidates with
2678 increment INCR. Assume that replacing a candidate reduces cost
2679 by REPL_SAVINGS. Also account for savings from statements that
2680 would go dead. */
2681
2682 static int
2683 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2684 bool count_phis)
2685 {
2686 int savings = 0;
2687 widest_int cand_incr = cand_abs_increment (c);
2688
2689 if (incr == cand_incr && !cand_already_replaced (c))
2690 savings += repl_savings + c->dead_savings;
2691
2692 if (count_phis
2693 && phi_dependent_cand_p (c)
2694 && !cand_already_replaced (c))
2695 {
2696 int phi_savings = 0;
2697 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2698 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2699
2700 if (has_single_use (gimple_phi_result (phi)))
2701 savings += phi_savings;
2702 }
2703
2704 if (c->dependent)
2705 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2706 count_phis);
2707
2708 if (c->sibling)
2709 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2710 count_phis);
2711
2712 return savings;
2713 }
2714
2715 /* Use target-specific costs to determine and record which increments
2716 in the current candidate tree are profitable to replace, assuming
2717 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2718 the candidate tree.
2719
2720 One slight limitation here is that we don't account for the possible
2721 introduction of casts in some cases. See replace_one_candidate for
2722 the cases where these are introduced. This should probably be cleaned
2723 up sometime. */
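/* Illustrative numbers only:  for a CAND_MULT chain with an unknown
   stride and increment 5, suppose mult_by_coeff_cost returns 4 and
   each replacement saves mul_cost - add_cost = 3.  If three candidates
   along one dependency path use increment 5, lowest_cost_path can
   reduce the initial 4 to 4 - 3 * 3 = -5, so incr_vec[i].cost = -5 and
   the increment is deemed profitable.  */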
2724
2725 static void
2726 analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
2727 {
2728 unsigned i;
2729
2730 for (i = 0; i < incr_vec_len; i++)
2731 {
2732 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2733
2734 /* If somehow this increment is bigger than a HWI, we won't
2735 be optimizing candidates that use it. And if the increment
2736 has a count of zero, nothing will be done with it. */
2737 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2738 incr_vec[i].cost = COST_INFINITE;
2739
2740 /* Increments of 0, 1, and -1 are always profitable to replace,
2741 because they always replace a multiply or add with an add or
2742 copy, and may cause one or more existing instructions to go
2743 dead. Exception: -1 can't be assumed to be profitable for
2744 pointer addition. */
2745 else if (incr == 0
2746 || incr == 1
2747 || (incr == -1
2748 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2749 != POINTER_PLUS_EXPR)))
2750 incr_vec[i].cost = COST_NEUTRAL;
2751
2752 /* FORNOW: If we need to add an initializer, give up if a cast from
2753 the candidate's type to its stride's type can lose precision.
2754 This could eventually be handled better by expressly retaining the
2755 result of a cast to a wider type in the stride. Example:
2756
2757 short int _1;
2758 _2 = (int) _1;
2759 _3 = _2 * 10;
2760 _4 = x + _3; ADD: x + (10 * _1) : int
2761 _5 = _2 * 15;
2762 _6 = x + _5; ADD: x + (15 * _1) : int
2763
2764 Right now replacing _6 would cause insertion of an initializer
2765 of the form "short int T = _1 * 5;" followed by a cast to
2766 int, which could overflow incorrectly. Had we recorded _2 or
2767 (int)_1 as the stride, this wouldn't happen. However, doing
2768 this breaks other opportunities, so this will require some
2769 care. */
2770 else if (!incr_vec[i].initializer
2771 && TREE_CODE (first_dep->stride) != INTEGER_CST
2772 && !legal_cast_p_1 (first_dep->stride,
2773 gimple_assign_lhs (first_dep->cand_stmt)))
2774
2775 incr_vec[i].cost = COST_INFINITE;
2776
2777 /* If we need to add an initializer, make sure we don't introduce
2778 a multiply by a pointer type, which can happen in certain cast
2779 scenarios. FIXME: When cleaning up these cast issues, we can
2780 afford to introduce the multiply provided we cast out to an
2781 unsigned int of appropriate size. */
2782 else if (!incr_vec[i].initializer
2783 && TREE_CODE (first_dep->stride) != INTEGER_CST
2784 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2785
2786 incr_vec[i].cost = COST_INFINITE;
2787
2788 /* For any other increment, if this is a multiply candidate, we
2789 must introduce a temporary T and initialize it with
2790 T_0 = stride * increment. When optimizing for speed, walk the
2791 candidate tree to calculate the best cost reduction along any
2792 path; if it offsets the fixed cost of inserting the initializer,
2793 replacing the increment is profitable. When optimizing for
2794 size, instead calculate the total cost reduction from replacing
2795 all candidates with this increment. */
2796 else if (first_dep->kind == CAND_MULT)
2797 {
2798 int cost = mult_by_coeff_cost (incr, mode, speed);
2799 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2800 if (speed)
2801 cost = lowest_cost_path (cost, repl_savings, first_dep,
2802 incr_vec[i].incr, COUNT_PHIS);
2803 else
2804 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2805 COUNT_PHIS);
2806
2807 incr_vec[i].cost = cost;
2808 }
2809
2810 /* If this is an add candidate, the initializer may already
2811 exist, so only calculate the cost of the initializer if it
2812 doesn't. We are replacing one add with another here, so the
2813 known replacement savings is zero. We will account for removal
2814 of dead instructions in lowest_cost_path or total_savings. */
2815 else
2816 {
2817 int cost = 0;
2818 if (!incr_vec[i].initializer)
2819 cost = mult_by_coeff_cost (incr, mode, speed);
2820
2821 if (speed)
2822 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2823 DONT_COUNT_PHIS);
2824 else
2825 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2826 DONT_COUNT_PHIS);
2827
2828 incr_vec[i].cost = cost;
2829 }
2830 }
2831 }
2832
2833 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2834 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2835 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2836 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2837 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
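/* For example:  if BB1 strictly dominates BB2, the NCD is BB1 and C1 is
   returned in *WHERE; if BB1 == BB2, the candidate with the smaller
   cand_num wins; and if the NCD contains neither candidate, *WHERE is
   set to NULL so callers know that no existing candidate statement
   marks the insertion point.  */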
2838
2839 static basic_block
2840 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2841 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2842 {
2843 basic_block ncd;
2844
2845 if (!bb1)
2846 {
2847 *where = c2;
2848 return bb2;
2849 }
2850
2851 if (!bb2)
2852 {
2853 *where = c1;
2854 return bb1;
2855 }
2856
2857 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2858
2859 /* If both candidates are in the same block, the earlier
2860 candidate wins. */
2861 if (bb1 == ncd && bb2 == ncd)
2862 {
2863 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2864 *where = c2;
2865 else
2866 *where = c1;
2867 }
2868
2869 /* Otherwise, if one of them produced a candidate in the
2870 dominator, that one wins. */
2871 else if (bb1 == ncd)
2872 *where = c1;
2873
2874 else if (bb2 == ncd)
2875 *where = c2;
2876
2877 /* If neither matches the dominator, neither wins. */
2878 else
2879 *where = NULL;
2880
2881 return ncd;
2882 }
2883
2884 /* Consider all candidates that feed PHI. Find the nearest common
2885 dominator of those candidates requiring the given increment INCR.
2886 Further find and return the nearest common dominator of this result
2887 with block NCD. If the returned block contains one or more of the
2888 candidates, return the earliest candidate in the block in *WHERE. */
2889
2890 static basic_block
2891 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gimple phi,
2892 basic_block ncd, slsr_cand_t *where)
2893 {
2894 unsigned i;
2895 slsr_cand_t basis = lookup_cand (c->basis);
2896 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2897
2898 for (i = 0; i < gimple_phi_num_args (phi); i++)
2899 {
2900 tree arg = gimple_phi_arg_def (phi, i);
2901
2902 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2903 {
2904 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2905
2906 if (gimple_code (arg_def) == GIMPLE_PHI)
2907 ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
2908 else
2909 {
2910 slsr_cand_t arg_cand = base_cand_from_table (arg);
2911 widest_int diff = arg_cand->index - basis->index;
2912
2913 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2914 ncd = ncd_for_two_cands (ncd, gimple_bb (arg_cand->cand_stmt),
2915 *where, arg_cand, where);
2916 }
2917 }
2918 }
2919
2920 return ncd;
2921 }
2922
2923 /* Consider the candidate C together with any candidates that feed
2924 C's phi dependence (if any). Find and return the nearest common
2925 dominator of those candidates requiring the given increment INCR.
2926 If the returned block contains one or more of the candidates,
2927 return the earliest candidate in the block in *WHERE. */
2928
2929 static basic_block
2930 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
2931 {
2932 basic_block ncd = NULL;
2933
2934 if (cand_abs_increment (c) == incr)
2935 {
2936 ncd = gimple_bb (c->cand_stmt);
2937 *where = c;
2938 }
2939
2940 if (phi_dependent_cand_p (c))
2941 ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
2942 ncd, where);
2943
2944 return ncd;
2945 }
2946
2947 /* Consider all candidates in the tree rooted at C for which INCR
2948 represents the required increment of C relative to its basis.
2949 Find and return the basic block that most nearly dominates all
2950 such candidates. If the returned block contains one or more of
2951 the candidates, return the earliest candidate in the block in
2952 *WHERE. */
2953
2954 static basic_block
2955 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
2956 slsr_cand_t *where)
2957 {
2958 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
2959 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
2960
2961 /* First find the NCD of all siblings and dependents. */
2962 if (c->sibling)
2963 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
2964 incr, &sib_where);
2965 if (c->dependent)
2966 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
2967 incr, &dep_where);
2968 if (!sib_ncd && !dep_ncd)
2969 {
2970 new_where = NULL;
2971 ncd = NULL;
2972 }
2973 else if (sib_ncd && !dep_ncd)
2974 {
2975 new_where = sib_where;
2976 ncd = sib_ncd;
2977 }
2978 else if (dep_ncd && !sib_ncd)
2979 {
2980 new_where = dep_where;
2981 ncd = dep_ncd;
2982 }
2983 else
2984 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
2985 dep_where, &new_where);
2986
2987 /* If the candidate's increment doesn't match the one we're interested
2988 in (and neither do any increments for feeding defs of a phi-dependence),
2989 then the result depends only on siblings and dependents. */
2990 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
2991
2992 if (!this_ncd || cand_already_replaced (c))
2993 {
2994 *where = new_where;
2995 return ncd;
2996 }
2997
2998 /* Otherwise, compare this candidate with the result from all siblings
2999 and dependents. */
3000 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3001
3002 return ncd;
3003 }
3004
3005 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3006
3007 static inline bool
3008 profitable_increment_p (unsigned index)
3009 {
3010 return (incr_vec[index].cost <= COST_NEUTRAL);
3011 }
3012
3013 /* For each profitable increment in the increment vector not equal to
3014 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3015 dominator of all statements in the candidate chain rooted at C
3016 that require that increment, and insert an initializer
3017 T_0 = stride * increment at that location. Record T_0 with the
3018 increment record. */
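/* For instance (hypothetical gimple):  if increment 5 is profitable,
   has no existing initializer, and the candidates needing it are most
   nearly dominated by block 7, the statement

     slsr_8 = stride_3 * 5;

   is inserted in block 7 (before the earliest such candidate there, if
   any, otherwise at the end of the block), and slsr_8 is recorded as
   the increment's initializer for use as rhs2 in the replacements.  */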
3019
3020 static void
3021 insert_initializers (slsr_cand_t c)
3022 {
3023 unsigned i;
3024
3025 for (i = 0; i < incr_vec_len; i++)
3026 {
3027 basic_block bb;
3028 slsr_cand_t where = NULL;
3029 gimple init_stmt;
3030 tree stride_type, new_name, incr_tree;
3031 widest_int incr = incr_vec[i].incr;
3032
3033 if (!profitable_increment_p (i)
3034 || incr == 1
3035 || (incr == -1
3036 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3037 || incr == 0)
3038 continue;
3039
3040 /* We may have already identified an existing initializer that
3041 will suffice. */
3042 if (incr_vec[i].initializer)
3043 {
3044 if (dump_file && (dump_flags & TDF_DETAILS))
3045 {
3046 fputs ("Using existing initializer: ", dump_file);
3047 print_gimple_stmt (dump_file,
3048 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3049 0, 0);
3050 }
3051 continue;
3052 }
3053
3054 /* Find the block that most closely dominates all candidates
3055 with this increment. If there is at least one candidate in
3056 that block, the earliest one will be returned in WHERE. */
3057 bb = nearest_common_dominator_for_cands (c, incr, &where);
3058
3059 /* Create a new SSA name to hold the initializer's value. */
3060 stride_type = TREE_TYPE (c->stride);
3061 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3062 incr_vec[i].initializer = new_name;
3063
3064 /* Create the initializer and insert it in the latest possible
3065 dominating position. */
3066 incr_tree = wide_int_to_tree (stride_type, incr);
3067 init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
3068 c->stride, incr_tree);
3069 if (where)
3070 {
3071 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3072 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3073 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3074 }
3075 else
3076 {
3077 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3078 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3079
3080 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3081 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3082 else
3083 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3084
3085 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3086 }
3087
3088 if (dump_file && (dump_flags & TDF_DETAILS))
3089 {
3090 fputs ("Inserting initializer: ", dump_file);
3091 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3092 }
3093 }
3094 }
3095
3096 /* Return TRUE iff all required increments for candidates feeding PHI
3097 are profitable to replace on behalf of candidate C. */
3098
3099 static bool
3100 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3101 {
3102 unsigned i;
3103 slsr_cand_t basis = lookup_cand (c->basis);
3104 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3105
3106 for (i = 0; i < gimple_phi_num_args (phi); i++)
3107 {
3108 tree arg = gimple_phi_arg_def (phi, i);
3109
3110 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3111 {
3112 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3113
3114 if (gimple_code (arg_def) == GIMPLE_PHI)
3115 {
3116 if (!all_phi_incrs_profitable (c, arg_def))
3117 return false;
3118 }
3119 else
3120 {
3121 int j;
3122 slsr_cand_t arg_cand = base_cand_from_table (arg);
3123 widest_int increment = arg_cand->index - basis->index;
3124
3125 if (!address_arithmetic_p && wi::neg_p (increment))
3126 increment = -increment;
3127
3128 j = incr_vec_index (increment);
3129
3130 if (dump_file && (dump_flags & TDF_DETAILS))
3131 {
3132 fprintf (dump_file, " Conditional candidate %d, phi: ",
3133 c->cand_num);
3134 print_gimple_stmt (dump_file, phi, 0, 0);
3135 fputs (" increment: ", dump_file);
3136 print_decs (increment, dump_file);
3137 if (j < 0)
3138 fprintf (dump_file,
3139 "\n Not replaced; incr_vec overflow.\n");
3140 else {
3141 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3142 if (profitable_increment_p (j))
3143 fputs (" Replacing...\n", dump_file);
3144 else
3145 fputs (" Not replaced.\n", dump_file);
3146 }
3147 }
3148
3149 if (j < 0 || !profitable_increment_p (j))
3150 return false;
3151 }
3152 }
3153 }
3154
3155 return true;
3156 }
3157
3158 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3159 type TO_TYPE, and insert it in front of the statement represented
3160 by candidate C, using C's location for the new statement. Return
3161 the new SSA name. */
3162
3163 static tree
3164 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3165 {
3166 tree cast_lhs;
3167 gimple cast_stmt;
3168 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3169
3170 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3171 cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs,
3172 from_expr, NULL_TREE);
3173 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3174 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3175
3176 if (dump_file && (dump_flags & TDF_DETAILS))
3177 {
3178 fputs (" Inserting: ", dump_file);
3179 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3180 }
3181
3182 return cast_lhs;
3183 }
3184
3185 /* Replace the RHS of the statement represented by candidate C with
3186 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3187 leave C unchanged or just interchange its operands. The original
3188 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3189 If the replacement was made and we are doing a details dump,
3190 return the revised statement, else NULL. */
3191
3192 static gimple
3193 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3194 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3195 slsr_cand_t c)
3196 {
3197 if (new_code != old_code
3198 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3199 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3200 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3201 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3202 {
3203 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3204 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3205 update_stmt (gsi_stmt (gsi));
3206 c->cand_stmt = gsi_stmt (gsi);
3207
3208 if (dump_file && (dump_flags & TDF_DETAILS))
3209 return gsi_stmt (gsi);
3210 }
3211
3212 else if (dump_file && (dump_flags & TDF_DETAILS))
3213 fputs (" (duplicate, not actually replacing)\n", dump_file);
3214
3215 return NULL;
3216 }
3217
3218 /* Strength-reduce the statement represented by candidate C by replacing
3219 it with an equivalent addition or subtraction. I is the index into
3220 the increment vector identifying C's increment. A cast is introduced
3221 before the candidate statement if one is needed. BASIS_NAME
3222 is the rhs1 to use in creating the add/subtract. */
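/* Sketch of the possible rewrites (hypothetical SSA names):  with
   basis_name y_5 and a recorded initializer T_0 for the increment, the
   candidate becomes

     x_7 = y_5 + T_0;     (or x_7 = y_5 - T_0 when the signs differ)

   while increments of 1, -1, and 0 instead yield

     x_7 = y_5 + stride;   x_7 = y_5 - stride;   x_7 = y_5;

   introducing a cast where the types require one.  */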
3223
3224 static void
3225 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3226 {
3227 gimple stmt_to_print = NULL;
3228 tree orig_rhs1, orig_rhs2;
3229 tree rhs2;
3230 enum tree_code orig_code, repl_code;
3231 widest_int cand_incr;
3232
3233 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3234 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3235 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3236 cand_incr = cand_increment (c);
3237
3238 if (dump_file && (dump_flags & TDF_DETAILS))
3239 {
3240 fputs ("Replacing: ", dump_file);
3241 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3242 stmt_to_print = c->cand_stmt;
3243 }
3244
3245 if (address_arithmetic_p)
3246 repl_code = POINTER_PLUS_EXPR;
3247 else
3248 repl_code = PLUS_EXPR;
3249
3250 /* If the increment has an initializer T_0, replace the candidate
3251 statement with an add of the basis name and the initializer. */
3252 if (incr_vec[i].initializer)
3253 {
3254 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3255 tree orig_type = TREE_TYPE (orig_rhs2);
3256
3257 if (types_compatible_p (orig_type, init_type))
3258 rhs2 = incr_vec[i].initializer;
3259 else
3260 rhs2 = introduce_cast_before_cand (c, orig_type,
3261 incr_vec[i].initializer);
3262
3263 if (incr_vec[i].incr != cand_incr)
3264 {
3265 gcc_assert (repl_code == PLUS_EXPR);
3266 repl_code = MINUS_EXPR;
3267 }
3268
3269 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3270 orig_code, orig_rhs1, orig_rhs2,
3271 c);
3272 }
3273
3274 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3275 with a subtract of the stride from the basis name, a copy
3276 from the basis name, or an add of the stride to the basis
3277 name, respectively. It may be necessary to introduce a
3278 cast (or reuse an existing cast). */
3279 else if (cand_incr == 1)
3280 {
3281 tree stride_type = TREE_TYPE (c->stride);
3282 tree orig_type = TREE_TYPE (orig_rhs2);
3283
3284 if (types_compatible_p (orig_type, stride_type))
3285 rhs2 = c->stride;
3286 else
3287 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3288
3289 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3290 orig_code, orig_rhs1, orig_rhs2,
3291 c);
3292 }
3293
3294 else if (cand_incr == -1)
3295 {
3296 tree stride_type = TREE_TYPE (c->stride);
3297 tree orig_type = TREE_TYPE (orig_rhs2);
3298 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3299
3300 if (types_compatible_p (orig_type, stride_type))
3301 rhs2 = c->stride;
3302 else
3303 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3304
3305 if (orig_code != MINUS_EXPR
3306 || !operand_equal_p (basis_name, orig_rhs1, 0)
3307 || !operand_equal_p (rhs2, orig_rhs2, 0))
3308 {
3309 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3310 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3311 update_stmt (gsi_stmt (gsi));
3312 c->cand_stmt = gsi_stmt (gsi);
3313
3314 if (dump_file && (dump_flags & TDF_DETAILS))
3315 stmt_to_print = gsi_stmt (gsi);
3316 }
3317 else if (dump_file && (dump_flags & TDF_DETAILS))
3318 fputs (" (duplicate, not actually replacing)\n", dump_file);
3319 }
3320
3321 else if (cand_incr == 0)
3322 {
3323 tree lhs = gimple_assign_lhs (c->cand_stmt);
3324 tree lhs_type = TREE_TYPE (lhs);
3325 tree basis_type = TREE_TYPE (basis_name);
3326
3327 if (types_compatible_p (lhs_type, basis_type))
3328 {
3329 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
3330 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3331 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3332 gsi_replace (&gsi, copy_stmt, false);
3333 c->cand_stmt = copy_stmt;
3334
3335 if (dump_file && (dump_flags & TDF_DETAILS))
3336 stmt_to_print = copy_stmt;
3337 }
3338 else
3339 {
3340 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3341 gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
3342 basis_name,
3343 NULL_TREE);
3344 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3345 gsi_replace (&gsi, cast_stmt, false);
3346 c->cand_stmt = cast_stmt;
3347
3348 if (dump_file && (dump_flags & TDF_DETAILS))
3349 stmt_to_print = cast_stmt;
3350 }
3351 }
3352 else
3353 gcc_unreachable ();
3354
3355 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3356 {
3357 fputs ("With: ", dump_file);
3358 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3359 fputs ("\n", dump_file);
3360 }
3361 }
3362
3363 /* For each candidate in the tree rooted at C, replace it with
3364 an increment if such has been shown to be profitable. */
3365
3366 static void
3367 replace_profitable_candidates (slsr_cand_t c)
3368 {
3369 if (!cand_already_replaced (c))
3370 {
3371 widest_int increment = cand_abs_increment (c);
3372 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3373 int i;
3374
3375 i = incr_vec_index (increment);
3376
3377 /* Only process profitable increments. Nothing useful can be done
3378 to a cast or copy. */
3379 if (i >= 0
3380 && profitable_increment_p (i)
3381 && orig_code != MODIFY_EXPR
3382 && orig_code != NOP_EXPR)
3383 {
3384 if (phi_dependent_cand_p (c))
3385 {
3386 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3387
3388 if (all_phi_incrs_profitable (c, phi))
3389 {
3390 /* Look up the LHS SSA name from C's basis. This will be
3391 the RHS1 of the adds we will introduce to create new
3392 phi arguments. */
3393 slsr_cand_t basis = lookup_cand (c->basis);
3394 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3395
3396 /* Create a new phi statement that will represent C's true
3397 basis after the transformation is complete. */
3398 location_t loc = gimple_location (c->cand_stmt);
3399 tree name = create_phi_basis (c, phi, basis_name,
3400 loc, UNKNOWN_STRIDE);
3401
3402 /* Replace C with an add of the new basis phi and the
3403 increment. */
3404 replace_one_candidate (c, i, name);
3405 }
3406 }
3407 else
3408 {
3409 slsr_cand_t basis = lookup_cand (c->basis);
3410 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3411 replace_one_candidate (c, i, basis_name);
3412 }
3413 }
3414 }
3415
3416 if (c->sibling)
3417 replace_profitable_candidates (lookup_cand (c->sibling));
3418
3419 if (c->dependent)
3420 replace_profitable_candidates (lookup_cand (c->dependent));
3421 }
3422 \f
3423 /* Analyze costs of related candidates in the candidate vector,
3424 and make beneficial replacements. */
3425
3426 static void
3427 analyze_candidates_and_replace (void)
3428 {
3429 unsigned i;
3430 slsr_cand_t c;
3431
3432 /* Each candidate that has a null basis and a non-null
3433 dependent is the root of a tree of related statements.
3434 Analyze each tree to determine a subset of those
3435 statements that can be replaced with maximum benefit. */
3436 FOR_EACH_VEC_ELT (cand_vec, i, c)
3437 {
3438 slsr_cand_t first_dep;
3439
3440 if (c->basis != 0 || c->dependent == 0)
3441 continue;
3442
3443 if (dump_file && (dump_flags & TDF_DETAILS))
3444 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3445 c->cand_num);
3446
3447 first_dep = lookup_cand (c->dependent);
3448
3449 /* If this is a chain of CAND_REFs, unconditionally replace
3450 each of them with a strength-reduced data reference. */
3451 if (c->kind == CAND_REF)
3452 replace_refs (c);
3453
3454 /* If the common stride of all related candidates is a known
3455 constant, each candidate without a phi-dependence can be
3456 profitably replaced. Each replaces a multiply by a single
3457 add, with the possibility that a feeding add also goes dead.
3458 A candidate with a phi-dependence is replaced only if the
3459 compensation code it requires is offset by the strength
3460 reduction savings. */
3461 else if (TREE_CODE (c->stride) == INTEGER_CST)
3462 replace_uncond_cands_and_profitable_phis (first_dep);
3463
3464 /* When the stride is an SSA name, it may still be profitable
3465 to replace some or all of the dependent candidates, depending
3466 on whether the introduced increments can be reused, or are
3467 less expensive to calculate than the replaced statements. */
3468 else
3469 {
3470 enum machine_mode mode;
3471 bool speed;
3472
3473 /* Determine whether we'll be generating pointer arithmetic
3474 when replacing candidates. */
3475 address_arithmetic_p = (c->kind == CAND_ADD
3476 && POINTER_TYPE_P (c->cand_type));
3477
3478 /* If all candidates have already been replaced under other
3479 interpretations, nothing remains to be done. */
3480 if (!count_candidates (c))
3481 continue;
3482
3483 /* Construct an array of increments for this candidate chain. */
3484 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3485 incr_vec_len = 0;
3486 record_increments (c);
3487
3488 /* Determine which increments are profitable to replace. */
3489 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3490 speed = optimize_cands_for_speed_p (c);
3491 analyze_increments (first_dep, mode, speed);
3492
3493 /* Insert initializers of the form T_0 = stride * increment
3494 for use in profitable replacements. */
3495 insert_initializers (first_dep);
3496 dump_incr_vec ();
3497
3498 /* Perform the replacements. */
3499 replace_profitable_candidates (first_dep);
3500 free (incr_vec);
3501 }
3502 }
3503 }
3504
3505 static unsigned
3506 execute_strength_reduction (void)
3507 {
3508 /* Create the obstack where candidates will reside. */
3509 gcc_obstack_init (&cand_obstack);
3510
3511 /* Allocate the candidate vector. */
3512 cand_vec.create (128);
3513
3514 /* Allocate the mapping from statements to candidate indices. */
3515 stmt_cand_map = pointer_map_create ();
3516
3517 /* Create the obstack where candidate chains will reside. */
3518 gcc_obstack_init (&chain_obstack);
3519
3520 /* Allocate the mapping from base expressions to candidate chains. */
3521 base_cand_map.create (500);
3522
3523 /* Initialize the loop optimizer. We need to detect flow across
3524 back edges, and this gives us dominator information as well. */
3525 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3526
3527 /* Walk the CFG in predominator order looking for strength reduction
3528 candidates. */
3529 find_candidates_dom_walker (CDI_DOMINATORS)
3530 .walk (cfun->cfg->x_entry_block_ptr);
3531
3532 if (dump_file && (dump_flags & TDF_DETAILS))
3533 {
3534 dump_cand_vec ();
3535 dump_cand_chains ();
3536 }
3537
3538 /* Analyze costs and make appropriate replacements. */
3539 analyze_candidates_and_replace ();
3540
3541 loop_optimizer_finalize ();
3542 base_cand_map.dispose ();
3543 obstack_free (&chain_obstack, NULL);
3544 pointer_map_destroy (stmt_cand_map);
3545 cand_vec.release ();
3546 obstack_free (&cand_obstack, NULL);
3547
3548 return 0;
3549 }
3550
3551 static bool
3552 gate_strength_reduction (void)
3553 {
3554 return flag_tree_slsr;
3555 }
3556
3557 namespace {
3558
3559 const pass_data pass_data_strength_reduction =
3560 {
3561 GIMPLE_PASS, /* type */
3562 "slsr", /* name */
3563 OPTGROUP_NONE, /* optinfo_flags */
3564 true, /* has_gate */
3565 true, /* has_execute */
3566 TV_GIMPLE_SLSR, /* tv_id */
3567 ( PROP_cfg | PROP_ssa ), /* properties_required */
3568 0, /* properties_provided */
3569 0, /* properties_destroyed */
3570 0, /* todo_flags_start */
3571 TODO_verify_ssa, /* todo_flags_finish */
3572 };
3573
3574 class pass_strength_reduction : public gimple_opt_pass
3575 {
3576 public:
3577 pass_strength_reduction (gcc::context *ctxt)
3578 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3579 {}
3580
3581 /* opt_pass methods: */
3582 bool gate () { return gate_strength_reduction (); }
3583 unsigned int execute () { return execute_strength_reduction (); }
3584
3585 }; // class pass_strength_reduction
3586
3587 } // anon namespace
3588
3589 gimple_opt_pass *
3590 make_pass_strength_reduction (gcc::context *ctxt)
3591 {
3592 return new pass_strength_reduction (ctxt);
3593 }