]>
Commit | Line | Data |
---|---|---|
ad4a85ad | 1 | /* Predictive commoning. |
3aea1f79 | 2 | Copyright (C) 2005-2014 Free Software Foundation, Inc. |
48e1416a | 3 | |
ad4a85ad | 4 | This file is part of GCC. |
48e1416a | 5 | |
ad4a85ad | 6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by the | |
8c4c00c1 | 8 | Free Software Foundation; either version 3, or (at your option) any |
ad4a85ad | 9 | later version. |
48e1416a | 10 | |
ad4a85ad | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
48e1416a | 15 | |
ad4a85ad | 16 | You should have received a copy of the GNU General Public License |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
ad4a85ad | 19 | |
20 | /* This file implements the predictive commoning optimization. Predictive | |
21 | commoning can be viewed as CSE around a loop, and with some improvements, | |
22 | as generalized strength reduction-- i.e., reusing values computed in | |
23 | earlier iterations of a loop in the later ones. So far, the pass only | |
24 | handles the most useful case, that is, reusing values of memory references. | |
25 | If you think this is all just a special case of PRE, you are sort of right; | |
26 | however, concentrating on loops is simpler, and makes it possible to | |
27 | incorporate data dependence analysis to detect the opportunities, perform | |
28 | loop unrolling to avoid copies together with renaming immediately, | |
29 | and if needed, we could also take register pressure into account. | |
30 | ||
31 | Let us demonstrate what is done on an example: | |
48e1416a | 32 | |
ad4a85ad | 33 | for (i = 0; i < 100; i++) |
34 | { | |
35 | a[i+2] = a[i] + a[i+1]; | |
36 | b[10] = b[10] + i; | |
37 | c[i] = c[99 - i]; | |
38 | d[i] = d[i + 1]; | |
39 | } | |
40 | ||
41 | 1) We find data references in the loop, and split them to mutually | |
42 | independent groups (i.e., we find components of a data dependence | |
43 | graph). We ignore read-read dependences whose distance is not constant. | |
44 | (TODO -- we could also ignore antidependences). In this example, we | |
45 | find the following groups: | |
46 | ||
47 | a[i]{read}, a[i+1]{read}, a[i+2]{write} | |
48 | b[10]{read}, b[10]{write} | |
49 | c[99 - i]{read}, c[i]{write} | |
50 | d[i + 1]{read}, d[i]{write} | |
51 | ||
52 | 2) Inside each of the groups, we verify several conditions: | |
53 | a) all the references must differ in indices only, and the indices | |
54 | must all have the same step | |
55 | b) the references must dominate loop latch (and thus, they must be | |
56 | ordered by dominance relation). | |
57 | c) the distance of the indices must be a small multiple of the step | |
58 | We are then able to compute the difference of the references (# of | |
59 | iterations before they point to the same place as the first of them). | |
60 | Also, in case there are writes in the loop, we split the groups into | |
61 | chains whose head is the write whose values are used by the reads in | |
62 | the same chain. The chains are then processed independently, | |
63 | making the further transformations simpler. Also, the shorter chains | |
64 | need the same number of registers, but may require lower unrolling | |
65 | factor in order to get rid of the copies on the loop latch. | |
48e1416a | 66 | |
ad4a85ad | 67 | In our example, we get the following chains (the chain for c is invalid). |
68 | ||
69 | a[i]{read,+0}, a[i+1]{read,-1}, a[i+2]{write,-2} | |
70 | b[10]{read,+0}, b[10]{write,+0} | |
71 | d[i + 1]{read,+0}, d[i]{write,+1} | |
72 | ||
73 | 3) For each read, we determine the read or write whose value it reuses, | |
74 | together with the distance of this reuse. I.e. we take the last | |
75 | reference before it with distance 0, or the last of the references | |
76 | with the smallest positive distance to the read. Then, we remove | |
77 | the references that are not used in any of these chains, discard the | |
78 | empty groups, and propagate all the links so that they point to the | |
48e1416a | 79 | single root reference of the chain (adjusting their distance |
ad4a85ad | 80 | appropriately). Some extra care needs to be taken for references with |
81 | step 0. In our example (the numbers indicate the distance of the | |
82 | reuse), | |
83 | ||
84 | a[i] --> (*) 2, a[i+1] --> (*) 1, a[i+2] (*) | |
85 | b[10] --> (*) 1, b[10] (*) | |
86 | ||
87 | 4) The chains are combined together if possible. If the corresponding | |
88 | elements of two chains are always combined together with the same | |
89 | operator, we remember just the result of this combination, instead | |
90 | of remembering the values separately. We may need to perform | |
91 | reassociation to enable combining, for example | |
92 | ||
93 | e[i] + f[i+1] + e[i+1] + f[i] | |
94 | ||
95 | can be reassociated as | |
96 | ||
97 | (e[i] + f[i]) + (e[i+1] + f[i+1]) | |
98 | ||
99 | and we can combine the chains for e and f into one chain. | |
100 | ||
101 | 5) For each root reference (end of the chain) R, let N be maximum distance | |
9d75589a | 102 | of a reference reusing its value. Variables R0 up to RN are created, |
ad4a85ad | 103 | together with phi nodes that transfer values from R1 .. RN to |
104 | R0 .. R(N-1). | |
105 | Initial values are loaded to R0..R(N-1) (in case not all references | |
106 | must necessarily be accessed and they may trap, we may fail here; | |
107 | TODO sometimes, the loads could be guarded by a check for the number | |
108 | of iterations). Values loaded/stored in roots are also copied to | |
109 | RN. Other reads are replaced with the appropriate variable Ri. | |
110 | Everything is put to SSA form. | |
111 | ||
112 | As a small improvement, if R0 is dead after the root (i.e., all uses of | |
113 | the value with the maximum distance dominate the root), we can avoid | |
114 | creating RN and use R0 instead of it. | |
115 | ||
116 | In our example, we get (only the parts concerning a and b are shown): | |
117 | for (i = 0; i < 100; i++) | |
118 | { | |
119 | f = phi (a[0], s); | |
120 | s = phi (a[1], f); | |
121 | x = phi (b[10], x); | |
122 | ||
123 | f = f + s; | |
124 | a[i+2] = f; | |
125 | x = x + i; | |
126 | b[10] = x; | |
127 | } | |
128 | ||
129 | 6) Factor F for unrolling is determined as the smallest common multiple of | |
130 | (N + 1) for each root reference (N for references for which we avoided | |
131 | creating RN). If F and the loop is small enough, loop is unrolled F | |
132 | times. The stores to RN (R0) in the copies of the loop body are | |
133 | periodically replaced with R0, R1, ... (R1, R2, ...), so that they can | |
134 | be coalesced and the copies can be eliminated. | |
48e1416a | 135 | |
ad4a85ad | 136 | TODO -- copy propagation and other optimizations may change the live |
137 | ranges of the temporary registers and prevent them from being coalesced; | |
138 | this may increase the register pressure. | |
139 | ||
140 | In our case, F = 2 and the (main loop of the) result is | |
141 | ||
142 | for (i = 0; i < ...; i += 2) | |
143 | { | |
144 | f = phi (a[0], f); | |
145 | s = phi (a[1], s); | |
146 | x = phi (b[10], x); | |
147 | ||
148 | f = f + s; | |
149 | a[i+2] = f; | |
150 | x = x + i; | |
151 | b[10] = x; | |
152 | ||
153 | s = s + f; | |
154 | a[i+3] = s; | |
155 | x = x + i; | |
156 | b[10] = x; | |
157 | } | |
158 | ||
159 | TODO -- stores killing other stores can be taken into account, e.g., | |
160 | for (i = 0; i < n; i++) | |
161 | { | |
162 | a[i] = 1; | |
163 | a[i+2] = 2; | |
164 | } | |
165 | ||
166 | can be replaced with | |
167 | ||
168 | t0 = a[0]; | |
169 | t1 = a[1]; | |
170 | for (i = 0; i < n; i++) | |
171 | { | |
172 | a[i] = 1; | |
173 | t2 = 2; | |
174 | t0 = t1; | |
175 | t1 = t2; | |
176 | } | |
177 | a[n] = t0; | |
178 | a[n+1] = t1; | |
179 | ||
180 | The interesting part is that this would generalize store motion; still, since | |
181 | sm is performed elsewhere, it does not seem that important. | |
182 | ||
183 | Predictive commoning can be generalized for arbitrary computations (not | |
184 | just memory loads), and also nontrivial transfer functions (e.g., replacing | |
185 | i * i with ii_last + 2 * i + 1), to generalize strength reduction. */ | |
186 | ||
187 | #include "config.h" | |
188 | #include "system.h" | |
189 | #include "coretypes.h" | |
190 | #include "tm.h" | |
191 | #include "tree.h" | |
192 | #include "tm_p.h" | |
193 | #include "cfgloop.h" | |
94ea8568 | 194 | #include "predict.h" |
195 | #include "vec.h" | |
196 | #include "hashtab.h" | |
197 | #include "hash-set.h" | |
198 | #include "machmode.h" | |
199 | #include "hard-reg-set.h" | |
200 | #include "input.h" | |
201 | #include "function.h" | |
202 | #include "dominance.h" | |
203 | #include "cfg.h" | |
bc61cadb | 204 | #include "basic-block.h" |
205 | #include "tree-ssa-alias.h" | |
206 | #include "internal-fn.h" | |
207 | #include "tree-eh.h" | |
208 | #include "gimple-expr.h" | |
209 | #include "is-a.h" | |
073c1fd5 | 210 | #include "gimple.h" |
a8783bee | 211 | #include "gimplify.h" |
dcf1a1ec | 212 | #include "gimple-iterator.h" |
e795d6e1 | 213 | #include "gimplify-me.h" |
073c1fd5 | 214 | #include "gimple-ssa.h" |
215 | #include "tree-phinodes.h" | |
216 | #include "ssa-iterators.h" | |
9ed99284 | 217 | #include "stringpool.h" |
073c1fd5 | 218 | #include "tree-ssanames.h" |
05d9c18a | 219 | #include "tree-ssa-loop-ivopts.h" |
220 | #include "tree-ssa-loop-manip.h" | |
221 | #include "tree-ssa-loop-niter.h" | |
073c1fd5 | 222 | #include "tree-ssa-loop.h" |
223 | #include "tree-into-ssa.h" | |
9ed99284 | 224 | #include "expr.h" |
073c1fd5 | 225 | #include "tree-dfa.h" |
69ee5dbb | 226 | #include "tree-ssa.h" |
ad4a85ad | 227 | #include "tree-data-ref.h" |
228 | #include "tree-scalar-evolution.h" | |
229 | #include "tree-chrec.h" | |
230 | #include "params.h" | |
ce084dfc | 231 | #include "gimple-pretty-print.h" |
ad4a85ad | 232 | #include "tree-pass.h" |
233 | #include "tree-affine.h" | |
234 | #include "tree-inline.h" | |
e913b5cd | 235 | #include "wide-int-print.h" |
ad4a85ad | 236 | |
237 | /* The maximum number of iterations between the considered memory | |
238 | references. */ | |
239 | ||
240 | #define MAX_DISTANCE (target_avail_regs < 16 ? 4 : 8) | |
48e1416a | 241 | |
75a70cf9 | 242 | /* Data references (or phi nodes that carry data reference values across |
243 | loop iterations). */ | |
ad4a85ad | 244 | |
26dbec0a | 245 | typedef struct dref_d |
ad4a85ad | 246 | { |
247 | /* The reference itself. */ | |
248 | struct data_reference *ref; | |
249 | ||
250 | /* The statement in which the reference appears. */ | |
75a70cf9 | 251 | gimple stmt; |
252 | ||
253 | /* In case that STMT is a phi node, this field is set to the SSA name | |
254 | defined by it in replace_phis_by_defined_names (in order to avoid | |
255 | pointing to phi node that got reallocated in the meantime). */ | |
256 | tree name_defined_by_phi; | |
ad4a85ad | 257 | |
258 | /* Distance of the reference from the root of the chain (in number of | |
259 | iterations of the loop). */ | |
260 | unsigned distance; | |
261 | ||
262 | /* Number of iterations offset from the first reference in the component. */ | |
5de9d3ed | 263 | widest_int offset; |
ad4a85ad | 264 | |
265 | /* Number of the reference in a component, in dominance ordering. */ | |
266 | unsigned pos; | |
267 | ||
268 | /* True if the memory reference is always accessed when the loop is | |
269 | entered. */ | |
270 | unsigned always_accessed : 1; | |
271 | } *dref; | |
272 | ||
ad4a85ad | 273 | |
274 | /* Type of the chain of the references. */ | |
275 | ||
276 | enum chain_type | |
277 | { | |
278 | /* The addresses of the references in the chain are constant. */ | |
279 | CT_INVARIANT, | |
280 | ||
281 | /* There are only loads in the chain. */ | |
282 | CT_LOAD, | |
283 | ||
284 | /* Root of the chain is store, the rest are loads. */ | |
285 | CT_STORE_LOAD, | |
286 | ||
287 | /* A combination of two chains. */ | |
288 | CT_COMBINATION | |
289 | }; | |
290 | ||
291 | /* Chains of data references. */ | |
292 | ||
293 | typedef struct chain | |
294 | { | |
295 | /* Type of the chain. */ | |
296 | enum chain_type type; | |
297 | ||
298 | /* For combination chains, the operator and the two chains that are | |
299 | combined, and the type of the result. */ | |
f4e36c33 | 300 | enum tree_code op; |
ad4a85ad | 301 | tree rslt_type; |
302 | struct chain *ch1, *ch2; | |
303 | ||
304 | /* The references in the chain. */ | |
f1f41a6c | 305 | vec<dref> refs; |
ad4a85ad | 306 | |
307 | /* The maximum distance of the reference in the chain from the root. */ | |
308 | unsigned length; | |
309 | ||
310 | /* The variables used to copy the value throughout iterations. */ | |
f1f41a6c | 311 | vec<tree> vars; |
ad4a85ad | 312 | |
313 | /* Initializers for the variables. */ | |
f1f41a6c | 314 | vec<tree> inits; |
ad4a85ad | 315 | |
316 | /* True if there is a use of a variable with the maximal distance | |
317 | that comes after the root in the loop. */ | |
318 | unsigned has_max_use_after : 1; | |
319 | ||
320 | /* True if all the memory references in the chain are always accessed. */ | |
321 | unsigned all_always_accessed : 1; | |
322 | ||
323 | /* True if this chain was combined together with some other chain. */ | |
324 | unsigned combined : 1; | |
325 | } *chain_p; | |
326 | ||
ad4a85ad | 327 | |
328 | /* Describes the knowledge about the step of the memory references in | |
329 | the component. */ | |
330 | ||
331 | enum ref_step_type | |
332 | { | |
333 | /* The step is zero. */ | |
334 | RS_INVARIANT, | |
335 | ||
336 | /* The step is nonzero. */ | |
337 | RS_NONZERO, | |
338 | ||
339 | /* The step may or may not be nonzero. */ | |
340 | RS_ANY | |
341 | }; | |
342 | ||
343 | /* Components of the data dependence graph. */ | |
344 | ||
345 | struct component | |
346 | { | |
347 | /* The references in the component. */ | |
f1f41a6c | 348 | vec<dref> refs; |
ad4a85ad | 349 | |
350 | /* What we know about the step of the references in the component. */ | |
351 | enum ref_step_type comp_step; | |
352 | ||
353 | /* Next component in the list. */ | |
354 | struct component *next; | |
355 | }; | |
356 | ||
357 | /* Bitmap of ssa names defined by looparound phi nodes covered by chains. */ | |
358 | ||
359 | static bitmap looparound_phis; | |
360 | ||
361 | /* Cache used by tree_to_aff_combination_expand. */ | |
362 | ||
5f8841a5 | 363 | static hash_map<tree, name_expansion *> *name_expansions; |
ad4a85ad | 364 | |
365 | /* Dumps data reference REF to FILE. */ | |
366 | ||
367 | extern void dump_dref (FILE *, dref); | |
368 | void | |
369 | dump_dref (FILE *file, dref ref) | |
370 | { | |
371 | if (ref->ref) | |
372 | { | |
373 | fprintf (file, " "); | |
374 | print_generic_expr (file, DR_REF (ref->ref), TDF_SLIM); | |
375 | fprintf (file, " (id %u%s)\n", ref->pos, | |
376 | DR_IS_READ (ref->ref) ? "" : ", write"); | |
377 | ||
378 | fprintf (file, " offset "); | |
e913b5cd | 379 | print_decs (ref->offset, file); |
ad4a85ad | 380 | fprintf (file, "\n"); |
381 | ||
382 | fprintf (file, " distance %u\n", ref->distance); | |
383 | } | |
384 | else | |
385 | { | |
75a70cf9 | 386 | if (gimple_code (ref->stmt) == GIMPLE_PHI) |
ad4a85ad | 387 | fprintf (file, " looparound ref\n"); |
388 | else | |
389 | fprintf (file, " combination ref\n"); | |
390 | fprintf (file, " in statement "); | |
75a70cf9 | 391 | print_gimple_stmt (file, ref->stmt, 0, TDF_SLIM); |
ad4a85ad | 392 | fprintf (file, "\n"); |
393 | fprintf (file, " distance %u\n", ref->distance); | |
394 | } | |
395 | ||
396 | } | |
397 | ||
398 | /* Dumps CHAIN to FILE. */ | |
399 | ||
400 | extern void dump_chain (FILE *, chain_p); | |
401 | void | |
402 | dump_chain (FILE *file, chain_p chain) | |
403 | { | |
404 | dref a; | |
405 | const char *chain_type; | |
406 | unsigned i; | |
407 | tree var; | |
408 | ||
409 | switch (chain->type) | |
410 | { | |
411 | case CT_INVARIANT: | |
412 | chain_type = "Load motion"; | |
413 | break; | |
414 | ||
415 | case CT_LOAD: | |
416 | chain_type = "Loads-only"; | |
417 | break; | |
418 | ||
419 | case CT_STORE_LOAD: | |
420 | chain_type = "Store-loads"; | |
421 | break; | |
422 | ||
423 | case CT_COMBINATION: | |
424 | chain_type = "Combination"; | |
425 | break; | |
426 | ||
427 | default: | |
428 | gcc_unreachable (); | |
429 | } | |
430 | ||
431 | fprintf (file, "%s chain %p%s\n", chain_type, (void *) chain, | |
432 | chain->combined ? " (combined)" : ""); | |
433 | if (chain->type != CT_INVARIANT) | |
434 | fprintf (file, " max distance %u%s\n", chain->length, | |
435 | chain->has_max_use_after ? "" : ", may reuse first"); | |
436 | ||
437 | if (chain->type == CT_COMBINATION) | |
438 | { | |
439 | fprintf (file, " equal to %p %s %p in type ", | |
f4e36c33 | 440 | (void *) chain->ch1, op_symbol_code (chain->op), |
ad4a85ad | 441 | (void *) chain->ch2); |
442 | print_generic_expr (file, chain->rslt_type, TDF_SLIM); | |
443 | fprintf (file, "\n"); | |
444 | } | |
445 | ||
f1f41a6c | 446 | if (chain->vars.exists ()) |
ad4a85ad | 447 | { |
448 | fprintf (file, " vars"); | |
f1f41a6c | 449 | FOR_EACH_VEC_ELT (chain->vars, i, var) |
ad4a85ad | 450 | { |
451 | fprintf (file, " "); | |
452 | print_generic_expr (file, var, TDF_SLIM); | |
453 | } | |
454 | fprintf (file, "\n"); | |
455 | } | |
456 | ||
f1f41a6c | 457 | if (chain->inits.exists ()) |
ad4a85ad | 458 | { |
459 | fprintf (file, " inits"); | |
f1f41a6c | 460 | FOR_EACH_VEC_ELT (chain->inits, i, var) |
ad4a85ad | 461 | { |
462 | fprintf (file, " "); | |
463 | print_generic_expr (file, var, TDF_SLIM); | |
464 | } | |
465 | fprintf (file, "\n"); | |
466 | } | |
467 | ||
468 | fprintf (file, " references:\n"); | |
f1f41a6c | 469 | FOR_EACH_VEC_ELT (chain->refs, i, a) |
ad4a85ad | 470 | dump_dref (file, a); |
471 | ||
472 | fprintf (file, "\n"); | |
473 | } | |
474 | ||
475 | /* Dumps CHAINS to FILE. */ | |
476 | ||
f1f41a6c | 477 | extern void dump_chains (FILE *, vec<chain_p> ); |
ad4a85ad | 478 | void |
f1f41a6c | 479 | dump_chains (FILE *file, vec<chain_p> chains) |
ad4a85ad | 480 | { |
481 | chain_p chain; | |
482 | unsigned i; | |
483 | ||
f1f41a6c | 484 | FOR_EACH_VEC_ELT (chains, i, chain) |
ad4a85ad | 485 | dump_chain (file, chain); |
486 | } | |
487 | ||
488 | /* Dumps COMP to FILE. */ | |
489 | ||
490 | extern void dump_component (FILE *, struct component *); | |
491 | void | |
492 | dump_component (FILE *file, struct component *comp) | |
493 | { | |
494 | dref a; | |
495 | unsigned i; | |
496 | ||
497 | fprintf (file, "Component%s:\n", | |
498 | comp->comp_step == RS_INVARIANT ? " (invariant)" : ""); | |
f1f41a6c | 499 | FOR_EACH_VEC_ELT (comp->refs, i, a) |
ad4a85ad | 500 | dump_dref (file, a); |
501 | fprintf (file, "\n"); | |
502 | } | |
503 | ||
504 | /* Dumps COMPS to FILE. */ | |
505 | ||
506 | extern void dump_components (FILE *, struct component *); | |
507 | void | |
508 | dump_components (FILE *file, struct component *comps) | |
509 | { | |
510 | struct component *comp; | |
511 | ||
512 | for (comp = comps; comp; comp = comp->next) | |
513 | dump_component (file, comp); | |
514 | } | |
515 | ||
516 | /* Frees a chain CHAIN. */ | |
517 | ||
518 | static void | |
519 | release_chain (chain_p chain) | |
520 | { | |
521 | dref ref; | |
522 | unsigned i; | |
523 | ||
524 | if (chain == NULL) | |
525 | return; | |
526 | ||
f1f41a6c | 527 | FOR_EACH_VEC_ELT (chain->refs, i, ref) |
ad4a85ad | 528 | free (ref); |
529 | ||
f1f41a6c | 530 | chain->refs.release (); |
531 | chain->vars.release (); | |
532 | chain->inits.release (); | |
ad4a85ad | 533 | |
534 | free (chain); | |
535 | } | |
536 | ||
537 | /* Frees CHAINS. */ | |
538 | ||
539 | static void | |
f1f41a6c | 540 | release_chains (vec<chain_p> chains) |
ad4a85ad | 541 | { |
542 | unsigned i; | |
543 | chain_p chain; | |
544 | ||
f1f41a6c | 545 | FOR_EACH_VEC_ELT (chains, i, chain) |
ad4a85ad | 546 | release_chain (chain); |
f1f41a6c | 547 | chains.release (); |
ad4a85ad | 548 | } |
549 | ||
550 | /* Frees a component COMP. */ | |
551 | ||
552 | static void | |
553 | release_component (struct component *comp) | |
554 | { | |
f1f41a6c | 555 | comp->refs.release (); |
ad4a85ad | 556 | free (comp); |
557 | } | |
558 | ||
559 | /* Frees list of components COMPS. */ | |
560 | ||
561 | static void | |
562 | release_components (struct component *comps) | |
563 | { | |
564 | struct component *act, *next; | |
565 | ||
566 | for (act = comps; act; act = next) | |
567 | { | |
568 | next = act->next; | |
569 | release_component (act); | |
570 | } | |
571 | } | |
572 | ||
573 | /* Finds a root of tree given by FATHERS containing A, and performs path | |
574 | shortening. */ | |
575 | ||
576 | static unsigned | |
577 | component_of (unsigned fathers[], unsigned a) | |
578 | { | |
579 | unsigned root, n; | |
580 | ||
581 | for (root = a; root != fathers[root]; root = fathers[root]) | |
582 | continue; | |
583 | ||
584 | for (; a != root; a = n) | |
585 | { | |
586 | n = fathers[a]; | |
587 | fathers[a] = root; | |
588 | } | |
589 | ||
590 | return root; | |
591 | } | |
592 | ||
593 | /* Join operation for DFU. FATHERS gives the tree, SIZES are sizes of the | |
594 | components, A and B are components to merge. */ | |
595 | ||
596 | static void | |
597 | merge_comps (unsigned fathers[], unsigned sizes[], unsigned a, unsigned b) | |
598 | { | |
599 | unsigned ca = component_of (fathers, a); | |
600 | unsigned cb = component_of (fathers, b); | |
601 | ||
602 | if (ca == cb) | |
603 | return; | |
604 | ||
605 | if (sizes[ca] < sizes[cb]) | |
606 | { | |
607 | sizes[cb] += sizes[ca]; | |
608 | fathers[ca] = cb; | |
609 | } | |
610 | else | |
611 | { | |
612 | sizes[ca] += sizes[cb]; | |
613 | fathers[cb] = ca; | |
614 | } | |
615 | } | |
616 | ||
617 | /* Returns true if A is a reference that is suitable for predictive commoning | |
618 | in the innermost loop that contains it. REF_STEP is set according to the | |
619 | step of the reference A. */ | |
620 | ||
621 | static bool | |
622 | suitable_reference_p (struct data_reference *a, enum ref_step_type *ref_step) | |
623 | { | |
624 | tree ref = DR_REF (a), step = DR_STEP (a); | |
625 | ||
626 | if (!step | |
450c0971 | 627 | || TREE_THIS_VOLATILE (ref) |
154edec0 | 628 | || !is_gimple_reg_type (TREE_TYPE (ref)) |
629 | || tree_could_throw_p (ref)) | |
ad4a85ad | 630 | return false; |
631 | ||
632 | if (integer_zerop (step)) | |
633 | *ref_step = RS_INVARIANT; | |
634 | else if (integer_nonzerop (step)) | |
635 | *ref_step = RS_NONZERO; | |
636 | else | |
637 | *ref_step = RS_ANY; | |
638 | ||
639 | return true; | |
640 | } | |
641 | ||
642 | /* Stores DR_OFFSET (DR) + DR_INIT (DR) to OFFSET. */ | |
643 | ||
644 | static void | |
645 | aff_combination_dr_offset (struct data_reference *dr, aff_tree *offset) | |
646 | { | |
a0553bff | 647 | tree type = TREE_TYPE (DR_OFFSET (dr)); |
ad4a85ad | 648 | aff_tree delta; |
649 | ||
a0553bff | 650 | tree_to_aff_combination_expand (DR_OFFSET (dr), type, offset, |
ad4a85ad | 651 | &name_expansions); |
5de9d3ed | 652 | aff_combination_const (&delta, type, wi::to_widest (DR_INIT (dr))); |
ad4a85ad | 653 | aff_combination_add (offset, &delta); |
654 | } | |
655 | ||
656 | /* Determines number of iterations of the innermost enclosing loop before B | |
657 | refers to exactly the same location as A and stores it to OFF. If A and | |
658 | B do not have the same step, they never meet, or anything else fails, | |
659 | returns false, otherwise returns true. Both A and B are assumed to | |
660 | satisfy suitable_reference_p. */ | |
661 | ||
662 | static bool | |
663 | determine_offset (struct data_reference *a, struct data_reference *b, | |
5de9d3ed | 664 | widest_int *off) |
ad4a85ad | 665 | { |
666 | aff_tree diff, baseb, step; | |
ca4882a2 | 667 | tree typea, typeb; |
668 | ||
669 | /* Check that both the references access the location in the same type. */ | |
670 | typea = TREE_TYPE (DR_REF (a)); | |
671 | typeb = TREE_TYPE (DR_REF (b)); | |
548044d8 | 672 | if (!useless_type_conversion_p (typeb, typea)) |
ca4882a2 | 673 | return false; |
ad4a85ad | 674 | |
675 | /* Check whether the base address and the step of both references is the | |
676 | same. */ | |
677 | if (!operand_equal_p (DR_STEP (a), DR_STEP (b), 0) | |
678 | || !operand_equal_p (DR_BASE_ADDRESS (a), DR_BASE_ADDRESS (b), 0)) | |
679 | return false; | |
680 | ||
681 | if (integer_zerop (DR_STEP (a))) | |
682 | { | |
683 | /* If the references have loop invariant address, check that they access | |
684 | exactly the same location. */ | |
e913b5cd | 685 | *off = 0; |
ad4a85ad | 686 | return (operand_equal_p (DR_OFFSET (a), DR_OFFSET (b), 0) |
687 | && operand_equal_p (DR_INIT (a), DR_INIT (b), 0)); | |
688 | } | |
689 | ||
690 | /* Compare the offsets of the addresses, and check whether the difference | |
691 | is a multiple of step. */ | |
692 | aff_combination_dr_offset (a, &diff); | |
693 | aff_combination_dr_offset (b, &baseb); | |
e913b5cd | 694 | aff_combination_scale (&baseb, -1); |
ad4a85ad | 695 | aff_combination_add (&diff, &baseb); |
696 | ||
a0553bff | 697 | tree_to_aff_combination_expand (DR_STEP (a), TREE_TYPE (DR_STEP (a)), |
ad4a85ad | 698 | &step, &name_expansions); |
699 | return aff_combination_constant_multiple_p (&diff, &step, off); | |
700 | } | |
701 | ||
702 | /* Returns the last basic block in LOOP for which we are sure that | |
703 | it is executed whenever the loop is entered. */ | |
704 | ||
705 | static basic_block | |
706 | last_always_executed_block (struct loop *loop) | |
707 | { | |
708 | unsigned i; | |
f1f41a6c | 709 | vec<edge> exits = get_loop_exit_edges (loop); |
ad4a85ad | 710 | edge ex; |
711 | basic_block last = loop->latch; | |
712 | ||
f1f41a6c | 713 | FOR_EACH_VEC_ELT (exits, i, ex) |
ad4a85ad | 714 | last = nearest_common_dominator (CDI_DOMINATORS, last, ex->src); |
f1f41a6c | 715 | exits.release (); |
ad4a85ad | 716 | |
717 | return last; | |
718 | } | |
719 | ||
720 | /* Splits dependence graph on DATAREFS described by DEPENDS to components. */ | |
721 | ||
722 | static struct component * | |
723 | split_data_refs_to_components (struct loop *loop, | |
f1f41a6c | 724 | vec<data_reference_p> datarefs, |
725 | vec<ddr_p> depends) | |
ad4a85ad | 726 | { |
f1f41a6c | 727 | unsigned i, n = datarefs.length (); |
ad4a85ad | 728 | unsigned ca, ia, ib, bad; |
729 | unsigned *comp_father = XNEWVEC (unsigned, n + 1); | |
730 | unsigned *comp_size = XNEWVEC (unsigned, n + 1); | |
731 | struct component **comps; | |
732 | struct data_reference *dr, *dra, *drb; | |
733 | struct data_dependence_relation *ddr; | |
734 | struct component *comp_list = NULL, *comp; | |
735 | dref dataref; | |
736 | basic_block last_always_executed = last_always_executed_block (loop); | |
48e1416a | 737 | |
f1f41a6c | 738 | FOR_EACH_VEC_ELT (datarefs, i, dr) |
ad4a85ad | 739 | { |
740 | if (!DR_REF (dr)) | |
741 | { | |
742 | /* A fake reference for call or asm_expr that may clobber memory; | |
743 | just fail. */ | |
744 | goto end; | |
745 | } | |
c71d3c24 | 746 | /* predcom pass isn't prepared to handle calls with data references. */ |
747 | if (is_gimple_call (DR_STMT (dr))) | |
748 | goto end; | |
5c205353 | 749 | dr->aux = (void *) (size_t) i; |
ad4a85ad | 750 | comp_father[i] = i; |
751 | comp_size[i] = 1; | |
752 | } | |
753 | ||
754 | /* A component reserved for the "bad" data references. */ | |
755 | comp_father[n] = n; | |
756 | comp_size[n] = 1; | |
757 | ||
f1f41a6c | 758 | FOR_EACH_VEC_ELT (datarefs, i, dr) |
ad4a85ad | 759 | { |
760 | enum ref_step_type dummy; | |
761 | ||
762 | if (!suitable_reference_p (dr, &dummy)) | |
763 | { | |
5c205353 | 764 | ia = (unsigned) (size_t) dr->aux; |
ad4a85ad | 765 | merge_comps (comp_father, comp_size, n, ia); |
766 | } | |
767 | } | |
768 | ||
f1f41a6c | 769 | FOR_EACH_VEC_ELT (depends, i, ddr) |
ad4a85ad | 770 | { |
5de9d3ed | 771 | widest_int dummy_off; |
ad4a85ad | 772 | |
773 | if (DDR_ARE_DEPENDENT (ddr) == chrec_known) | |
774 | continue; | |
775 | ||
776 | dra = DDR_A (ddr); | |
777 | drb = DDR_B (ddr); | |
5c205353 | 778 | ia = component_of (comp_father, (unsigned) (size_t) dra->aux); |
779 | ib = component_of (comp_father, (unsigned) (size_t) drb->aux); | |
ad4a85ad | 780 | if (ia == ib) |
781 | continue; | |
782 | ||
783 | bad = component_of (comp_father, n); | |
784 | ||
785 | /* If both A and B are reads, we may ignore unsuitable dependences. */ | |
19f2ced2 | 786 | if (DR_IS_READ (dra) && DR_IS_READ (drb)) |
787 | { | |
788 | if (ia == bad || ib == bad | |
789 | || !determine_offset (dra, drb, &dummy_off)) | |
790 | continue; | |
791 | } | |
792 | /* If A is read and B write or vice versa and there is unsuitable | |
793 | dependence, instead of merging both components into a component | |
794 | that will certainly not pass suitable_component_p, just put the | |
795 | read into bad component, perhaps at least the write together with | |
796 | all the other data refs in its component will be optimizable. */ | |
797 | else if (DR_IS_READ (dra) && ib != bad) | |
798 | { | |
799 | if (ia == bad) | |
800 | continue; | |
801 | else if (!determine_offset (dra, drb, &dummy_off)) | |
802 | { | |
803 | merge_comps (comp_father, comp_size, bad, ia); | |
804 | continue; | |
805 | } | |
806 | } | |
807 | else if (DR_IS_READ (drb) && ia != bad) | |
808 | { | |
809 | if (ib == bad) | |
810 | continue; | |
811 | else if (!determine_offset (dra, drb, &dummy_off)) | |
812 | { | |
813 | merge_comps (comp_father, comp_size, bad, ib); | |
814 | continue; | |
815 | } | |
816 | } | |
48e1416a | 817 | |
ad4a85ad | 818 | merge_comps (comp_father, comp_size, ia, ib); |
819 | } | |
820 | ||
821 | comps = XCNEWVEC (struct component *, n); | |
822 | bad = component_of (comp_father, n); | |
f1f41a6c | 823 | FOR_EACH_VEC_ELT (datarefs, i, dr) |
ad4a85ad | 824 | { |
5c205353 | 825 | ia = (unsigned) (size_t) dr->aux; |
ad4a85ad | 826 | ca = component_of (comp_father, ia); |
827 | if (ca == bad) | |
828 | continue; | |
829 | ||
830 | comp = comps[ca]; | |
831 | if (!comp) | |
832 | { | |
833 | comp = XCNEW (struct component); | |
f1f41a6c | 834 | comp->refs.create (comp_size[ca]); |
ad4a85ad | 835 | comps[ca] = comp; |
836 | } | |
837 | ||
26dbec0a | 838 | dataref = XCNEW (struct dref_d); |
ad4a85ad | 839 | dataref->ref = dr; |
840 | dataref->stmt = DR_STMT (dr); | |
e913b5cd | 841 | dataref->offset = 0; |
ad4a85ad | 842 | dataref->distance = 0; |
843 | ||
844 | dataref->always_accessed | |
845 | = dominated_by_p (CDI_DOMINATORS, last_always_executed, | |
75a70cf9 | 846 | gimple_bb (dataref->stmt)); |
f1f41a6c | 847 | dataref->pos = comp->refs.length (); |
848 | comp->refs.quick_push (dataref); | |
ad4a85ad | 849 | } |
850 | ||
851 | for (i = 0; i < n; i++) | |
852 | { | |
853 | comp = comps[i]; | |
854 | if (comp) | |
855 | { | |
856 | comp->next = comp_list; | |
857 | comp_list = comp; | |
858 | } | |
859 | } | |
860 | free (comps); | |
861 | ||
862 | end: | |
863 | free (comp_father); | |
864 | free (comp_size); | |
865 | return comp_list; | |
866 | } | |
867 | ||
/* Returns true if the component COMP satisfies the conditions
   described in 2) at the beginning of this file.  LOOP is the current
   loop.  */

static bool
suitable_component_p (struct loop *loop, struct component *comp)
{
  unsigned i;
  dref a, first;
  basic_block ba, bp = loop->header;
  bool ok, has_write = false;

  /* Every reference must be executed exactly once per iteration of LOOP,
     and the blocks of the references must be totally ordered by
     dominance (asserted below), starting from the loop header.  */
  FOR_EACH_VEC_ELT (comp->refs, i, a)
    {
      ba = gimple_bb (a->stmt);

      if (!just_once_each_iteration_p (loop, ba))
	return false;

      gcc_assert (dominated_by_p (CDI_DOMINATORS, ba, bp));
      bp = ba;

      if (DR_IS_WRITE (a->ref))
	has_write = true;
    }

  /* The first reference determines the step classification of the whole
     component; the offsets of the remaining references are computed
     relative to it.  */
  first = comp->refs[0];
  ok = suitable_reference_p (first->ref, &comp->comp_step);
  gcc_assert (ok);
  first->offset = 0;

  for (i = 1; comp->refs.iterate (i, &a); i++)
    {
      if (!determine_offset (first->ref, a->ref, &a->offset))
	return false;

#ifdef ENABLE_CHECKING
      {
	/* All references of the component must agree on the step
	   classification.  */
	enum ref_step_type a_step;
	ok = suitable_reference_p (a->ref, &a_step);
	gcc_assert (ok && a_step == comp->comp_step);
      }
#endif
    }

  /* If there is a write inside the component, we must know whether the
     step is nonzero or not -- we would not otherwise be able to recognize
     whether the value accessed by reads comes from the OFFSET-th iteration
     or the previous one.  */
  if (has_write && comp->comp_step == RS_ANY)
    return false;

  return true;
}
48e1416a | 922 | |
ad4a85ad | 923 | /* Check the conditions on references inside each of components COMPS, |
924 | and remove the unsuitable components from the list. The new list | |
925 | of components is returned. The conditions are described in 2) at | |
310d2511 | 926 | the beginning of this file. LOOP is the current loop. */ |
ad4a85ad | 927 | |
928 | static struct component * | |
929 | filter_suitable_components (struct loop *loop, struct component *comps) | |
930 | { | |
931 | struct component **comp, *act; | |
932 | ||
933 | for (comp = &comps; *comp; ) | |
934 | { | |
935 | act = *comp; | |
936 | if (suitable_component_p (loop, act)) | |
937 | comp = &act->next; | |
938 | else | |
939 | { | |
19af51e2 | 940 | dref ref; |
941 | unsigned i; | |
942 | ||
ad4a85ad | 943 | *comp = act->next; |
f1f41a6c | 944 | FOR_EACH_VEC_ELT (act->refs, i, ref) |
19af51e2 | 945 | free (ref); |
ad4a85ad | 946 | release_component (act); |
947 | } | |
948 | } | |
949 | ||
950 | return comps; | |
951 | } | |
952 | ||
953 | /* Compares two drefs A and B by their offset and position. Callback for | |
954 | qsort. */ | |
955 | ||
956 | static int | |
957 | order_drefs (const void *a, const void *b) | |
958 | { | |
45ba1503 | 959 | const dref *const da = (const dref *) a; |
960 | const dref *const db = (const dref *) b; | |
796b6678 | 961 | int offcmp = wi::cmps ((*da)->offset, (*db)->offset); |
ad4a85ad | 962 | |
963 | if (offcmp != 0) | |
964 | return offcmp; | |
965 | ||
966 | return (*da)->pos - (*db)->pos; | |
967 | } | |
968 | ||
/* Returns the root of the CHAIN, i.e. the reference stored at index 0 of
   its refs vector.  */

static inline dref
get_chain_root (chain_p chain)
{
  return chain->refs[0];
}
976 | ||
/* Adds REF to the chain CHAIN.  If the distance of REF from the chain's
   root exceeds MAX_DISTANCE, REF is freed instead of being added.  */

static void
add_ref_to_chain (chain_p chain, dref ref)
{
  dref root = get_chain_root (chain);

  gcc_assert (wi::les_p (root->offset, ref->offset));
  widest_int dist = ref->offset - root->offset;
  if (wi::leu_p (MAX_DISTANCE, dist))
    {
      /* Too far from the root -- reusing the value would require keeping
	 too many temporary variables alive.  Drop the reference.  */
      free (ref);
      return;
    }
  gcc_assert (wi::fits_uhwi_p (dist));

  chain->refs.safe_push (ref);

  ref->distance = dist.to_uhwi ();

  /* A new maximum distance invalidates the has_max_use_after flag that
     was computed for the previous maximum; recompute it below.  */
  if (ref->distance >= chain->length)
    {
      chain->length = ref->distance;
      chain->has_max_use_after = false;
    }

  /* A use at the maximum distance positioned after the root means the
     variable for the last iteration is still live after the root.  */
  if (ref->distance == chain->length
      && ref->pos > root->pos)
    chain->has_max_use_after = true;

  chain->all_always_accessed &= ref->always_accessed;
}
1009 | ||
1010 | /* Returns the chain for invariant component COMP. */ | |
1011 | ||
1012 | static chain_p | |
1013 | make_invariant_chain (struct component *comp) | |
1014 | { | |
1015 | chain_p chain = XCNEW (struct chain); | |
1016 | unsigned i; | |
1017 | dref ref; | |
1018 | ||
1019 | chain->type = CT_INVARIANT; | |
1020 | ||
1021 | chain->all_always_accessed = true; | |
1022 | ||
f1f41a6c | 1023 | FOR_EACH_VEC_ELT (comp->refs, i, ref) |
ad4a85ad | 1024 | { |
f1f41a6c | 1025 | chain->refs.safe_push (ref); |
ad4a85ad | 1026 | chain->all_always_accessed &= ref->always_accessed; |
1027 | } | |
1028 | ||
1029 | return chain; | |
1030 | } | |
1031 | ||
1032 | /* Make a new chain rooted at REF. */ | |
1033 | ||
1034 | static chain_p | |
1035 | make_rooted_chain (dref ref) | |
1036 | { | |
1037 | chain_p chain = XCNEW (struct chain); | |
1038 | ||
1039 | chain->type = DR_IS_READ (ref->ref) ? CT_LOAD : CT_STORE_LOAD; | |
1040 | ||
f1f41a6c | 1041 | chain->refs.safe_push (ref); |
ad4a85ad | 1042 | chain->all_always_accessed = ref->always_accessed; |
1043 | ||
1044 | ref->distance = 0; | |
1045 | ||
1046 | return chain; | |
1047 | } | |
1048 | ||
1049 | /* Returns true if CHAIN is not trivial. */ | |
1050 | ||
1051 | static bool | |
1052 | nontrivial_chain_p (chain_p chain) | |
1053 | { | |
f1f41a6c | 1054 | return chain != NULL && chain->refs.length () > 1; |
ad4a85ad | 1055 | } |
1056 | ||
1057 | /* Returns the ssa name that contains the value of REF, or NULL_TREE if there | |
1058 | is no such name. */ | |
1059 | ||
1060 | static tree | |
1061 | name_for_ref (dref ref) | |
1062 | { | |
1063 | tree name; | |
1064 | ||
75a70cf9 | 1065 | if (is_gimple_assign (ref->stmt)) |
ad4a85ad | 1066 | { |
1067 | if (!ref->ref || DR_IS_READ (ref->ref)) | |
75a70cf9 | 1068 | name = gimple_assign_lhs (ref->stmt); |
ad4a85ad | 1069 | else |
75a70cf9 | 1070 | name = gimple_assign_rhs1 (ref->stmt); |
ad4a85ad | 1071 | } |
1072 | else | |
1073 | name = PHI_RESULT (ref->stmt); | |
1074 | ||
1075 | return (TREE_CODE (name) == SSA_NAME ? name : NULL_TREE); | |
1076 | } | |
1077 | ||
/* Returns true if REF is a valid initializer for ROOT with given DISTANCE (in
   iterations of the innermost enclosing loop).  */

static bool
valid_initializer_p (struct data_reference *ref,
		     unsigned distance, struct data_reference *root)
{
  aff_tree diff, base, step;
  widest_int off;

  /* Both REF and ROOT must be accessing the same object.  */
  if (!operand_equal_p (DR_BASE_ADDRESS (ref), DR_BASE_ADDRESS (root), 0))
    return false;

  /* The initializer is defined outside of loop, hence its address must be
     invariant inside the loop.  */
  gcc_assert (integer_zerop (DR_STEP (ref)));

  /* If the address of the reference is invariant, initializer must access
     exactly the same location.  */
  if (integer_zerop (DR_STEP (root)))
    return (operand_equal_p (DR_OFFSET (ref), DR_OFFSET (root), 0)
	    && operand_equal_p (DR_INIT (ref), DR_INIT (root), 0));

  /* Verify that this index of REF is equal to the root's index at
     -DISTANCE-th iteration.  Compute DIFF = offset(ROOT) - offset(REF)
     as an affine combination.  */
  aff_combination_dr_offset (root, &diff);
  aff_combination_dr_offset (ref, &base);
  aff_combination_scale (&base, -1);
  aff_combination_add (&diff, &base);

  /* DIFF must be DISTANCE times the step of ROOT.  */
  tree_to_aff_combination_expand (DR_STEP (root), TREE_TYPE (DR_STEP (root)),
				  &step, &name_expansions);
  if (!aff_combination_constant_multiple_p (&diff, &step, &off))
    return false;

  if (off != distance)
    return false;

  return true;
}
1119 | ||
/* Finds looparound phi node of LOOP that copies the value of REF, and if its
   initial value is correct (equal to initial value of REF shifted by one
   iteration), returns the phi node.  Otherwise, NULL_TREE is returned.  ROOT
   is the root of the current chain.  */

static gphi *
find_looparound_phi (struct loop *loop, dref ref, dref root)
{
  tree name, init, init_ref;
  gphi *phi = NULL;
  gimple init_stmt;
  edge latch = loop_latch_edge (loop);
  struct data_reference init_dr;
  gphi_iterator psi;

  /* Find the ssa name holding the value of REF (lhs for reads, stored rhs
     for writes, phi result otherwise).  */
  if (is_gimple_assign (ref->stmt))
    {
      if (DR_IS_READ (ref->ref))
	name = gimple_assign_lhs (ref->stmt);
      else
	name = gimple_assign_rhs1 (ref->stmt);
    }
  else
    name = PHI_RESULT (ref->stmt);
  if (!name)
    return NULL;

  /* Look for a phi node in the loop header whose latch argument is that
     name -- this is the candidate looparound copy.  */
  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
    {
      phi = psi.phi ();
      if (PHI_ARG_DEF_FROM_EDGE (phi, latch) == name)
	break;
    }

  if (gsi_end_p (psi))
    return NULL;

  /* The preheader argument of the phi must be initialized from a memory
     reference or a declaration.  */
  init = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop));
  if (TREE_CODE (init) != SSA_NAME)
    return NULL;
  init_stmt = SSA_NAME_DEF_STMT (init);
  if (gimple_code (init_stmt) != GIMPLE_ASSIGN)
    return NULL;
  gcc_assert (gimple_assign_lhs (init_stmt) == init);

  init_ref = gimple_assign_rhs1 (init_stmt);
  if (!REFERENCE_CLASS_P (init_ref)
      && !DECL_P (init_ref))
    return NULL;

  /* Analyze the behavior of INIT_REF with respect to LOOP (innermost
     loop enclosing PHI).  */
  memset (&init_dr, 0, sizeof (struct data_reference));
  DR_REF (&init_dr) = init_ref;
  DR_STMT (&init_dr) = phi;
  if (!dr_analyze_innermost (&init_dr, loop))
    return NULL;

  /* The initializer must access the root's location shifted by one more
     iteration than REF's distance.  */
  if (!valid_initializer_p (&init_dr, ref->distance + 1, root->ref))
    return NULL;

  return phi;
}
1183 | ||
1184 | /* Adds a reference for the looparound copy of REF in PHI to CHAIN. */ | |
1185 | ||
1186 | static void | |
1a91d914 | 1187 | insert_looparound_copy (chain_p chain, dref ref, gphi *phi) |
ad4a85ad | 1188 | { |
26dbec0a | 1189 | dref nw = XCNEW (struct dref_d), aref; |
ad4a85ad | 1190 | unsigned i; |
1191 | ||
1192 | nw->stmt = phi; | |
1193 | nw->distance = ref->distance + 1; | |
1194 | nw->always_accessed = 1; | |
1195 | ||
f1f41a6c | 1196 | FOR_EACH_VEC_ELT (chain->refs, i, aref) |
ad4a85ad | 1197 | if (aref->distance >= nw->distance) |
1198 | break; | |
f1f41a6c | 1199 | chain->refs.safe_insert (i, nw); |
ad4a85ad | 1200 | |
1201 | if (nw->distance > chain->length) | |
1202 | { | |
1203 | chain->length = nw->distance; | |
1204 | chain->has_max_use_after = false; | |
1205 | } | |
1206 | } | |
1207 | ||
1208 | /* For references in CHAIN that are copied around the LOOP (created previously | |
1209 | by PRE, or by user), add the results of such copies to the chain. This | |
1210 | enables us to remove the copies by unrolling, and may need less registers | |
1211 | (also, it may allow us to combine chains together). */ | |
1212 | ||
1213 | static void | |
1214 | add_looparound_copies (struct loop *loop, chain_p chain) | |
1215 | { | |
1216 | unsigned i; | |
1217 | dref ref, root = get_chain_root (chain); | |
1a91d914 | 1218 | gphi *phi; |
ad4a85ad | 1219 | |
f1f41a6c | 1220 | FOR_EACH_VEC_ELT (chain->refs, i, ref) |
ad4a85ad | 1221 | { |
1222 | phi = find_looparound_phi (loop, ref, root); | |
1223 | if (!phi) | |
1224 | continue; | |
1225 | ||
1226 | bitmap_set_bit (looparound_phis, SSA_NAME_VERSION (PHI_RESULT (phi))); | |
1227 | insert_looparound_copy (chain, ref, phi); | |
1228 | } | |
1229 | } | |
1230 | ||
/* Find roots of the values and determine distances in the component COMP.
   The references are redistributed into CHAINS.  LOOP is the current
   loop.  */

static void
determine_roots_comp (struct loop *loop,
		      struct component *comp,
		      vec<chain_p> *chains)
{
  unsigned i;
  dref a;
  chain_p chain = NULL;
  widest_int last_ofs = 0;

  /* Invariants are handled specially.  */
  if (comp->comp_step == RS_INVARIANT)
    {
      chain = make_invariant_chain (comp);
      chains->safe_push (chain);
      return;
    }

  /* Sort by offset (and position) so each chain can be built as a run of
     consecutive references rooted at a write or at the first reference.  */
  comp->refs.qsort (order_drefs);

  FOR_EACH_VEC_ELT (comp->refs, i, a)
    {
      /* A write, or a reference too distant from the current root, starts
	 a new chain.  The chain built so far is kept only if it contains
	 more than one reference.  */
      if (!chain || DR_IS_WRITE (a->ref)
	  || wi::leu_p (MAX_DISTANCE, a->offset - last_ofs))
	{
	  if (nontrivial_chain_p (chain))
	    {
	      add_looparound_copies (loop, chain);
	      chains->safe_push (chain);
	    }
	  else
	    release_chain (chain);
	  chain = make_rooted_chain (a);
	  last_ofs = a->offset;
	  continue;
	}

      add_ref_to_chain (chain, a);
    }

  /* Commit or discard the chain left open after the last reference.  */
  if (nontrivial_chain_p (chain))
    {
      add_looparound_copies (loop, chain);
      chains->safe_push (chain);
    }
  else
    release_chain (chain);
}
1283 | ||
1284 | /* Find roots of the values and determine distances in components COMPS, and | |
1285 | separates the references to CHAINS. LOOP is the current loop. */ | |
1286 | ||
1287 | static void | |
1288 | determine_roots (struct loop *loop, | |
f1f41a6c | 1289 | struct component *comps, vec<chain_p> *chains) |
ad4a85ad | 1290 | { |
1291 | struct component *comp; | |
1292 | ||
1293 | for (comp = comps; comp; comp = comp->next) | |
1294 | determine_roots_comp (loop, comp, chains); | |
1295 | } | |
1296 | ||
1297 | /* Replace the reference in statement STMT with temporary variable | |
f4e36c33 | 1298 | NEW_TREE. If SET is true, NEW_TREE is instead initialized to the value of |
ad4a85ad | 1299 | the reference in the statement. IN_LHS is true if the reference |
1300 | is in the lhs of STMT, false if it is in rhs. */ | |
1301 | ||
1302 | static void | |
f4e36c33 | 1303 | replace_ref_with (gimple stmt, tree new_tree, bool set, bool in_lhs) |
ad4a85ad | 1304 | { |
75a70cf9 | 1305 | tree val; |
1a91d914 | 1306 | gassign *new_stmt; |
75a70cf9 | 1307 | gimple_stmt_iterator bsi, psi; |
ad4a85ad | 1308 | |
75a70cf9 | 1309 | if (gimple_code (stmt) == GIMPLE_PHI) |
ad4a85ad | 1310 | { |
1311 | gcc_assert (!in_lhs && !set); | |
1312 | ||
1313 | val = PHI_RESULT (stmt); | |
75a70cf9 | 1314 | bsi = gsi_after_labels (gimple_bb (stmt)); |
1315 | psi = gsi_for_stmt (stmt); | |
1316 | remove_phi_node (&psi, false); | |
ad4a85ad | 1317 | |
75a70cf9 | 1318 | /* Turn the phi node into GIMPLE_ASSIGN. */ |
f4e36c33 | 1319 | new_stmt = gimple_build_assign (val, new_tree); |
75a70cf9 | 1320 | gsi_insert_before (&bsi, new_stmt, GSI_NEW_STMT); |
ad4a85ad | 1321 | return; |
1322 | } | |
48e1416a | 1323 | |
ad4a85ad | 1324 | /* Since the reference is of gimple_reg type, it should only |
1325 | appear as lhs or rhs of modify statement. */ | |
75a70cf9 | 1326 | gcc_assert (is_gimple_assign (stmt)); |
1327 | ||
1328 | bsi = gsi_for_stmt (stmt); | |
ad4a85ad | 1329 | |
f4e36c33 | 1330 | /* If we do not need to initialize NEW_TREE, just replace the use of OLD. */ |
ad4a85ad | 1331 | if (!set) |
1332 | { | |
1333 | gcc_assert (!in_lhs); | |
f4e36c33 | 1334 | gimple_assign_set_rhs_from_tree (&bsi, new_tree); |
75a70cf9 | 1335 | stmt = gsi_stmt (bsi); |
ad4a85ad | 1336 | update_stmt (stmt); |
1337 | return; | |
1338 | } | |
1339 | ||
ad4a85ad | 1340 | if (in_lhs) |
1341 | { | |
75a70cf9 | 1342 | /* We have statement |
48e1416a | 1343 | |
75a70cf9 | 1344 | OLD = VAL |
ad4a85ad | 1345 | |
75a70cf9 | 1346 | If OLD is a memory reference, then VAL is gimple_val, and we transform |
1347 | this to | |
ad4a85ad | 1348 | |
1349 | OLD = VAL | |
1350 | NEW = VAL | |
1351 | ||
48e1416a | 1352 | Otherwise, we are replacing a combination chain, |
75a70cf9 | 1353 | VAL is the expression that performs the combination, and OLD is an |
1354 | SSA name. In this case, we transform the assignment to | |
1355 | ||
1356 | OLD = VAL | |
1357 | NEW = OLD | |
1358 | ||
1359 | */ | |
1360 | ||
1361 | val = gimple_assign_lhs (stmt); | |
1362 | if (TREE_CODE (val) != SSA_NAME) | |
1363 | { | |
75a70cf9 | 1364 | val = gimple_assign_rhs1 (stmt); |
bbfbb5ba | 1365 | gcc_assert (gimple_assign_single_p (stmt)); |
1366 | if (TREE_CLOBBER_P (val)) | |
c6dfe037 | 1367 | val = get_or_create_ssa_default_def (cfun, SSA_NAME_VAR (new_tree)); |
bbfbb5ba | 1368 | else |
1369 | gcc_assert (gimple_assign_copy_p (stmt)); | |
75a70cf9 | 1370 | } |
ad4a85ad | 1371 | } |
1372 | else | |
1373 | { | |
ad4a85ad | 1374 | /* VAL = OLD |
1375 | ||
1376 | is transformed to | |
1377 | ||
1378 | VAL = OLD | |
1379 | NEW = VAL */ | |
75a70cf9 | 1380 | |
1381 | val = gimple_assign_lhs (stmt); | |
ad4a85ad | 1382 | } |
1383 | ||
f4e36c33 | 1384 | new_stmt = gimple_build_assign (new_tree, unshare_expr (val)); |
75a70cf9 | 1385 | gsi_insert_after (&bsi, new_stmt, GSI_NEW_STMT); |
ad4a85ad | 1386 | } |
1387 | ||
/* Returns a memory reference to DR in the ITER-th iteration of
   the loop it was analyzed in.  Append init stmts to STMTS.  */

static tree
ref_at_iteration (data_reference_p dr, int iter, gimple_seq *stmts)
{
  tree off = DR_OFFSET (dr);
  tree coff = DR_INIT (dr);
  /* Fold ITER * STEP into the constant part if the step is constant,
     otherwise into the variable offset.  */
  if (iter == 0)
    ;
  else if (TREE_CODE (DR_STEP (dr)) == INTEGER_CST)
    coff = size_binop (PLUS_EXPR, coff,
		       size_binop (MULT_EXPR, DR_STEP (dr), ssize_int (iter)));
  else
    off = size_binop (PLUS_EXPR, off,
		      size_binop (MULT_EXPR, DR_STEP (dr), ssize_int (iter)));
  tree addr = fold_build_pointer_plus (DR_BASE_ADDRESS (dr), off);
  addr = force_gimple_operand_1 (addr, stmts, is_gimple_mem_ref_addr,
				 NULL_TREE);
  tree alias_ptr = fold_convert (reference_alias_ptr_type (DR_REF (dr)), coff);
  /* While data-ref analysis punts on bit offsets it still handles
     bitfield accesses at byte boundaries.  Cope with that.  Note that
     we cannot simply re-apply the outer COMPONENT_REF because the
     byte-granular portion of it is already applied via DR_INIT and
     DR_OFFSET, so simply build a BIT_FIELD_REF knowing that the bits
     start at offset zero.  */
  if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
    {
      tree field = TREE_OPERAND (DR_REF (dr), 1);
      return build3 (BIT_FIELD_REF, TREE_TYPE (DR_REF (dr)),
		     build2 (MEM_REF, DECL_BIT_FIELD_TYPE (field),
			     addr, alias_ptr),
		     DECL_SIZE (field), bitsize_zero_node);
    }
  else
    return fold_build2 (MEM_REF, TREE_TYPE (DR_REF (dr)), addr, alias_ptr);
}
1426 | ||
1427 | /* Get the initialization expression for the INDEX-th temporary variable | |
1428 | of CHAIN. */ | |
1429 | ||
1430 | static tree | |
1431 | get_init_expr (chain_p chain, unsigned index) | |
1432 | { | |
1433 | if (chain->type == CT_COMBINATION) | |
1434 | { | |
1435 | tree e1 = get_init_expr (chain->ch1, index); | |
1436 | tree e2 = get_init_expr (chain->ch2, index); | |
1437 | ||
f4e36c33 | 1438 | return fold_build2 (chain->op, chain->rslt_type, e1, e2); |
ad4a85ad | 1439 | } |
1440 | else | |
f1f41a6c | 1441 | return chain->inits[index]; |
ad4a85ad | 1442 | } |
1443 | ||
a4c3242a | 1444 | /* Returns a new temporary variable used for the I-th variable carrying |
1445 | value of REF. The variable's uid is marked in TMP_VARS. */ | |
1446 | ||
1447 | static tree | |
1448 | predcom_tmp_var (tree ref, unsigned i, bitmap tmp_vars) | |
1449 | { | |
1450 | tree type = TREE_TYPE (ref); | |
a4c3242a | 1451 | /* We never access the components of the temporary variable in predictive |
1452 | commoning. */ | |
2ac51e48 | 1453 | tree var = create_tmp_reg (type, get_lsm_tmp_name (ref, i)); |
a4c3242a | 1454 | bitmap_set_bit (tmp_vars, DECL_UID (var)); |
1455 | return var; | |
1456 | } | |
1457 | ||
/* Creates the variables for CHAIN, as well as phi nodes for them and
   initialization on entry to LOOP.  Uids of the newly created
   temporary variables are marked in TMP_VARS.  */

static void
initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
{
  unsigned i;
  unsigned n = chain->length;
  dref root = get_chain_root (chain);
  bool reuse_first = !chain->has_max_use_after;
  tree ref, init, var, next;
  gphi *phi;
  gimple_seq stmts;
  edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);

  /* If N == 0, then all the references are within the single iteration.  And
     since this is an nonempty chain, reuse_first cannot be true.  */
  gcc_assert (n > 0 || !reuse_first);

  chain->vars.create (n + 1);

  if (chain->type == CT_COMBINATION)
    ref = gimple_assign_lhs (root->stmt);
  else
    ref = DR_REF (root->ref);

  /* Create N variables (or N + 1 when the variable for the maximum
     distance cannot be reused as the current-iteration one).  */
  for (i = 0; i < n + (reuse_first ? 0 : 1); i++)
    {
      var = predcom_tmp_var (ref, i, tmp_vars);
      chain->vars.quick_push (var);
    }
  /* When reusing, the last slot aliases the first variable.  */
  if (reuse_first)
    chain->vars.quick_push (chain->vars[0]);

  FOR_EACH_VEC_ELT (chain->vars, i, var)
    chain->vars[i] = make_ssa_name (var, NULL);

  /* Each variable gets a loop-header phi: initialized on entry with the
     value of the root reference I + 1 iterations ago, and shifted from the
     next variable on the latch edge.  */
  for (i = 0; i < n; i++)
    {
      var = chain->vars[i];
      next = chain->vars[i + 1];
      init = get_init_expr (chain, i);

      init = force_gimple_operand (init, &stmts, true, NULL_TREE);
      if (stmts)
	gsi_insert_seq_on_edge_immediate (entry, stmts);

      phi = create_phi_node (var, loop->header);
      add_phi_arg (phi, init, entry, UNKNOWN_LOCATION);
      add_phi_arg (phi, next, latch, UNKNOWN_LOCATION);
    }
}
1511 | ||
1512 | /* Create the variables and initialization statement for root of chain | |
1513 | CHAIN. Uids of the newly created temporary variables are marked | |
1514 | in TMP_VARS. */ | |
1515 | ||
1516 | static void | |
1517 | initialize_root (struct loop *loop, chain_p chain, bitmap tmp_vars) | |
1518 | { | |
1519 | dref root = get_chain_root (chain); | |
1520 | bool in_lhs = (chain->type == CT_STORE_LOAD | |
1521 | || chain->type == CT_COMBINATION); | |
1522 | ||
1523 | initialize_root_vars (loop, chain, tmp_vars); | |
1524 | replace_ref_with (root->stmt, | |
f1f41a6c | 1525 | chain->vars[chain->length], |
ad4a85ad | 1526 | true, in_lhs); |
1527 | } | |
1528 | ||
/* Initializes a variable for load motion for ROOT and prepares phi nodes and
   initialization on entry to LOOP if necessary.  The ssa name for the variable
   is stored in VARS.  If WRITTEN is true, also a phi node to copy its value
   around the loop is created.  Uid of the newly created temporary variable
   is marked in TMP_VARS.  INITS is the list containing the (single)
   initializer.  */

static void
initialize_root_vars_lm (struct loop *loop, dref root, bool written,
			 vec<tree> *vars, vec<tree> inits,
			 bitmap tmp_vars)
{
  unsigned i;
  tree ref = DR_REF (root->ref), init, var, next;
  gimple_seq stmts;
  gphi *phi;
  edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);

  /* Find the initializer for the variable, and check that it cannot
     trap.  */
  init = inits[0];

  /* A written variable needs a second ssa name for the value carried
     around the loop.  */
  vars->create (written ? 2 : 1);
  var = predcom_tmp_var (ref, 0, tmp_vars);
  vars->quick_push (var);
  if (written)
    vars->quick_push ((*vars)[0]);

  FOR_EACH_VEC_ELT (*vars, i, var)
    (*vars)[i] = make_ssa_name (var, NULL);

  var = (*vars)[0];

  init = force_gimple_operand (init, &stmts, written, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (entry, stmts);

  if (written)
    {
      /* Copy the value around the loop through a phi node.  */
      next = (*vars)[1];
      phi = create_phi_node (var, loop->header);
      add_phi_arg (phi, init, entry, UNKNOWN_LOCATION);
      add_phi_arg (phi, next, latch, UNKNOWN_LOCATION);
    }
  else
    {
      /* A read-only variable is just initialized once on loop entry.  */
      gassign *init_stmt = gimple_build_assign (var, init);
      gsi_insert_on_edge_immediate (entry, init_stmt);
    }
}
1579 | ||
1580 | ||
1581 | /* Execute load motion for references in chain CHAIN. Uids of the newly | |
1582 | created temporary variables are marked in TMP_VARS. */ | |
1583 | ||
1584 | static void | |
1585 | execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars) | |
1586 | { | |
c2078b80 | 1587 | auto_vec<tree> vars; |
ad4a85ad | 1588 | dref a; |
1589 | unsigned n_writes = 0, ridx, i; | |
1590 | tree var; | |
1591 | ||
1592 | gcc_assert (chain->type == CT_INVARIANT); | |
1593 | gcc_assert (!chain->combined); | |
f1f41a6c | 1594 | FOR_EACH_VEC_ELT (chain->refs, i, a) |
9ff25603 | 1595 | if (DR_IS_WRITE (a->ref)) |
ad4a85ad | 1596 | n_writes++; |
48e1416a | 1597 | |
ad4a85ad | 1598 | /* If there are no reads in the loop, there is nothing to do. */ |
f1f41a6c | 1599 | if (n_writes == chain->refs.length ()) |
ad4a85ad | 1600 | return; |
1601 | ||
1602 | initialize_root_vars_lm (loop, get_chain_root (chain), n_writes > 0, | |
1603 | &vars, chain->inits, tmp_vars); | |
1604 | ||
1605 | ridx = 0; | |
f1f41a6c | 1606 | FOR_EACH_VEC_ELT (chain->refs, i, a) |
ad4a85ad | 1607 | { |
1608 | bool is_read = DR_IS_READ (a->ref); | |
ad4a85ad | 1609 | |
9ff25603 | 1610 | if (DR_IS_WRITE (a->ref)) |
ad4a85ad | 1611 | { |
1612 | n_writes--; | |
1613 | if (n_writes) | |
1614 | { | |
f1f41a6c | 1615 | var = vars[0]; |
75a70cf9 | 1616 | var = make_ssa_name (SSA_NAME_VAR (var), NULL); |
f1f41a6c | 1617 | vars[0] = var; |
ad4a85ad | 1618 | } |
1619 | else | |
1620 | ridx = 1; | |
1621 | } | |
48e1416a | 1622 | |
f1f41a6c | 1623 | replace_ref_with (a->stmt, vars[ridx], |
ad4a85ad | 1624 | !is_read, !is_read); |
1625 | } | |
ad4a85ad | 1626 | } |
1627 | ||
1628 | /* Returns the single statement in that NAME is used, excepting | |
1629 | the looparound phi nodes contained in one of the chains. If there is no | |
75a70cf9 | 1630 | such statement, or more statements, NULL is returned. */ |
ad4a85ad | 1631 | |
75a70cf9 | 1632 | static gimple |
ad4a85ad | 1633 | single_nonlooparound_use (tree name) |
1634 | { | |
1635 | use_operand_p use; | |
1636 | imm_use_iterator it; | |
75a70cf9 | 1637 | gimple stmt, ret = NULL; |
ad4a85ad | 1638 | |
1639 | FOR_EACH_IMM_USE_FAST (use, it, name) | |
1640 | { | |
1641 | stmt = USE_STMT (use); | |
1642 | ||
75a70cf9 | 1643 | if (gimple_code (stmt) == GIMPLE_PHI) |
ad4a85ad | 1644 | { |
1645 | /* Ignore uses in looparound phi nodes. Uses in other phi nodes | |
1646 | could not be processed anyway, so just fail for them. */ | |
1647 | if (bitmap_bit_p (looparound_phis, | |
1648 | SSA_NAME_VERSION (PHI_RESULT (stmt)))) | |
1649 | continue; | |
1650 | ||
75a70cf9 | 1651 | return NULL; |
ad4a85ad | 1652 | } |
db16e52d | 1653 | else if (is_gimple_debug (stmt)) |
1654 | continue; | |
75a70cf9 | 1655 | else if (ret != NULL) |
1656 | return NULL; | |
ad4a85ad | 1657 | else |
1658 | ret = stmt; | |
1659 | } | |
1660 | ||
1661 | return ret; | |
1662 | } | |
1663 | ||
/* Remove statement STMT, as well as the chain of assignments in that it is
   used.  */

static void
remove_stmt (gimple stmt)
{
  tree name;
  gimple next;
  gimple_stmt_iterator psi;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      name = PHI_RESULT (stmt);
      /* Find the follow-up copy (if any) before the phi is gone.  */
      next = single_nonlooparound_use (name);
      reset_debug_uses (stmt);
      psi = gsi_for_stmt (stmt);
      remove_phi_node (&psi, true);

      /* Only continue into the loop below if the sole use was a plain
	 ssa-name copy of the phi result.  */
      if (!next
	  || !gimple_assign_ssa_name_copy_p (next)
	  || gimple_assign_rhs1 (next) != name)
	return;

      stmt = next;
    }

  /* Walk the chain of single-use ssa-name copies, removing each link.  */
  while (1)
    {
      gimple_stmt_iterator bsi;

      bsi = gsi_for_stmt (stmt);

      name = gimple_assign_lhs (stmt);
      gcc_assert (TREE_CODE (name) == SSA_NAME);

      next = single_nonlooparound_use (name);
      reset_debug_uses (stmt);

      unlink_stmt_vdef (stmt);
      gsi_remove (&bsi, true);
      release_defs (stmt);

      if (!next
	  || !gimple_assign_ssa_name_copy_p (next)
	  || gimple_assign_rhs1 (next) != name)
	return;

      stmt = next;
    }
}
1714 | ||
1715 | /* Perform the predictive commoning optimization for a chain CHAIN. | |
1716 | Uids of the newly created temporary variables are marked in TMP_VARS.*/ | |
1717 | ||
1718 | static void | |
1719 | execute_pred_commoning_chain (struct loop *loop, chain_p chain, | |
1720 | bitmap tmp_vars) | |
1721 | { | |
1722 | unsigned i; | |
e70e8b13 | 1723 | dref a; |
ad4a85ad | 1724 | tree var; |
1725 | ||
1726 | if (chain->combined) | |
1727 | { | |
1728 | /* For combined chains, just remove the statements that are used to | |
1729 | compute the values of the expression (except for the root one). */ | |
f1f41a6c | 1730 | for (i = 1; chain->refs.iterate (i, &a); i++) |
ad4a85ad | 1731 | remove_stmt (a->stmt); |
1732 | } | |
1733 | else | |
1734 | { | |
1735 | /* For non-combined chains, set up the variables that hold its value, | |
1736 | and replace the uses of the original references by these | |
1737 | variables. */ | |
ad4a85ad | 1738 | initialize_root (loop, chain, tmp_vars); |
f1f41a6c | 1739 | for (i = 1; chain->refs.iterate (i, &a); i++) |
ad4a85ad | 1740 | { |
f1f41a6c | 1741 | var = chain->vars[chain->length - a->distance]; |
ad4a85ad | 1742 | replace_ref_with (a->stmt, var, false, false); |
1743 | } | |
1744 | } | |
1745 | } | |
1746 | ||
1747 | /* Determines the unroll factor necessary to remove as many temporary variable | |
1748 | copies as possible. CHAINS is the list of chains that will be | |
1749 | optimized. */ | |
1750 | ||
1751 | static unsigned | |
f1f41a6c | 1752 | determine_unroll_factor (vec<chain_p> chains) |
ad4a85ad | 1753 | { |
1754 | chain_p chain; | |
1755 | unsigned factor = 1, af, nfactor, i; | |
1756 | unsigned max = PARAM_VALUE (PARAM_MAX_UNROLL_TIMES); | |
1757 | ||
f1f41a6c | 1758 | FOR_EACH_VEC_ELT (chains, i, chain) |
ad4a85ad | 1759 | { |
1760 | if (chain->type == CT_INVARIANT || chain->combined) | |
1761 | continue; | |
1762 | ||
1763 | /* The best unroll factor for this chain is equal to the number of | |
1764 | temporary variables that we create for it. */ | |
1765 | af = chain->length; | |
1766 | if (chain->has_max_use_after) | |
1767 | af++; | |
1768 | ||
1769 | nfactor = factor * af / gcd (factor, af); | |
1770 | if (nfactor <= max) | |
1771 | factor = nfactor; | |
1772 | } | |
1773 | ||
1774 | return factor; | |
1775 | } | |
1776 | ||
1777 | /* Perform the predictive commoning optimization for CHAINS. | |
1778 | Uids of the newly created temporary variables are marked in TMP_VARS. */ | |
1779 | ||
1780 | static void | |
f1f41a6c | 1781 | execute_pred_commoning (struct loop *loop, vec<chain_p> chains, |
ad4a85ad | 1782 | bitmap tmp_vars) |
1783 | { | |
1784 | chain_p chain; | |
1785 | unsigned i; | |
1786 | ||
f1f41a6c | 1787 | FOR_EACH_VEC_ELT (chains, i, chain) |
ad4a85ad | 1788 | { |
1789 | if (chain->type == CT_INVARIANT) | |
1790 | execute_load_motion (loop, chain, tmp_vars); | |
1791 | else | |
1792 | execute_pred_commoning_chain (loop, chain, tmp_vars); | |
1793 | } | |
48e1416a | 1794 | |
ad4a85ad | 1795 | update_ssa (TODO_update_ssa_only_virtuals); |
1796 | } | |
1797 | ||
310d2511 | 1798 | /* For each reference in CHAINS, if its defining statement is |
75a70cf9 | 1799 | phi node, record the ssa name that is defined by it. */ |
ad4a85ad | 1800 | |
1801 | static void | |
f1f41a6c | 1802 | replace_phis_by_defined_names (vec<chain_p> chains) |
ad4a85ad | 1803 | { |
1804 | chain_p chain; | |
1805 | dref a; | |
1806 | unsigned i, j; | |
1807 | ||
f1f41a6c | 1808 | FOR_EACH_VEC_ELT (chains, i, chain) |
1809 | FOR_EACH_VEC_ELT (chain->refs, j, a) | |
ad4a85ad | 1810 | { |
75a70cf9 | 1811 | if (gimple_code (a->stmt) == GIMPLE_PHI) |
1812 | { | |
1813 | a->name_defined_by_phi = PHI_RESULT (a->stmt); | |
1814 | a->stmt = NULL; | |
1815 | } | |
ad4a85ad | 1816 | } |
1817 | } | |
1818 | ||
75a70cf9 | 1819 | /* For each reference in CHAINS, if name_defined_by_phi is not |
1820 | NULL, use it to set the stmt field. */ | |
ad4a85ad | 1821 | |
1822 | static void | |
f1f41a6c | 1823 | replace_names_by_phis (vec<chain_p> chains) |
ad4a85ad | 1824 | { |
1825 | chain_p chain; | |
1826 | dref a; | |
1827 | unsigned i, j; | |
1828 | ||
f1f41a6c | 1829 | FOR_EACH_VEC_ELT (chains, i, chain) |
1830 | FOR_EACH_VEC_ELT (chain->refs, j, a) | |
75a70cf9 | 1831 | if (a->stmt == NULL) |
ad4a85ad | 1832 | { |
75a70cf9 | 1833 | a->stmt = SSA_NAME_DEF_STMT (a->name_defined_by_phi); |
1834 | gcc_assert (gimple_code (a->stmt) == GIMPLE_PHI); | |
1835 | a->name_defined_by_phi = NULL_TREE; | |
ad4a85ad | 1836 | } |
1837 | } | |
1838 | ||
1839 | /* Wrapper over execute_pred_commoning, to pass it as a callback | |
1840 | to tree_transform_and_unroll_loop. */ | |
1841 | ||
1842 | struct epcc_data | |
1843 | { | |
f1f41a6c | 1844 | vec<chain_p> chains; |
ad4a85ad | 1845 | bitmap tmp_vars; |
1846 | }; | |
1847 | ||
1848 | static void | |
1849 | execute_pred_commoning_cbck (struct loop *loop, void *data) | |
1850 | { | |
45ba1503 | 1851 | struct epcc_data *const dta = (struct epcc_data *) data; |
ad4a85ad | 1852 | |
1853 | /* Restore phi nodes that were replaced by ssa names before | |
1854 | tree_transform_and_unroll_loop (see detailed description in | |
1855 | tree_predictive_commoning_loop). */ | |
1856 | replace_names_by_phis (dta->chains); | |
1857 | execute_pred_commoning (loop, dta->chains, dta->tmp_vars); | |
1858 | } | |
1859 | ||
ad4a85ad | 1860 | /* Base NAME and all the names in the chain of phi nodes that use it |
1861 | on variable VAR. The phi nodes are recognized by being in the copies of | |
1862 | the header of the LOOP. */ | |
1863 | ||
1864 | static void | |
1865 | base_names_in_chain_on (struct loop *loop, tree name, tree var) | |
1866 | { | |
75a70cf9 | 1867 | gimple stmt, phi; |
ad4a85ad | 1868 | imm_use_iterator iter; |
ad4a85ad | 1869 | |
3b652cc1 | 1870 | replace_ssa_name_symbol (name, var); |
ad4a85ad | 1871 | |
1872 | while (1) | |
1873 | { | |
1874 | phi = NULL; | |
1875 | FOR_EACH_IMM_USE_STMT (stmt, iter, name) | |
1876 | { | |
75a70cf9 | 1877 | if (gimple_code (stmt) == GIMPLE_PHI |
1878 | && flow_bb_inside_loop_p (loop, gimple_bb (stmt))) | |
ad4a85ad | 1879 | { |
1880 | phi = stmt; | |
1881 | BREAK_FROM_IMM_USE_STMT (iter); | |
1882 | } | |
1883 | } | |
1884 | if (!phi) | |
1885 | return; | |
1886 | ||
ad4a85ad | 1887 | name = PHI_RESULT (phi); |
3b652cc1 | 1888 | replace_ssa_name_symbol (name, var); |
ad4a85ad | 1889 | } |
1890 | } | |
1891 | ||
1892 | /* Given an unrolled LOOP after predictive commoning, remove the | |
1893 | register copies arising from phi nodes by changing the base | |
1894 | variables of SSA names. TMP_VARS is the set of the temporary variables | |
1895 | for those we want to perform this. */ | |
1896 | ||
1897 | static void | |
1898 | eliminate_temp_copies (struct loop *loop, bitmap tmp_vars) | |
1899 | { | |
1900 | edge e; | |
1a91d914 | 1901 | gphi *phi; |
1902 | gimple stmt; | |
75a70cf9 | 1903 | tree name, use, var; |
1a91d914 | 1904 | gphi_iterator psi; |
ad4a85ad | 1905 | |
1906 | e = loop_latch_edge (loop); | |
75a70cf9 | 1907 | for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi)) |
ad4a85ad | 1908 | { |
1a91d914 | 1909 | phi = psi.phi (); |
ad4a85ad | 1910 | name = PHI_RESULT (phi); |
1911 | var = SSA_NAME_VAR (name); | |
ec11736b | 1912 | if (!var || !bitmap_bit_p (tmp_vars, DECL_UID (var))) |
ad4a85ad | 1913 | continue; |
1914 | use = PHI_ARG_DEF_FROM_EDGE (phi, e); | |
1915 | gcc_assert (TREE_CODE (use) == SSA_NAME); | |
1916 | ||
1917 | /* Base all the ssa names in the ud and du chain of NAME on VAR. */ | |
1918 | stmt = SSA_NAME_DEF_STMT (use); | |
75a70cf9 | 1919 | while (gimple_code (stmt) == GIMPLE_PHI |
3fa1e4f2 | 1920 | /* In case we could not unroll the loop enough to eliminate |
1921 | all copies, we may reach the loop header before the defining | |
1922 | statement (in that case, some register copies will be present | |
1923 | in loop latch in the final code, corresponding to the newly | |
1924 | created looparound phi nodes). */ | |
75a70cf9 | 1925 | && gimple_bb (stmt) != loop->header) |
ad4a85ad | 1926 | { |
75a70cf9 | 1927 | gcc_assert (single_pred_p (gimple_bb (stmt))); |
ad4a85ad | 1928 | use = PHI_ARG_DEF (stmt, 0); |
1929 | stmt = SSA_NAME_DEF_STMT (use); | |
1930 | } | |
1931 | ||
1932 | base_names_in_chain_on (loop, use, var); | |
1933 | } | |
1934 | } | |
1935 | ||
1936 | /* Returns true if CHAIN is suitable to be combined. */ | |
1937 | ||
1938 | static bool | |
1939 | chain_can_be_combined_p (chain_p chain) | |
1940 | { | |
1941 | return (!chain->combined | |
1942 | && (chain->type == CT_LOAD || chain->type == CT_COMBINATION)); | |
1943 | } | |
1944 | ||
1945 | /* Returns the modify statement that uses NAME. Skips over assignment | |
1946 | statements, NAME is replaced with the actual name used in the returned | |
1947 | statement. */ | |
1948 | ||
75a70cf9 | 1949 | static gimple |
ad4a85ad | 1950 | find_use_stmt (tree *name) |
1951 | { | |
75a70cf9 | 1952 | gimple stmt; |
1953 | tree rhs, lhs; | |
ad4a85ad | 1954 | |
1955 | /* Skip over assignments. */ | |
1956 | while (1) | |
1957 | { | |
1958 | stmt = single_nonlooparound_use (*name); | |
1959 | if (!stmt) | |
75a70cf9 | 1960 | return NULL; |
ad4a85ad | 1961 | |
75a70cf9 | 1962 | if (gimple_code (stmt) != GIMPLE_ASSIGN) |
1963 | return NULL; | |
ad4a85ad | 1964 | |
75a70cf9 | 1965 | lhs = gimple_assign_lhs (stmt); |
ad4a85ad | 1966 | if (TREE_CODE (lhs) != SSA_NAME) |
75a70cf9 | 1967 | return NULL; |
ad4a85ad | 1968 | |
75a70cf9 | 1969 | if (gimple_assign_copy_p (stmt)) |
1970 | { | |
1971 | rhs = gimple_assign_rhs1 (stmt); | |
1972 | if (rhs != *name) | |
1973 | return NULL; | |
ad4a85ad | 1974 | |
75a70cf9 | 1975 | *name = lhs; |
1976 | } | |
1977 | else if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) | |
1978 | == GIMPLE_BINARY_RHS) | |
1979 | return stmt; | |
1980 | else | |
1981 | return NULL; | |
ad4a85ad | 1982 | } |
ad4a85ad | 1983 | } |
1984 | ||
1985 | /* Returns true if we may perform reassociation for operation CODE in TYPE. */ | |
1986 | ||
1987 | static bool | |
1988 | may_reassociate_p (tree type, enum tree_code code) | |
1989 | { | |
1990 | if (FLOAT_TYPE_P (type) | |
1991 | && !flag_unsafe_math_optimizations) | |
1992 | return false; | |
1993 | ||
1994 | return (commutative_tree_code (code) | |
1995 | && associative_tree_code (code)); | |
1996 | } | |
1997 | ||
1998 | /* If the operation used in STMT is associative and commutative, go through the | |
1999 | tree of the same operations and returns its root. Distance to the root | |
2000 | is stored in DISTANCE. */ | |
2001 | ||
75a70cf9 | 2002 | static gimple |
2003 | find_associative_operation_root (gimple stmt, unsigned *distance) | |
ad4a85ad | 2004 | { |
75a70cf9 | 2005 | tree lhs; |
2006 | gimple next; | |
2007 | enum tree_code code = gimple_assign_rhs_code (stmt); | |
2008 | tree type = TREE_TYPE (gimple_assign_lhs (stmt)); | |
ad4a85ad | 2009 | unsigned dist = 0; |
2010 | ||
75a70cf9 | 2011 | if (!may_reassociate_p (type, code)) |
2012 | return NULL; | |
ad4a85ad | 2013 | |
2014 | while (1) | |
2015 | { | |
75a70cf9 | 2016 | lhs = gimple_assign_lhs (stmt); |
ad4a85ad | 2017 | gcc_assert (TREE_CODE (lhs) == SSA_NAME); |
2018 | ||
2019 | next = find_use_stmt (&lhs); | |
75a70cf9 | 2020 | if (!next |
2021 | || gimple_assign_rhs_code (next) != code) | |
ad4a85ad | 2022 | break; |
2023 | ||
2024 | stmt = next; | |
2025 | dist++; | |
2026 | } | |
2027 | ||
2028 | if (distance) | |
2029 | *distance = dist; | |
2030 | return stmt; | |
2031 | } | |
2032 | ||
2033 | /* Returns the common statement in that NAME1 and NAME2 have a use. If there | |
2034 | is no such statement, returns NULL_TREE. In case the operation used on | |
310d2511 | 2035 | NAME1 and NAME2 is associative and commutative, returns the root of the |
ad4a85ad | 2036 | tree formed by this operation instead of the statement that uses NAME1 or |
2037 | NAME2. */ | |
2038 | ||
75a70cf9 | 2039 | static gimple |
ad4a85ad | 2040 | find_common_use_stmt (tree *name1, tree *name2) |
2041 | { | |
75a70cf9 | 2042 | gimple stmt1, stmt2; |
ad4a85ad | 2043 | |
2044 | stmt1 = find_use_stmt (name1); | |
2045 | if (!stmt1) | |
75a70cf9 | 2046 | return NULL; |
ad4a85ad | 2047 | |
2048 | stmt2 = find_use_stmt (name2); | |
2049 | if (!stmt2) | |
75a70cf9 | 2050 | return NULL; |
ad4a85ad | 2051 | |
2052 | if (stmt1 == stmt2) | |
2053 | return stmt1; | |
2054 | ||
2055 | stmt1 = find_associative_operation_root (stmt1, NULL); | |
2056 | if (!stmt1) | |
75a70cf9 | 2057 | return NULL; |
ad4a85ad | 2058 | stmt2 = find_associative_operation_root (stmt2, NULL); |
2059 | if (!stmt2) | |
75a70cf9 | 2060 | return NULL; |
ad4a85ad | 2061 | |
75a70cf9 | 2062 | return (stmt1 == stmt2 ? stmt1 : NULL); |
ad4a85ad | 2063 | } |
2064 | ||
2065 | /* Checks whether R1 and R2 are combined together using CODE, with the result | |
2066 | in RSLT_TYPE, in order R1 CODE R2 if SWAP is false and in order R2 CODE R1 | |
2067 | if it is true. If CODE is ERROR_MARK, set these values instead. */ | |
2068 | ||
2069 | static bool | |
2070 | combinable_refs_p (dref r1, dref r2, | |
2071 | enum tree_code *code, bool *swap, tree *rslt_type) | |
2072 | { | |
2073 | enum tree_code acode; | |
2074 | bool aswap; | |
2075 | tree atype; | |
75a70cf9 | 2076 | tree name1, name2; |
2077 | gimple stmt; | |
ad4a85ad | 2078 | |
2079 | name1 = name_for_ref (r1); | |
2080 | name2 = name_for_ref (r2); | |
2081 | gcc_assert (name1 != NULL_TREE && name2 != NULL_TREE); | |
2082 | ||
2083 | stmt = find_common_use_stmt (&name1, &name2); | |
2084 | ||
f8d7b728 | 2085 | if (!stmt |
2086 | /* A simple post-dominance check - make sure the combination | |
2087 | is executed under the same condition as the references. */ | |
2088 | || (gimple_bb (stmt) != gimple_bb (r1->stmt) | |
2089 | && gimple_bb (stmt) != gimple_bb (r2->stmt))) | |
ad4a85ad | 2090 | return false; |
2091 | ||
75a70cf9 | 2092 | acode = gimple_assign_rhs_code (stmt); |
ad4a85ad | 2093 | aswap = (!commutative_tree_code (acode) |
75a70cf9 | 2094 | && gimple_assign_rhs1 (stmt) != name1); |
2095 | atype = TREE_TYPE (gimple_assign_lhs (stmt)); | |
ad4a85ad | 2096 | |
2097 | if (*code == ERROR_MARK) | |
2098 | { | |
2099 | *code = acode; | |
2100 | *swap = aswap; | |
2101 | *rslt_type = atype; | |
2102 | return true; | |
2103 | } | |
2104 | ||
2105 | return (*code == acode | |
2106 | && *swap == aswap | |
2107 | && *rslt_type == atype); | |
2108 | } | |
2109 | ||
2110 | /* Remove OP from the operation on rhs of STMT, and replace STMT with | |
2111 | an assignment of the remaining operand. */ | |
2112 | ||
2113 | static void | |
75a70cf9 | 2114 | remove_name_from_operation (gimple stmt, tree op) |
ad4a85ad | 2115 | { |
75a70cf9 | 2116 | tree other_op; |
2117 | gimple_stmt_iterator si; | |
ad4a85ad | 2118 | |
75a70cf9 | 2119 | gcc_assert (is_gimple_assign (stmt)); |
ad4a85ad | 2120 | |
75a70cf9 | 2121 | if (gimple_assign_rhs1 (stmt) == op) |
2122 | other_op = gimple_assign_rhs2 (stmt); | |
ad4a85ad | 2123 | else |
75a70cf9 | 2124 | other_op = gimple_assign_rhs1 (stmt); |
2125 | ||
2126 | si = gsi_for_stmt (stmt); | |
2127 | gimple_assign_set_rhs_from_tree (&si, other_op); | |
2128 | ||
2129 | /* We should not have reallocated STMT. */ | |
2130 | gcc_assert (gsi_stmt (si) == stmt); | |
2131 | ||
ad4a85ad | 2132 | update_stmt (stmt); |
2133 | } | |
2134 | ||
2135 | /* Reassociates the expression in that NAME1 and NAME2 are used so that they | |
2136 | are combined in a single statement, and returns this statement. */ | |
2137 | ||
75a70cf9 | 2138 | static gimple |
ad4a85ad | 2139 | reassociate_to_the_same_stmt (tree name1, tree name2) |
2140 | { | |
75a70cf9 | 2141 | gimple stmt1, stmt2, root1, root2, s1, s2; |
1a91d914 | 2142 | gassign *new_stmt, *tmp_stmt; |
75a70cf9 | 2143 | tree new_name, tmp_name, var, r1, r2; |
ad4a85ad | 2144 | unsigned dist1, dist2; |
2145 | enum tree_code code; | |
2146 | tree type = TREE_TYPE (name1); | |
75a70cf9 | 2147 | gimple_stmt_iterator bsi; |
ad4a85ad | 2148 | |
2149 | stmt1 = find_use_stmt (&name1); | |
2150 | stmt2 = find_use_stmt (&name2); | |
2151 | root1 = find_associative_operation_root (stmt1, &dist1); | |
2152 | root2 = find_associative_operation_root (stmt2, &dist2); | |
75a70cf9 | 2153 | code = gimple_assign_rhs_code (stmt1); |
ad4a85ad | 2154 | |
2155 | gcc_assert (root1 && root2 && root1 == root2 | |
75a70cf9 | 2156 | && code == gimple_assign_rhs_code (stmt2)); |
ad4a85ad | 2157 | |
2158 | /* Find the root of the nearest expression in that both NAME1 and NAME2 | |
2159 | are used. */ | |
2160 | r1 = name1; | |
2161 | s1 = stmt1; | |
2162 | r2 = name2; | |
2163 | s2 = stmt2; | |
2164 | ||
2165 | while (dist1 > dist2) | |
2166 | { | |
2167 | s1 = find_use_stmt (&r1); | |
75a70cf9 | 2168 | r1 = gimple_assign_lhs (s1); |
ad4a85ad | 2169 | dist1--; |
2170 | } | |
2171 | while (dist2 > dist1) | |
2172 | { | |
2173 | s2 = find_use_stmt (&r2); | |
75a70cf9 | 2174 | r2 = gimple_assign_lhs (s2); |
ad4a85ad | 2175 | dist2--; |
2176 | } | |
2177 | ||
2178 | while (s1 != s2) | |
2179 | { | |
2180 | s1 = find_use_stmt (&r1); | |
75a70cf9 | 2181 | r1 = gimple_assign_lhs (s1); |
ad4a85ad | 2182 | s2 = find_use_stmt (&r2); |
75a70cf9 | 2183 | r2 = gimple_assign_lhs (s2); |
ad4a85ad | 2184 | } |
2185 | ||
2186 | /* Remove NAME1 and NAME2 from the statements in that they are used | |
2187 | currently. */ | |
2188 | remove_name_from_operation (stmt1, name1); | |
2189 | remove_name_from_operation (stmt2, name2); | |
2190 | ||
2191 | /* Insert the new statement combining NAME1 and NAME2 before S1, and | |
2192 | combine it with the rhs of S1. */ | |
2ac51e48 | 2193 | var = create_tmp_reg (type, "predreastmp"); |
75a70cf9 | 2194 | new_name = make_ssa_name (var, NULL); |
2195 | new_stmt = gimple_build_assign_with_ops (code, new_name, name1, name2); | |
ad4a85ad | 2196 | |
2ac51e48 | 2197 | var = create_tmp_reg (type, "predreastmp"); |
75a70cf9 | 2198 | tmp_name = make_ssa_name (var, NULL); |
2199 | ||
2200 | /* Rhs of S1 may now be either a binary expression with operation | |
2201 | CODE, or gimple_val (in case that stmt1 == s1 or stmt2 == s1, | |
2202 | so that name1 or name2 was removed from it). */ | |
2203 | tmp_stmt = gimple_build_assign_with_ops (gimple_assign_rhs_code (s1), | |
2204 | tmp_name, | |
2205 | gimple_assign_rhs1 (s1), | |
2206 | gimple_assign_rhs2 (s1)); | |
2207 | ||
2208 | bsi = gsi_for_stmt (s1); | |
2209 | gimple_assign_set_rhs_with_ops (&bsi, code, new_name, tmp_name); | |
2210 | s1 = gsi_stmt (bsi); | |
ad4a85ad | 2211 | update_stmt (s1); |
2212 | ||
75a70cf9 | 2213 | gsi_insert_before (&bsi, new_stmt, GSI_SAME_STMT); |
2214 | gsi_insert_before (&bsi, tmp_stmt, GSI_SAME_STMT); | |
ad4a85ad | 2215 | |
2216 | return new_stmt; | |
2217 | } | |
2218 | ||
2219 | /* Returns the statement that combines references R1 and R2. In case R1 | |
2220 | and R2 are not used in the same statement, but they are used with an | |
2221 | associative and commutative operation in the same expression, reassociate | |
2222 | the expression so that they are used in the same statement. */ | |
2223 | ||
75a70cf9 | 2224 | static gimple |
ad4a85ad | 2225 | stmt_combining_refs (dref r1, dref r2) |
2226 | { | |
75a70cf9 | 2227 | gimple stmt1, stmt2; |
ad4a85ad | 2228 | tree name1 = name_for_ref (r1); |
2229 | tree name2 = name_for_ref (r2); | |
2230 | ||
2231 | stmt1 = find_use_stmt (&name1); | |
2232 | stmt2 = find_use_stmt (&name2); | |
2233 | if (stmt1 == stmt2) | |
2234 | return stmt1; | |
2235 | ||
2236 | return reassociate_to_the_same_stmt (name1, name2); | |
2237 | } | |
2238 | ||
2239 | /* Tries to combine chains CH1 and CH2 together. If this succeeds, the | |
2240 | description of the new chain is returned, otherwise we return NULL. */ | |
2241 | ||
2242 | static chain_p | |
2243 | combine_chains (chain_p ch1, chain_p ch2) | |
2244 | { | |
2245 | dref r1, r2, nw; | |
2246 | enum tree_code op = ERROR_MARK; | |
2247 | bool swap = false; | |
2248 | chain_p new_chain; | |
2249 | unsigned i; | |
75a70cf9 | 2250 | gimple root_stmt; |
ad4a85ad | 2251 | tree rslt_type = NULL_TREE; |
2252 | ||
2253 | if (ch1 == ch2) | |
c84b1d32 | 2254 | return NULL; |
ad4a85ad | 2255 | if (ch1->length != ch2->length) |
2256 | return NULL; | |
2257 | ||
f1f41a6c | 2258 | if (ch1->refs.length () != ch2->refs.length ()) |
ad4a85ad | 2259 | return NULL; |
2260 | ||
f1f41a6c | 2261 | for (i = 0; (ch1->refs.iterate (i, &r1) |
2262 | && ch2->refs.iterate (i, &r2)); i++) | |
ad4a85ad | 2263 | { |
2264 | if (r1->distance != r2->distance) | |
2265 | return NULL; | |
2266 | ||
2267 | if (!combinable_refs_p (r1, r2, &op, &swap, &rslt_type)) | |
2268 | return NULL; | |
2269 | } | |
2270 | ||
2271 | if (swap) | |
2272 | { | |
2273 | chain_p tmp = ch1; | |
2274 | ch1 = ch2; | |
2275 | ch2 = tmp; | |
2276 | } | |
2277 | ||
2278 | new_chain = XCNEW (struct chain); | |
2279 | new_chain->type = CT_COMBINATION; | |
f4e36c33 | 2280 | new_chain->op = op; |
ad4a85ad | 2281 | new_chain->ch1 = ch1; |
2282 | new_chain->ch2 = ch2; | |
2283 | new_chain->rslt_type = rslt_type; | |
2284 | new_chain->length = ch1->length; | |
2285 | ||
f1f41a6c | 2286 | for (i = 0; (ch1->refs.iterate (i, &r1) |
2287 | && ch2->refs.iterate (i, &r2)); i++) | |
ad4a85ad | 2288 | { |
26dbec0a | 2289 | nw = XCNEW (struct dref_d); |
ad4a85ad | 2290 | nw->stmt = stmt_combining_refs (r1, r2); |
2291 | nw->distance = r1->distance; | |
2292 | ||
f1f41a6c | 2293 | new_chain->refs.safe_push (nw); |
ad4a85ad | 2294 | } |
2295 | ||
2296 | new_chain->has_max_use_after = false; | |
2297 | root_stmt = get_chain_root (new_chain)->stmt; | |
f1f41a6c | 2298 | for (i = 1; new_chain->refs.iterate (i, &nw); i++) |
ad4a85ad | 2299 | { |
2300 | if (nw->distance == new_chain->length | |
2301 | && !stmt_dominates_stmt_p (nw->stmt, root_stmt)) | |
2302 | { | |
2303 | new_chain->has_max_use_after = true; | |
2304 | break; | |
2305 | } | |
2306 | } | |
2307 | ||
2308 | ch1->combined = true; | |
2309 | ch2->combined = true; | |
2310 | return new_chain; | |
2311 | } | |
2312 | ||
2313 | /* Try to combine the CHAINS. */ | |
2314 | ||
2315 | static void | |
f1f41a6c | 2316 | try_combine_chains (vec<chain_p> *chains) |
ad4a85ad | 2317 | { |
2318 | unsigned i, j; | |
2319 | chain_p ch1, ch2, cch; | |
c2078b80 | 2320 | auto_vec<chain_p> worklist; |
ad4a85ad | 2321 | |
f1f41a6c | 2322 | FOR_EACH_VEC_ELT (*chains, i, ch1) |
ad4a85ad | 2323 | if (chain_can_be_combined_p (ch1)) |
f1f41a6c | 2324 | worklist.safe_push (ch1); |
ad4a85ad | 2325 | |
f1f41a6c | 2326 | while (!worklist.is_empty ()) |
ad4a85ad | 2327 | { |
f1f41a6c | 2328 | ch1 = worklist.pop (); |
ad4a85ad | 2329 | if (!chain_can_be_combined_p (ch1)) |
2330 | continue; | |
2331 | ||
f1f41a6c | 2332 | FOR_EACH_VEC_ELT (*chains, j, ch2) |
ad4a85ad | 2333 | { |
2334 | if (!chain_can_be_combined_p (ch2)) | |
2335 | continue; | |
2336 | ||
2337 | cch = combine_chains (ch1, ch2); | |
2338 | if (cch) | |
2339 | { | |
f1f41a6c | 2340 | worklist.safe_push (cch); |
2341 | chains->safe_push (cch); | |
ad4a85ad | 2342 | break; |
2343 | } | |
2344 | } | |
2345 | } | |
2346 | } | |
2347 | ||
ad4a85ad | 2348 | /* Prepare initializers for CHAIN in LOOP. Returns false if this is |
2349 | impossible because one of these initializers may trap, true otherwise. */ | |
2350 | ||
2351 | static bool | |
2352 | prepare_initializers_chain (struct loop *loop, chain_p chain) | |
2353 | { | |
2354 | unsigned i, n = (chain->type == CT_INVARIANT) ? 1 : chain->length; | |
2355 | struct data_reference *dr = get_chain_root (chain)->ref; | |
75a70cf9 | 2356 | tree init; |
2357 | gimple_seq stmts; | |
ad4a85ad | 2358 | dref laref; |
2359 | edge entry = loop_preheader_edge (loop); | |
2360 | ||
2361 | /* Find the initializers for the variables, and check that they cannot | |
2362 | trap. */ | |
f1f41a6c | 2363 | chain->inits.create (n); |
ad4a85ad | 2364 | for (i = 0; i < n; i++) |
f1f41a6c | 2365 | chain->inits.quick_push (NULL_TREE); |
ad4a85ad | 2366 | |
2367 | /* If we have replaced some looparound phi nodes, use their initializers | |
2368 | instead of creating our own. */ | |
f1f41a6c | 2369 | FOR_EACH_VEC_ELT (chain->refs, i, laref) |
ad4a85ad | 2370 | { |
75a70cf9 | 2371 | if (gimple_code (laref->stmt) != GIMPLE_PHI) |
ad4a85ad | 2372 | continue; |
2373 | ||
2374 | gcc_assert (laref->distance > 0); | |
f1f41a6c | 2375 | chain->inits[n - laref->distance] |
2376 | = PHI_ARG_DEF_FROM_EDGE (laref->stmt, entry); | |
ad4a85ad | 2377 | } |
2378 | ||
2379 | for (i = 0; i < n; i++) | |
2380 | { | |
f1f41a6c | 2381 | if (chain->inits[i] != NULL_TREE) |
ad4a85ad | 2382 | continue; |
2383 | ||
99f6be4b | 2384 | init = ref_at_iteration (dr, (int) i - n, &stmts); |
ad4a85ad | 2385 | if (!chain->all_always_accessed && tree_could_trap_p (init)) |
2386 | return false; | |
2387 | ||
ad4a85ad | 2388 | if (stmts) |
dd277d48 | 2389 | gsi_insert_seq_on_edge_immediate (entry, stmts); |
ad4a85ad | 2390 | |
f1f41a6c | 2391 | chain->inits[i] = init; |
ad4a85ad | 2392 | } |
2393 | ||
2394 | return true; | |
2395 | } | |
2396 | ||
2397 | /* Prepare initializers for CHAINS in LOOP, and free chains that cannot | |
2398 | be used because the initializers might trap. */ | |
2399 | ||
2400 | static void | |
f1f41a6c | 2401 | prepare_initializers (struct loop *loop, vec<chain_p> chains) |
ad4a85ad | 2402 | { |
2403 | chain_p chain; | |
2404 | unsigned i; | |
2405 | ||
f1f41a6c | 2406 | for (i = 0; i < chains.length (); ) |
ad4a85ad | 2407 | { |
f1f41a6c | 2408 | chain = chains[i]; |
ad4a85ad | 2409 | if (prepare_initializers_chain (loop, chain)) |
2410 | i++; | |
2411 | else | |
2412 | { | |
2413 | release_chain (chain); | |
f1f41a6c | 2414 | chains.unordered_remove (i); |
ad4a85ad | 2415 | } |
2416 | } | |
2417 | } | |
2418 | ||
2419 | /* Performs predictive commoning for LOOP. Returns true if LOOP was | |
2420 | unrolled. */ | |
2421 | ||
2422 | static bool | |
2423 | tree_predictive_commoning_loop (struct loop *loop) | |
2424 | { | |
f1f41a6c | 2425 | vec<data_reference_p> datarefs; |
2426 | vec<ddr_p> dependences; | |
ad4a85ad | 2427 | struct component *components; |
1e094109 | 2428 | vec<chain_p> chains = vNULL; |
ad4a85ad | 2429 | unsigned unroll_factor; |
2430 | struct tree_niter_desc desc; | |
2431 | bool unroll = false; | |
2432 | edge exit; | |
2433 | bitmap tmp_vars; | |
2434 | ||
2435 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2436 | fprintf (dump_file, "Processing loop %d\n", loop->num); | |
2437 | ||
2438 | /* Find the data references and split them into components according to their | |
2439 | dependence relations. */ | |
4997014d | 2440 | auto_vec<loop_p, 3> loop_nest; |
f1f41a6c | 2441 | dependences.create (10); |
e85cf4e5 | 2442 | datarefs.create (10); |
713f1f14 | 2443 | if (! compute_data_dependences_for_loop (loop, true, &loop_nest, &datarefs, |
2444 | &dependences)) | |
2445 | { | |
2446 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2447 | fprintf (dump_file, "Cannot analyze data dependencies\n"); | |
713f1f14 | 2448 | free_data_refs (datarefs); |
2449 | free_dependence_relations (dependences); | |
2450 | return false; | |
2451 | } | |
2452 | ||
ad4a85ad | 2453 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2454 | dump_data_dependence_relations (dump_file, dependences); | |
2455 | ||
2456 | components = split_data_refs_to_components (loop, datarefs, dependences); | |
f1f41a6c | 2457 | loop_nest.release (); |
ad4a85ad | 2458 | free_dependence_relations (dependences); |
2459 | if (!components) | |
2460 | { | |
2461 | free_data_refs (datarefs); | |
f037f351 | 2462 | free_affine_expand_cache (&name_expansions); |
ad4a85ad | 2463 | return false; |
2464 | } | |
2465 | ||
2466 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2467 | { | |
2468 | fprintf (dump_file, "Initial state:\n\n"); | |
2469 | dump_components (dump_file, components); | |
2470 | } | |
2471 | ||
2472 | /* Find the suitable components and split them into chains. */ | |
2473 | components = filter_suitable_components (loop, components); | |
2474 | ||
2475 | tmp_vars = BITMAP_ALLOC (NULL); | |
2476 | looparound_phis = BITMAP_ALLOC (NULL); | |
2477 | determine_roots (loop, components, &chains); | |
2478 | release_components (components); | |
2479 | ||
f1f41a6c | 2480 | if (!chains.exists ()) |
ad4a85ad | 2481 | { |
2482 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2483 | fprintf (dump_file, | |
2484 | "Predictive commoning failed: no suitable chains\n"); | |
2485 | goto end; | |
2486 | } | |
2487 | prepare_initializers (loop, chains); | |
2488 | ||
2489 | /* Try to combine the chains that are always worked with together. */ | |
2490 | try_combine_chains (&chains); | |
2491 | ||
2492 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2493 | { | |
2494 | fprintf (dump_file, "Before commoning:\n\n"); | |
2495 | dump_chains (dump_file, chains); | |
2496 | } | |
2497 | ||
2498 | /* Determine the unroll factor, and if the loop should be unrolled, ensure | |
2499 | that its number of iterations is divisible by the factor. */ | |
2500 | unroll_factor = determine_unroll_factor (chains); | |
2501 | scev_reset (); | |
286fa508 | 2502 | unroll = (unroll_factor > 1 |
2503 | && can_unroll_loop_p (loop, unroll_factor, &desc)); | |
ad4a85ad | 2504 | exit = single_dom_exit (loop); |
2505 | ||
2506 | /* Execute the predictive commoning transformations, and possibly unroll the | |
2507 | loop. */ | |
2508 | if (unroll) | |
2509 | { | |
2510 | struct epcc_data dta; | |
2511 | ||
2512 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2513 | fprintf (dump_file, "Unrolling %u times.\n", unroll_factor); | |
2514 | ||
2515 | dta.chains = chains; | |
2516 | dta.tmp_vars = tmp_vars; | |
48e1416a | 2517 | |
ad4a85ad | 2518 | update_ssa (TODO_update_ssa_only_virtuals); |
2519 | ||
2520 | /* Cfg manipulations performed in tree_transform_and_unroll_loop before | |
2521 | execute_pred_commoning_cbck is called may cause phi nodes to be | |
2522 | reallocated, which is a problem since CHAINS may point to these | |
2523 | statements. To fix this, we store the ssa names defined by the | |
2524 | phi nodes here instead of the phi nodes themselves, and restore | |
2525 | the phi nodes in execute_pred_commoning_cbck. A bit hacky. */ | |
2526 | replace_phis_by_defined_names (chains); | |
2527 | ||
2528 | tree_transform_and_unroll_loop (loop, unroll_factor, exit, &desc, | |
2529 | execute_pred_commoning_cbck, &dta); | |
2530 | eliminate_temp_copies (loop, tmp_vars); | |
2531 | } | |
2532 | else | |
2533 | { | |
2534 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2535 | fprintf (dump_file, | |
2536 | "Executing predictive commoning without unrolling.\n"); | |
2537 | execute_pred_commoning (loop, chains, tmp_vars); | |
2538 | } | |
2539 | ||
2540 | end: ; | |
2541 | release_chains (chains); | |
2542 | free_data_refs (datarefs); | |
2543 | BITMAP_FREE (tmp_vars); | |
2544 | BITMAP_FREE (looparound_phis); | |
2545 | ||
2546 | free_affine_expand_cache (&name_expansions); | |
2547 | ||
2548 | return unroll; | |
2549 | } | |
2550 | ||
2551 | /* Runs predictive commoning. */ | |
2552 | ||
eb2a640e | 2553 | unsigned |
ad4a85ad | 2554 | tree_predictive_commoning (void) |
2555 | { | |
2556 | bool unrolled = false; | |
2557 | struct loop *loop; | |
eb2a640e | 2558 | unsigned ret = 0; |
ad4a85ad | 2559 | |
2560 | initialize_original_copy_tables (); | |
f21d4d00 | 2561 | FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST) |
7baffbd3 | 2562 | if (optimize_loop_for_speed_p (loop)) |
2563 | { | |
2564 | unrolled |= tree_predictive_commoning_loop (loop); | |
2565 | } | |
ad4a85ad | 2566 | |
2567 | if (unrolled) | |
2568 | { | |
2569 | scev_reset (); | |
eb2a640e | 2570 | ret = TODO_cleanup_cfg; |
ad4a85ad | 2571 | } |
2572 | free_original_copy_tables (); | |
eb2a640e | 2573 | |
2574 | return ret; | |
ad4a85ad | 2575 | } |
64641360 | 2576 | |
2577 | /* Predictive commoning Pass. */ | |
2578 | ||
2579 | static unsigned | |
b3083327 | 2580 | run_tree_predictive_commoning (struct function *fun) |
64641360 | 2581 | { |
b3083327 | 2582 | if (number_of_loops (fun) <= 1) |
64641360 | 2583 | return 0; |
2584 | ||
2585 | return tree_predictive_commoning (); | |
2586 | } | |
2587 | ||
64641360 | 2588 | namespace { |
2589 | ||
2590 | const pass_data pass_data_predcom = | |
2591 | { | |
2592 | GIMPLE_PASS, /* type */ | |
2593 | "pcom", /* name */ | |
2594 | OPTGROUP_LOOP, /* optinfo_flags */ | |
64641360 | 2595 | TV_PREDCOM, /* tv_id */ |
2596 | PROP_cfg, /* properties_required */ | |
2597 | 0, /* properties_provided */ | |
2598 | 0, /* properties_destroyed */ | |
2599 | 0, /* todo_flags_start */ | |
2600 | TODO_update_ssa_only_virtuals, /* todo_flags_finish */ | |
2601 | }; | |
2602 | ||
2603 | class pass_predcom : public gimple_opt_pass | |
2604 | { | |
2605 | public: | |
2606 | pass_predcom (gcc::context *ctxt) | |
2607 | : gimple_opt_pass (pass_data_predcom, ctxt) | |
2608 | {} | |
2609 | ||
2610 | /* opt_pass methods: */ | |
31315c24 | 2611 | virtual bool gate (function *) { return flag_predictive_commoning != 0; } |
b3083327 | 2612 | virtual unsigned int execute (function *fun) |
65b0537f | 2613 | { |
b3083327 | 2614 | return run_tree_predictive_commoning (fun); |
65b0537f | 2615 | } |
64641360 | 2616 | |
2617 | }; // class pass_predcom | |
2618 | ||
2619 | } // anon namespace | |
2620 | ||
2621 | gimple_opt_pass * | |
2622 | make_pass_predcom (gcc::context *ctxt) | |
2623 | { | |
2624 | return new pass_predcom (ctxt); | |
2625 | } | |
2626 | ||
2627 |