/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"

/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
         {
           a = inv;
           something;
         }
     }

   Where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
         something;
     }  */

/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  gimple stmt;
  struct depend *next;
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in which the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of which we want to move the
				   invariant.  */

  struct loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must also be hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};

/* Maps statements to their lim_aux_data.  */

static struct pointer_map_t *lim_aux_data_map;

/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  gimple stmt;			/* The statement in which it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);

/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);

/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;			/* The memory itself.  */
  unsigned id;			/* ID assigned to the memory reference
				   (its index in memory_accesses.refs_list)  */
  hashval_t hash;		/* Its hash value.  */
  bitmap stored;		/* The set of loops in which this memory
				   location is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
				/* The locations of the accesses.  Vector
				   indexed by the loop number.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;		/* The set of loops in which the memory
				   reference is independent, meaning:
				   If it is stored in the loop, this store
				   is independent of all other loads and
				   stores.
				   If it is only loaded, then it is independent
				   of all stores in the loop.  */
  bitmap dep_loop;		/* The complement of INDEP_LOOP.  */

  bitmap indep_ref;		/* The set of memory references of which
				   this reference is independent.  */
  bitmap dep_ref;		/* The complement of INDEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);

/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of memory references stored in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_stored_in_loop;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;

/* Obstack for the bitmaps in the above data structures.  */
static bitmap_obstack lim_bitmap_obstack;

static bool ref_indep_loop_p (struct loop *, mem_ref_p);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
#define SET_ALWAYS_EXECUTED_IN(BB, VAL) ((BB)->aux = (void *) (VAL))

/* Whether the reference was analyzable.  */
#define MEM_ANALYZABLE(REF) ((REF)->mem != error_mark_node)

static struct lim_aux_data *
init_lim_data (gimple stmt)
{
  void **p = pointer_map_insert (lim_aux_data_map, stmt);

  *p = XCNEW (struct lim_aux_data);
  return (struct lim_aux_data *) *p;
}

static struct lim_aux_data *
get_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return NULL;

  return (struct lim_aux_data *) *p;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}

static void
clear_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return;

  free_lim_aux_data ((struct lim_aux_data *) *p);
  *p = NULL;
}

/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MEM_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has varying offset, it behaves like an index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case STRING_CST:
	case RESULT_DECL:
	case VECTOR_CST:
	case COMPLEX_CST:
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return true;

	case ADDR_EXPR:
	  gcc_assert (is_gimple_min_invariant (*addr_p));
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX2 (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}

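/* An illustrative sketch of a for_each_index client; the callback below
   is hypothetical, not part of this file.  For a reference such as
   a[i].f[j] the walk above invokes CBCK for &j first, then for &i, and
   finally stops at the base VAR_DECL, returning true.

     static bool
     dump_index (tree base ATTRIBUTE_UNUSED, tree *idx,
                 void *data ATTRIBUTE_UNUSED)
     {
       print_generic_expr (stderr, *idx, TDF_SLIM);
       fprintf (stderr, "\n");
       return true;
     }

   called as for_each_index (&ref, dump_index, NULL).  */
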
/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (gimple stmt)
{
  tree lhs;
  enum move_pos ret = MOVE_POSSIBLE;

  if (flag_unswitch_loops
      && gimple_code (stmt) == GIMPLE_COND)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (gimple_code (stmt) == GIMPLE_PHI
      && gimple_phi_num_args (stmt) <= 2
      && !virtual_operand_p (gimple_phi_result (stmt))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt)))
    return MOVE_POSSIBLE;

  if (gimple_get_lhs (stmt) == NULL_TREE)
    return MOVE_IMPOSSIBLE;

  if (gimple_vdef (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_has_side_effects (stmt)
      || stmt_could_throw_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (is_gimple_call (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      ret = MOVE_PRESERVE_EXECUTION;
      lhs = gimple_call_lhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || gimple_could_trap_p (stmt))
    return MOVE_PRESERVE_EXECUTION;

  /* Non-local loads in a transaction cannot be hoisted out.  Well,
     unless the load happens on every path out of the loop, but we
     don't take this into account yet.  */
  if (flag_tm
      && gimple_in_transaction (stmt)
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      if (DECL_P (rhs) && is_global_var (rhs))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "Cannot hoist conditional load of ");
	      print_generic_expr (dump_file, rhs, TDF_SLIM);
	      fprintf (dump_file, " because it is in a transaction.\n");
	    }
	  return MOVE_IMPOSSIBLE;
	}
    }

  return ret;
}

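/* Some illustrative cases for the classification above: an assignment
   a_1 = b_2 + c_3 with an SSA name lhs and no side effects is
   MOVE_POSSIBLE; an integer division a_1 = b_2 / c_3 may trap when c_3
   is zero, so gimple_could_trap_p holds and it is only
   MOVE_PRESERVE_EXECUTION; a store *p_4 = b_2 creates a virtual
   definition and is therefore MOVE_IMPOSSIBLE.  */
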
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  gimple def_stmt;
  basic_block def_bb;
  struct loop *max_loop;
  struct lim_aux_data *lim_data;

  if (!def)
    return superloop_at_depth (loop, 1);

  if (TREE_CODE (def) != SSA_NAME)
    {
      gcc_assert (is_gimple_min_invariant (def));
      return superloop_at_depth (loop, 1);
    }

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  lim_data = get_lim_data (def_stmt);
  if (lim_data != NULL && lim_data->max_loop != NULL)
    max_loop = find_common_loop (max_loop,
				 loop_outer (lim_data->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}

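/* For illustration: given

     k_5 = ...;                 <- defined before the loop nest
     for (...)                  <- loop 1
       for (...)                <- loop 2
         ... use of k_5 ...

   outermost_invariant_loop (k_5, loop 2) returns loop 1.  If k_5 were
   instead defined inside loop 1 (and not hoistable further), the
   function would return loop 2: the value is invariant in loop 2 but
   not in loop 1.  */
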
/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the
   loop if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined to the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to the
   DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *max_loop;
  struct depend *dep;
  struct lim_aux_data *def_data;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += def_data->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}

/* Returns an estimate for a cost of statement STMT.  The values here
   are just ad-hoc constants, similar to costs for inlining.  */

static unsigned
stmt_cost (gimple stmt)
{
  /* Always try to create possibilities for unswitching.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_PHI)
    return LIM_EXPENSIVE;

  /* We should be hoisting calls if possible.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
	return 0;

      return LIM_EXPENSIVE;
    }

  /* Hoisting memory references out should almost surely be a win.  */
  if (gimple_references_memory_p (stmt))
    return LIM_EXPENSIVE;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return 1;

  switch (gimple_assign_rhs_code (stmt))
    {
    case MULT_EXPR:
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case DOT_PROD_EXPR:
    case FMA_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      return LIM_EXPENSIVE;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case WIDEN_LSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Shifts and rotates are usually expensive.  */
      return LIM_EXPENSIVE;

    case CONSTRUCTOR:
      /* Make vector construction cost proportional to the number
	 of elements.  */
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    case SSA_NAME:
    case PAREN_EXPR:
      /* Whether or not something is wrapped inside a PAREN_EXPR
	 should not change move cost.  Nor should an intermediate
	 unpropagated SSA name copy.  */
      return 0;

    default:
      return 1;
    }
}

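/* A note on the magnitudes above: LIM_EXPENSIVE comes from
   --param lim-expensive (20 by default at the time of writing; treat
   the exact number as a tunable rather than a guarantee).  A division
   or multiplication thus meets the LIM_EXPENSIVE threshold checked by
   the callers below on its own, while a statement of cost 1 is only
   moved when some expensive statement depends on it.  */
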
/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP, NULL
   is returned instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
	&& ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}

/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
{
  tree *lhs, *rhs;

  /* Recognize SSA_NAME = MEM and MEM = (SSA_NAME | invariant) patterns.  */
  if (!gimple_assign_single_p (stmt))
    return NULL;

  lhs = gimple_assign_lhs_ptr (stmt);
  rhs = gimple_assign_rhs1_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME && gimple_vuse (stmt))
    {
      *is_store = false;
      return rhs;
    }
  else if (gimple_vdef (stmt)
	   && (TREE_CODE (*rhs) == SSA_NAME || is_gimple_min_invariant (*rhs)))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}

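/* For illustration: the function above recognizes a load like
   tmp_1 = g (an SSA name lhs together with a VUSE) and returns the
   address of the rhs with *IS_STORE set to false; it recognizes a
   store like g = tmp_1 or g = 0 (a VDEF whose rhs is an SSA name or
   invariant) and returns the address of the lhs with *IS_STORE set to
   true.  An aggregate copy a = b matches neither pattern and yields
   NULL.  */
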
/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (gimple stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}

/* From a controlling predicate in DOM determine the arguments from
   the PHI node PHI that are chosen if the predicate evaluates to
   true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if
   they are non-NULL.  Returns true if the arguments can be determined,
   otherwise returns false.  */

static bool
extract_true_false_args_from_phi (basic_block dom, gimple phi,
				  tree *true_arg_p, tree *false_arg_p)
{
  basic_block bb = gimple_bb (phi);
  edge true_edge, false_edge, tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
  else
    return false;
  if (!arg0 || !arg1)
    return false;

  if (true_arg_p)
    *true_arg_p = arg0;
  if (false_arg_p)
    *false_arg_p = arg1;

  return true;
}

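/* The shape handled above, for illustration:

	dom:  if (cond)
	     /         \
       (true edge)   (false edge)
	   bb1           bb2
	     \           /
	bb:  x_3 = PHI <a_1(bb1), b_2(bb2)>

   Here *TRUE_ARG_P is set to a_1 and *FALSE_ARG_P to b_2.  The
   dominance checks also allow an "extended diamond" in which bb1 or
   bb2 stands for a larger single-entry region dominated by the
   respective edge destination.  */
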
/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  It also fills other related
   information to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (gimple stmt, bool must_preserve_exec)
{
  basic_block bb = gimple_bb (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = get_lim_data (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      use_operand_p use_p;
      unsigned min_cost = UINT_MAX;
      unsigned total_cost = 0;
      struct lim_aux_data *def_data;

      /* We will end up promoting dependencies to be unconditionally
	 evaluated.  For this reason the PHI cost (and thus the
	 cost we remove from the loop by doing the invariant motion)
	 is that of the cheapest PHI argument dependency chain.  */
      FOR_EACH_PHI_ARG (use_p, stmt, iter, SSA_OP_USE)
	{
	  val = USE_FROM_PTR (use_p);
	  if (TREE_CODE (val) != SSA_NAME)
	    continue;
	  if (!add_dependency (val, lim_data, loop, false))
	    return false;
	  def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
	  if (def_data)
	    {
	      min_cost = MIN (min_cost, def_data->cost);
	      total_cost += def_data->cost;
	    }
	}

      lim_data->cost += min_cost;

      if (gimple_phi_num_args (stmt) > 1)
	{
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
	  gimple cond;
	  if (gsi_end_p (gsi_last_bb (dom)))
	    return false;
	  cond = gsi_stmt (gsi_last_bb (dom));
	  if (gimple_code (cond) != GIMPLE_COND)
	    return false;
	  /* Verify that this is an extended form of a diamond and
	     the PHI arguments are completely controlled by the
	     predicate in DOM.  */
	  if (!extract_true_false_args_from_phi (dom, stmt, NULL, NULL))
	    return false;

	  /* Fold in dependencies and cost of the condition.  */
	  FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	      def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
	      if (def_data)
		total_cost += def_data->cost;
	    }

	  /* We want to avoid unconditionally executing very expensive
	     operations.  As costs for our dependencies cannot be
	     negative, just claim we are not invariant for this case.
	     We also are not sure whether the control-flow inside the
	     loop will vanish.  */
	  if (total_cost - min_cost >= 2 * LIM_EXPENSIVE
	      && !(min_cost != 0
		   && total_cost / min_cost <= 2))
	    return false;

	  /* Assume that the control-flow in the loop will vanish.
	     ??? We should verify this and not artificially increase
	     the cost if that is not the case.  */
	  lim_data->cost += stmt_cost (stmt);
	}

      return true;
    }
  else
    FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
      if (!add_dependency (val, lim_data, loop, true))
	return false;

  if (gimple_vuse (stmt))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
	{
	  lim_data->max_loop
	    = outermost_indep_loop (lim_data->max_loop, loop, ref);
	  if (!lim_data->max_loop)
	    return false;
	}
      else
	{
	  if ((val = gimple_vuse (stmt)) != NULL_TREE)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	    }
	}
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}

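/* A worked example of the PHI cost test above, with invented numbers
   and assuming the default LIM_EXPENSIVE of 20: for a two-argument PHI
   whose argument dependency chains cost 2 and 50, min_cost = 2 and
   total_cost = 52; hoisting would unconditionally evaluate both
   chains, total_cost - min_cost = 50 >= 2 * LIM_EXPENSIVE = 40 and
   total_cost / min_cost = 26 > 2, so the PHI is rejected.  For chains
   costing 25 and 28 the difference is 3, far below 40, and the PHI is
   accepted.  */
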
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
  struct depend *dep;
  struct lim_aux_data *lim_data;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  lim_data = get_lim_data (stmt);
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (lim_data->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
	      || flow_loop_nested_p (lim_data->max_loop, level));

  lim_data->tgt_loop = level;
  for (dep = lim_data->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}

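/* For illustration: if a statement S that uses a_1 and b_2 is hoisted
   to loop LEVEL, and the definitions of a_1 and b_2 are recorded in
   S's depends list, the recursion above forces those definitions (and
   transitively their own dependencies) out to at least LEVEL as well,
   so a hoisted statement never precedes the statements computing its
   operands.  */
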
/* Determines an outermost loop from which we want to hoist the statement STMT.
   For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (gimple stmt)
{
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}

/* Returns true if STMT is a call that has side effects.  */

static bool
nonpure_call_p (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  return gimple_has_side_effects (stmt);
}

/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static gimple
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
  gimple stmt, stmt1, stmt2;
  tree name, lhs, type;
  tree real_one;
  gimple_stmt_iterator gsi;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);

  real_one = build_one_cst (type);

  name = make_temp_ssa_name (type, NULL, "reciptmp");
  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR, name, real_one,
					gimple_assign_rhs2 (stmt));

  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
					gimple_assign_rhs1 (stmt));

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  gsi = *bsi;
  gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
  gsi_replace (&gsi, stmt2, true);

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}

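/* The effect of rewrite_reciprocal, for illustration: with c loop
   invariant,

     for (i = 0; i < n; i++)
       x[i] = a[i] / c;

   conceptually becomes

     reciptmp_1 = 1.0 / c;      <- invariant, returned for hoisting
     for (i = 0; i < n; i++)
       x[i] = a[i] * reciptmp_1;

   This is only valid under -funsafe-math-optimizations, which the
   caller checks.  */
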
/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static gimple
rewrite_bittest (gimple_stmt_iterator *bsi)
{
  gimple stmt, use_stmt, stmt1, stmt2;
  tree lhs, name, t, a, b;
  use_operand_p use;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || gimple_code (use_stmt) != GIMPLE_COND)
    return stmt;
  if (gimple_cond_lhs (use_stmt) != lhs
      || (gimple_cond_code (use_stmt) != NE_EXPR
	  && gimple_cond_code (use_stmt) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (use_stmt)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
    {
      t = gimple_assign_rhs1 (stmt1);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
	return stmt;
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
    return stmt;

  a = gimple_assign_rhs1 (stmt1);
  b = gimple_assign_rhs2 (stmt1);

  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
    {
      gimple_stmt_iterator rsi;

      /* 1 << B */
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp");
      stmt1 = gimple_build_assign (name, t);

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp");
      stmt2 = gimple_build_assign (name, t);

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));

      /* Don't use gsi_replace here, none of the new assignments sets
	 the variable originally set in stmt.  Move bsi to stmt1, and
	 then remove the original stmt, so that we get a chance to
	 retain debug info for it.  */
      rsi = *bsi;
      gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
      gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
      gsi_remove (&rsi, true);

      return stmt1;
    }

  return stmt;
}
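
/* The effect of rewrite_bittest, for illustration, when B is loop
   invariant but A is not:

     tmp_1 = A >> B;
     tmp_2 = tmp_1 & 1;
     if (tmp_2 != 0) ...

   becomes

     shifttmp_3 = 1 << B;      <- invariant, hoistable
     shifttmp_4 = A & shifttmp_3;
     if (shifttmp_4 != 0) ...

   replacing a per-iteration shift of the varying A by an AND against
   a hoisted mask.  */
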
/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  gimple_stmt_iterator bsi;
  gimple stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  /* Look at PHI nodes, but only if there are at most two of them.
     ??? We could relax this further by post-processing the inserted
     code and transforming adjacent cond-exprs with the same predicate
     to control flow again.  */
  bsi = gsi_start_phis (bb);
  if (!gsi_end_p (bsi)
      && ((gsi_next (&bsi), gsi_end_p (bsi))
	  || (gsi_next (&bsi), gsi_end_p (bsi))))
    for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
      {
	stmt = gsi_stmt (bsi);

	pos = movement_possibility (stmt);
	if (pos == MOVE_IMPOSSIBLE)
	  continue;

	lim_data = init_lim_data (stmt);
	lim_data->always_executed_in = outermost;

	if (!determine_max_movement (stmt, false))
	  {
	    lim_data->max_loop = NULL;
	    continue;
	  }

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    print_gimple_stmt (dump_file, stmt, 2, 0);
	    fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		     loop_depth (lim_data->max_loop),
		     lim_data->cost);
	  }

	if (lim_data->cost >= LIM_EXPENSIVE)
	  set_profitable_level (stmt);
      }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      stmt = gsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  /* Make sure to note always_executed_in for stores to make
	     store-motion work.  */
	  else if (stmt_makes_single_store (stmt))
	    {
	      struct lim_aux_data *lim_data = init_lim_data (stmt);
	      lim_data->always_executed_in = outermost;
	    }
	  continue;
	}

      if (is_gimple_assign (stmt)
	  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
	      == GIMPLE_BINARY_RHS))
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree op1 = gimple_assign_rhs2 (stmt);
	  struct loop *ol1 = outermost_invariant_loop (op1,
					loop_containing_stmt (stmt));

	  /* If divisor is invariant, convert a/b to a*(1/b), allowing
	     the reciprocal to be hoisted out of the loop, saving an
	     expensive divide.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && ol1 != NULL
	      && outermost_invariant_loop (op0, ol1) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B), allowing the bit mask to be hoisted out of the
	     loop, saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
	      && integer_onep (op1)
	      && TREE_CODE (op0) == SSA_NAME
	      && has_single_use (op0))
	    stmt = rewrite_bittest (&bsi);
	}

      lim_data = init_lim_data (stmt);
      lim_data->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  lim_data->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_gimple_stmt (dump_file, stmt, 2, 0);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (lim_data->max_loop),
		   lim_data->cost);
	}

      if (lim_data->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}

/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data,
			basic_block bb)
{
  struct loop *level;
  gimple_stmt_iterator bsi;
  gimple stmt;
  unsigned cost = 0;
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
    {
      gimple new_stmt;
      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving PHI node\n");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      if (gimple_phi_num_args (stmt) == 1)
	{
	  tree arg = PHI_ARG_DEF (stmt, 0);
	  new_stmt = gimple_build_assign_with_ops (TREE_CODE (arg),
						   gimple_phi_result (stmt),
						   arg, NULL_TREE);
	  SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
	}
      else
	{
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
	  gimple cond = gsi_stmt (gsi_last_bb (dom));
	  tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
	  /* Get the PHI arguments corresponding to the true and false
	     edges of COND.  */
	  extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1);
	  gcc_assert (arg0 && arg1);
	  t = build2 (gimple_cond_code (cond), boolean_type_node,
		      gimple_cond_lhs (cond), gimple_cond_rhs (cond));
	  new_stmt = gimple_build_assign_with_ops (COND_EXPR,
						   gimple_phi_result (stmt),
						   t, arg0, arg1);
	  SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
	  *((unsigned int *)(dw_data->global_data)) |= TODO_cleanup_cfg;
	}
      gsi_insert_on_edge (loop_preheader_edge (level), new_stmt);
      remove_phi_node (&bsi, false);
    }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
    {
      edge e;

      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed it here to force its operands to be moved if necessary.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      e = loop_preheader_edge (level);
      gcc_assert (!gimple_vdef (stmt));
      if (gimple_vuse (stmt))
	{
	  /* The new VUSE is the one from the virtual PHI in the loop
	     header or the one already present.  */
	  gimple_stmt_iterator gsi2;
	  for (gsi2 = gsi_start_phis (e->dest);
	       !gsi_end_p (gsi2); gsi_next (&gsi2))
	    {
	      gimple phi = gsi_stmt (gsi2);
	      if (virtual_operand_p (gimple_phi_result (phi)))
		{
		  gimple_set_vuse (stmt, PHI_ARG_DEF_FROM_EDGE (phi, e));
		  break;
		}
	    }
	}
      gsi_remove (&bsi, false);
      gsi_insert_on_edge (e, stmt);
    }
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static unsigned int
move_computations (void)
{
  struct dom_walk_data walk_data;
  unsigned int todo = 0;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.global_data = &todo;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  gsi_commit_edge_inserts ();
  if (need_ssa_update_p (cfun))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);

  return todo;
}

/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      max_loop = outermost_invariant_loop (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}

/* If OP is an SSA_NAME, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which EXPR is used.  */

static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
  gimple stmt;

  if (!op
      || is_gimple_min_invariant (op))
    return;

  gcc_assert (TREE_CODE (op) == SSA_NAME);

  stmt = SSA_NAME_DEF_STMT (op);
  if (gimple_nop_p (stmt))
    return;

  set_level (stmt, orig_loop, loop);
}

/* Forces statements defining invariants in REF (and *INDEX) to be moved out
   of the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback
   for for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);

  return true;
}

063a8bce | 1444 | /* A hash function for struct mem_ref object OBJ. */ |
1445 | ||
1446 | static hashval_t | |
1447 | memref_hash (const void *obj) | |
1448 | { | |
45ba1503 | 1449 | const struct mem_ref *const mem = (const struct mem_ref *) obj; |
063a8bce | 1450 | |
1451 | return mem->hash; | |
1452 | } | |
1453 | ||
1454 | /* An equality function for struct mem_ref object OBJ1 with | |
1455 | memory reference OBJ2. */ | |
1456 | ||
1457 | static int | |
1458 | memref_eq (const void *obj1, const void *obj2) | |
1459 | { | |
45ba1503 | 1460 | const struct mem_ref *const mem1 = (const struct mem_ref *) obj1; |
063a8bce | 1461 | |
45ba1503 | 1462 | return operand_equal_p (mem1->mem, (const_tree) obj2, 0); |
063a8bce | 1463 | } |
1464 | ||
1465 | /* Releases list of memory reference locations ACCS. */ | |
1466 | ||
1467 | static void | |
1468 | free_mem_ref_locs (mem_ref_locs_p accs) | |
1469 | { | |
1470 | unsigned i; | |
1471 | mem_ref_loc_p loc; | |
1472 | ||
1473 | if (!accs) | |
1474 | return; | |
1475 | ||
48148244 | 1476 | FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc) |
063a8bce | 1477 | free (loc); |
1478 | VEC_free (mem_ref_loc_p, heap, accs->locs); | |
1479 | free (accs); | |
1480 | } | |
1481 | ||
1482 | /* A function to free the mem_ref object OBJ. */ | |
1483 | ||
1484 | static void | |
83b709f2 | 1485 | memref_free (struct mem_ref *mem) |
063a8bce | 1486 | { |
063a8bce | 1487 | unsigned i; |
1488 | mem_ref_locs_p accs; | |
1489 | ||
48148244 | 1490 | FOR_EACH_VEC_ELT (mem_ref_locs_p, mem->accesses_in_loop, i, accs) |
063a8bce | 1491 | free_mem_ref_locs (accs); |
1492 | VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop); | |
1493 | ||
063a8bce | 1494 | free (mem); |
1495 | } | |
1496 | ||
1497 | /* Allocates and returns a memory reference description for MEM whose hash | |
1498 | value is HASH and id is ID. */ | |
1499 | ||
1500 | static mem_ref_p | |
1501 | mem_ref_alloc (tree mem, unsigned hash, unsigned id) | |
1502 | { | |
1503 | mem_ref_p ref = XNEW (struct mem_ref); | |
1504 | ref->mem = mem; | |
1505 | ref->id = id; | |
1506 | ref->hash = hash; | |
4fb07d00 | 1507 | ref->stored = BITMAP_ALLOC (&lim_bitmap_obstack); |
1508 | ref->indep_loop = BITMAP_ALLOC (&lim_bitmap_obstack); | |
1509 | ref->dep_loop = BITMAP_ALLOC (&lim_bitmap_obstack); | |
1510 | ref->indep_ref = BITMAP_ALLOC (&lim_bitmap_obstack); | |
1511 | ref->dep_ref = BITMAP_ALLOC (&lim_bitmap_obstack); | |
063a8bce | 1512 | ref->accesses_in_loop = NULL; |
063a8bce | 1513 | |
1514 | return ref; | |
1515 | } | |
1516 | ||
1517 | /* Allocates and returns a new, empty list of locations. */
1518 | ||
1519 | static mem_ref_locs_p | |
1520 | mem_ref_locs_alloc (void) | |
1521 | { | |
1522 | mem_ref_locs_p accs = XNEW (struct mem_ref_locs); | |
1523 | accs->locs = NULL; | |
1524 | return accs; | |
1525 | } | |
1526 | ||
1527 | /* Records memory reference location *LOC in LOOP to the memory reference | |
1528 | description REF. The reference occurs in statement STMT. */ | |
7d23383d | 1529 | |
1530 | static void | |
75a70cf9 | 1531 | record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc) |
7d23383d | 1532 | { |
063a8bce | 1533 | mem_ref_loc_p aref = XNEW (struct mem_ref_loc); |
1534 | mem_ref_locs_p accs; | |
1535 | bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num); | |
1536 | ||
1537 | if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop) | |
1538 | <= (unsigned) loop->num) | |
1539 | VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop, | |
1540 | loop->num + 1); | |
1541 | accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num); | |
1542 | if (!accs) | |
1543 | { | |
1544 | accs = mem_ref_locs_alloc (); | |
1545 | VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs); | |
1546 | } | |
7d23383d | 1547 | |
1548 | aref->stmt = stmt; | |
063a8bce | 1549 | aref->ref = loc; |
7d23383d | 1550 | |
063a8bce | 1551 | VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref); |
1552 | bitmap_set_bit (ril, ref->id); | |
7d23383d | 1553 | } |
1554 | ||
063a8bce | 1555 | /* Marks reference REF as stored in LOOP and in all loops enclosing it. */
7d23383d | 1556 | |
1557 | static void | |
063a8bce | 1558 | mark_ref_stored (mem_ref_p ref, struct loop *loop) |
7d23383d | 1559 | { |
063a8bce | 1560 | for (; |
1561 | loop != current_loops->tree_root | |
1562 | && !bitmap_bit_p (ref->stored, loop->num); | |
1563 | loop = loop_outer (loop)) | |
1564 | bitmap_set_bit (ref->stored, loop->num); | |
1565 | } | |
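/* For example, if REF is stored to in a loop nested as 1 -> 2 -> 3, a call
   with the innermost loop sets the "stored" bit for loops 3, 2 and 1.  The
   walk stops as soon as a bit is already set, because an earlier call then
   already marked all the enclosing loops as well.  */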
1566 | ||
1567 | /* Gathers memory references in statement STMT in LOOP, storing the | |
1568 | information about them in the memory_accesses structure. Statements
1569 | that access memory in an unrecognized way are recorded there as
1570 | unanalyzed references as well. */
1571 | ||
1572 | static void | |
75a70cf9 | 1573 | gather_mem_refs_stmt (struct loop *loop, gimple stmt) |
063a8bce | 1574 | { |
1575 | tree *mem = NULL; | |
1576 | hashval_t hash; | |
1577 | PTR *slot; | |
1578 | mem_ref_p ref; | |
063a8bce | 1579 | bool is_stored; |
063a8bce | 1580 | unsigned id; |
7d23383d | 1581 | |
dd277d48 | 1582 | if (!gimple_vuse (stmt)) |
063a8bce | 1583 | return; |
1584 | ||
1585 | mem = simple_mem_ref_in_stmt (stmt, &is_stored); | |
1586 | if (!mem) | |
0766b2c0 | 1587 | { |
1588 | id = VEC_length (mem_ref_p, memory_accesses.refs_list); | |
1589 | ref = mem_ref_alloc (error_mark_node, 0, id); | |
1590 | VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref); | |
1591 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1592 | { | |
1593 | fprintf (dump_file, "Unanalyzed memory reference %u: ", id); | |
1594 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); | |
1595 | } | |
1596 | if (gimple_vdef (stmt)) | |
1597 | mark_ref_stored (ref, loop); | |
1598 | record_mem_ref_loc (ref, loop, stmt, mem); | |
1599 | return; | |
1600 | } | |
063a8bce | 1601 | |
1602 | hash = iterative_hash_expr (*mem, 0); | |
1603 | slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT); | |
1604 | ||
1605 | if (*slot) | |
1606 | { | |
45ba1503 | 1607 | ref = (mem_ref_p) *slot; |
063a8bce | 1608 | id = ref->id; |
1609 | } | |
1610 | else | |
7d23383d | 1611 | { |
063a8bce | 1612 | id = VEC_length (mem_ref_p, memory_accesses.refs_list); |
1613 | ref = mem_ref_alloc (*mem, hash, id); | |
1614 | VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref); | |
1615 | *slot = ref; | |
1616 | ||
1617 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1618 | { | |
1619 | fprintf (dump_file, "Memory reference %u: ", id); | |
1620 | print_generic_expr (dump_file, ref->mem, TDF_SLIM); | |
1621 | fprintf (dump_file, "\n"); | |
1622 | } | |
7d23383d | 1623 | } |
61025ec0 | 1624 | |
063a8bce | 1625 | if (is_stored) |
1626 | mark_ref_stored (ref, loop); | |
1627 | ||
063a8bce | 1628 | record_mem_ref_loc (ref, loop, stmt, mem); |
1629 | return; | |
7d23383d | 1630 | } |
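/* E.g. an assignment like

     x = *p;

   yields an analyzable reference for *p, whereas a statement such as a call
   that has virtual operands but no simple memory reference is recorded as an
   unanalyzed reference (its expression is error_mark_node), so that it
   conservatively conflicts with every other reference.  */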
1631 | ||
063a8bce | 1632 | /* Gathers memory references in loops. */ |
7d23383d | 1633 | |
1634 | static void | |
063a8bce | 1635 | gather_mem_refs_in_loops (void) |
7d23383d | 1636 | { |
75a70cf9 | 1637 | gimple_stmt_iterator bsi; |
063a8bce | 1638 | basic_block bb; |
1639 | struct loop *loop; | |
1640 | loop_iterator li; | |
063a8bce | 1641 | bitmap lrefs, alrefs, alrefso; |
1642 | ||
1643 | FOR_EACH_BB (bb) | |
1644 | { | |
1645 | loop = bb->loop_father; | |
1646 | if (loop == current_loops->tree_root) | |
1647 | continue; | |
1648 | ||
75a70cf9 | 1649 | for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
1650 | gather_mem_refs_stmt (loop, gsi_stmt (bsi)); | |
063a8bce | 1651 | } |
1652 | ||
0766b2c0 | 1653 | /* Propagate the information about accessed memory references up |
1654 | the loop hierarchy. */ | |
063a8bce | 1655 | FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST) |
1656 | { | |
1657 | lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num); | |
1658 | alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num); | |
1659 | bitmap_ior_into (alrefs, lrefs); | |
1660 | ||
1661 | if (loop_outer (loop) == current_loops->tree_root) | |
1662 | continue; | |
1663 | ||
063a8bce | 1664 | alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop, |
1665 | loop_outer (loop)->num); | |
1666 | bitmap_ior_into (alrefso, alrefs); | |
1667 | } | |
1668 | } | |
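/* A small sketch of the propagation: with

     loop 1:
       *p = ...;        <-- ref 0, so refs_in_loop[1] = {0}
       loop 2:
         ... = *q;      <-- ref 1, so refs_in_loop[2] = {1}

   the innermost-first walk leaves all_refs_in_loop[2] = {1} and
   all_refs_in_loop[1] = {0, 1}.  */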
1669 | ||
063a8bce | 1670 | /* For each reference in LOOP that is stored to, set its bit in the
1671 | all_refs_stored_in_loop bitmap of LOOP and of every loop enclosing it. */
1672 | ||
1673 | static void | |
1674 | create_vop_ref_mapping_loop (struct loop *loop) | |
1675 | { | |
1676 | bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num); | |
1677 | struct loop *sloop; | |
1678 | bitmap_iterator bi; | |
1679 | unsigned i; | |
1680 | mem_ref_p ref; | |
1681 | ||
1682 | EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi) | |
1683 | { | |
1684 | ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | |
0766b2c0 | 1685 | for (sloop = loop; sloop != current_loops->tree_root; |
1686 | sloop = loop_outer (sloop)) | |
1687 | if (bitmap_bit_p (ref->stored, loop->num)) | |
1688 | { | |
1689 | bitmap refs_stored | |
1690 | = VEC_index (bitmap, memory_accesses.all_refs_stored_in_loop, | |
1691 | sloop->num); | |
1692 | bitmap_set_bit (refs_stored, ref->id); | |
1693 | } | |
063a8bce | 1694 | } |
1695 | } | |
1696 | ||
1697 | /* For each loop, record the memory references that are stored in it
1698 | or in one of its subloops. */
1699 | ||
1700 | static void | |
1701 | create_vop_ref_mapping (void) | |
1702 | { | |
1703 | loop_iterator li; | |
1704 | struct loop *loop; | |
1705 | ||
1706 | FOR_EACH_LOOP (li, loop, 0) | |
1707 | { | |
1708 | create_vop_ref_mapping_loop (loop); | |
1709 | } | |
1710 | } | |
1711 | ||
1712 | /* Gathers information about memory accesses in the loops. */ | |
1713 | ||
1714 | static void | |
1715 | analyze_memory_references (void) | |
1716 | { | |
1717 | unsigned i; | |
1718 | bitmap empty; | |
063a8bce | 1719 | |
83b709f2 | 1720 | memory_accesses.refs = htab_create (100, memref_hash, memref_eq, NULL); |
063a8bce | 1721 | memory_accesses.refs_list = NULL; |
1722 | memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap, | |
1723 | number_of_loops ()); | |
1724 | memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap, | |
1725 | number_of_loops ()); | |
0766b2c0 | 1726 | memory_accesses.all_refs_stored_in_loop = VEC_alloc (bitmap, heap, |
1727 | number_of_loops ()); | |
063a8bce | 1728 | |
1729 | for (i = 0; i < number_of_loops (); i++) | |
1730 | { | |
4fb07d00 | 1731 | empty = BITMAP_ALLOC (&lim_bitmap_obstack); |
063a8bce | 1732 | VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty); |
4fb07d00 | 1733 | empty = BITMAP_ALLOC (&lim_bitmap_obstack); |
063a8bce | 1734 | VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty); |
4fb07d00 | 1735 | empty = BITMAP_ALLOC (&lim_bitmap_obstack); |
0766b2c0 | 1736 | VEC_quick_push (bitmap, memory_accesses.all_refs_stored_in_loop, empty); |
063a8bce | 1737 | } |
1738 | ||
1739 | memory_accesses.ttae_cache = NULL; | |
1740 | ||
1741 | gather_mem_refs_in_loops (); | |
1742 | create_vop_ref_mapping (); | |
1743 | } | |
1744 | ||
063a8bce | 1745 | /* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in |
1746 | tree_to_aff_combination_expand. */ | |
1747 | ||
1748 | static bool | |
1749 | mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache) | |
1750 | { | |
1751 | /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same | |
1752 | object and their offset differ in such a way that the locations cannot | |
1753 | overlap, then they cannot alias. */ | |
063a8bce | 1754 | double_int size1, size2; |
3391cd1e | 1755 | aff_tree off1, off2; |
063a8bce | 1756 | |
3391cd1e | 1757 | /* Perform basic offset and type-based disambiguation. */ |
1758 | if (!refs_may_alias_p (mem1, mem2)) | |
063a8bce | 1759 | return false; |
7d23383d | 1760 | |
063a8bce | 1761 | /* The expansion of addresses may be a bit expensive, thus we only do |
1762 | the check at -O2 and higher optimization levels. */ | |
1763 | if (optimize < 2) | |
1764 | return true; | |
1765 | ||
1766 | get_inner_reference_aff (mem1, &off1, &size1); | |
1767 | get_inner_reference_aff (mem2, &off2, &size2); | |
1768 | aff_combination_expand (&off1, ttae_cache); | |
1769 | aff_combination_expand (&off2, ttae_cache); | |
1770 | aff_combination_scale (&off1, double_int_minus_one); | |
1771 | aff_combination_add (&off2, &off1); | |
1772 | ||
5fc88ffd | 1773 | if (aff_comb_cannot_overlap_p (&off2, size1, size2)) |
063a8bce | 1774 | return false; |
1775 | ||
1776 | return true; | |
1777 | } | |
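/* A sketch of the affine check: for a[i] and a[i + 1] with 4-byte elements,
   the expanded offsets are 4*i and 4*i + 4.  After scaling the first by -1
   and adding it to the second, OFF2 holds the constant difference 4; the
   intervals [0, 4) and [4, 8) cannot overlap, so the references are reported
   independent -- a case the purely symbolic refs_may_alias_p check alone may
   not be able to disambiguate.  */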
1778 | ||
1779 | /* Rewrites the memory reference at location LOC with TMP_VAR. */
1780 | ||
1781 | static void | |
1782 | rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var) | |
1783 | { | |
063a8bce | 1784 | *loc->ref = tmp_var; |
1785 | update_stmt (loc->stmt); | |
1786 | } | |
1787 | ||
1788 | /* Adds all locations of REF in LOOP and its subloops to LOCS. */ | |
1789 | ||
1790 | static void | |
1791 | get_all_locs_in_loop (struct loop *loop, mem_ref_p ref, | |
1792 | VEC (mem_ref_loc_p, heap) **locs) | |
1793 | { | |
1794 | mem_ref_locs_p accs; | |
1795 | unsigned i; | |
1796 | mem_ref_loc_p loc; | |
1797 | bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, | |
1798 | loop->num); | |
1799 | struct loop *subloop; | |
1800 | ||
1801 | if (!bitmap_bit_p (refs, ref->id)) | |
1802 | return; | |
1803 | ||
1804 | if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop) | |
1805 | > (unsigned) loop->num) | |
1806 | { | |
1807 | accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num); | |
1808 | if (accs) | |
1809 | { | |
48148244 | 1810 | FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc) |
063a8bce | 1811 | VEC_safe_push (mem_ref_loc_p, heap, *locs, loc); |
1812 | } | |
1813 | } | |
1814 | ||
1815 | for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) | |
1816 | get_all_locs_in_loop (subloop, ref, locs); | |
1817 | } | |
1818 | ||
1819 | /* Rewrites all references to REF in LOOP by variable TMP_VAR. */ | |
1820 | ||
1821 | static void | |
1822 | rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var) | |
1823 | { | |
1824 | unsigned i; | |
1825 | mem_ref_loc_p loc; | |
1826 | VEC (mem_ref_loc_p, heap) *locs = NULL; | |
1827 | ||
1828 | get_all_locs_in_loop (loop, ref, &locs); | |
48148244 | 1829 | FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc) |
063a8bce | 1830 | rewrite_mem_ref_loc (loc, tmp_var); |
1831 | VEC_free (mem_ref_loc_p, heap, locs); | |
7d23383d | 1832 | } |
1833 | ||
840580de | 1834 | /* The name and the length of the currently generated variable |
1835 | for lsm. */ | |
1836 | #define MAX_LSM_NAME_LENGTH 40 | |
1837 | static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1]; | |
1838 | static int lsm_tmp_name_length; | |
1839 | ||
1840 | /* Adds S to lsm_tmp_name. */ | |
1841 | ||
1842 | static void | |
1843 | lsm_tmp_name_add (const char *s) | |
1844 | { | |
1845 | int l = strlen (s) + lsm_tmp_name_length; | |
1846 | if (l > MAX_LSM_NAME_LENGTH) | |
1847 | return; | |
1848 | ||
1849 | strcpy (lsm_tmp_name + lsm_tmp_name_length, s); | |
1850 | lsm_tmp_name_length = l; | |
1851 | } | |
1852 | ||
1853 | /* Stores the name for the temporary variable that replaces REF into
1854 | lsm_tmp_name. */ | |
1855 | ||
1856 | static void | |
1857 | gen_lsm_tmp_name (tree ref) | |
1858 | { | |
1859 | const char *name; | |
1860 | ||
1861 | switch (TREE_CODE (ref)) | |
1862 | { | |
182cf5a9 | 1863 | case MEM_REF: |
58a7eead | 1864 | case TARGET_MEM_REF: |
840580de | 1865 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); |
1866 | lsm_tmp_name_add ("_"); | |
1867 | break; | |
1868 | ||
182cf5a9 | 1869 | case ADDR_EXPR: |
1870 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1871 | break; | |
1872 | ||
840580de | 1873 | case BIT_FIELD_REF: |
1874 | case VIEW_CONVERT_EXPR: | |
1875 | case ARRAY_RANGE_REF: | |
1876 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1877 | break; | |
1878 | ||
1879 | case REALPART_EXPR: | |
1880 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1881 | lsm_tmp_name_add ("_RE"); | |
1882 | break; | |
48e1416a | 1883 | |
840580de | 1884 | case IMAGPART_EXPR: |
1885 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1886 | lsm_tmp_name_add ("_IM"); | |
1887 | break; | |
1888 | ||
1889 | case COMPONENT_REF: | |
1890 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1891 | lsm_tmp_name_add ("_"); | |
1892 | name = get_name (TREE_OPERAND (ref, 1)); | |
1893 | if (!name) | |
1894 | name = "F"; | |
840580de | 1895 | lsm_tmp_name_add (name); |
70d42340 | 1896 | break; |
840580de | 1897 | |
1898 | case ARRAY_REF: | |
1899 | gen_lsm_tmp_name (TREE_OPERAND (ref, 0)); | |
1900 | lsm_tmp_name_add ("_I"); | |
1901 | break; | |
1902 | ||
1903 | case SSA_NAME: | |
840580de | 1904 | case VAR_DECL: |
1905 | case PARM_DECL: | |
1906 | name = get_name (ref); | |
1907 | if (!name) | |
1908 | name = "D"; | |
1909 | lsm_tmp_name_add (name); | |
1910 | break; | |
1911 | ||
1912 | case STRING_CST: | |
1913 | lsm_tmp_name_add ("S"); | |
1914 | break; | |
1915 | ||
1916 | case RESULT_DECL: | |
1917 | lsm_tmp_name_add ("R"); | |
1918 | break; | |
1919 | ||
869bac23 | 1920 | case INTEGER_CST: |
1921 | /* Nothing. */ | |
1922 | break; | |
1923 | ||
840580de | 1924 | default: |
1925 | gcc_unreachable (); | |
1926 | } | |
1927 | } | |
1928 | ||
1929 | /* Determines the name for the temporary variable that replaces REF.
ad4a85ad | 1930 | The name is accumulated into the lsm_tmp_name variable. |
1931 | N is appended to the name if it is a single decimal digit. */
840580de | 1932 | |
ad4a85ad | 1933 | char * |
1934 | get_lsm_tmp_name (tree ref, unsigned n) | |
840580de | 1935 | { |
ad4a85ad | 1936 | char ns[2]; |
1937 | ||
840580de | 1938 | lsm_tmp_name_length = 0; |
1939 | gen_lsm_tmp_name (ref); | |
1940 | lsm_tmp_name_add ("_lsm"); | |
ad4a85ad | 1941 | if (n < 10) |
1942 | { | |
1943 | ns[0] = '0' + n; | |
1944 | ns[1] = 0; | |
1945 | lsm_tmp_name_add (ns); | |
1946 | } | |
840580de | 1947 | return lsm_tmp_name; |
1948 | } | |
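/* For instance, a reference such as x->a[i].b would produce a name along the
   lines of "x__a_I_b_lsm" -- an underscore for the dereference, the field
   name "a", "_I" for the array index, the field name "b" and the "_lsm"
   suffix -- with the digit N appended when N is below ten.  */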
1949 | ||
61025ec0 | 1950 | struct prev_flag_edges { |
1951 | /* Edge to insert new flag comparison code. */ | |
1952 | edge append_cond_position; | |
1953 | ||
1954 | /* Edge for fall through from previous flag comparison. */ | |
1955 | edge last_cond_fallthru; | |
1956 | }; | |
1957 | ||
1958 | /* Helper function for execute_sm. Emit code to store TMP_VAR into | |
1959 | MEM along edge EX. | |
1960 | ||
1961 | The store is only done if MEM has changed. We do this so no | |
1962 | changes to MEM occur on code paths that did not originally store | |
1963 | into it. | |
1964 | ||
1965 | The common case for execute_sm will transform: | |
1966 | ||
1967 | for (...) { | |
1968 | if (foo) | |
1969 | stuff; | |
1970 | else | |
1971 | MEM = TMP_VAR; | |
1972 | } | |
1973 | ||
1974 | into: | |
1975 | ||
1976 | lsm = MEM; | |
1977 | for (...) { | |
1978 | if (foo) | |
1979 | stuff; | |
1980 | else | |
1981 | lsm = TMP_VAR; | |
1982 | } | |
1983 | MEM = lsm; | |
1984 | ||
1985 | This function will generate: | |
1986 | ||
1987 | lsm = MEM; | |
1988 | ||
1989 | lsm_flag = false; | |
1990 | ... | |
1991 | for (...) { | |
1992 | if (foo) | |
1993 | stuff; | |
1994 | else { | |
1995 | lsm = TMP_VAR; | |
1996 | lsm_flag = true; | |
1997 | } | |
1998 | } | |
1999 | if (lsm_flag) <-- | |
2000 | MEM = lsm; <-- | |
2001 | */ | |
2002 | ||
2003 | static void | |
2004 | execute_sm_if_changed (edge ex, tree mem, tree tmp_var, tree flag) | |
2005 | { | |
2006 | basic_block new_bb, then_bb, old_dest; | |
2007 | bool loop_has_only_one_exit; | |
2008 | edge then_old_edge, orig_ex = ex; | |
2009 | gimple_stmt_iterator gsi; | |
2010 | gimple stmt; | |
2011 | struct prev_flag_edges *prev_edges = (struct prev_flag_edges *) ex->aux; | |
2012 | ||
2013 | /* ?? Insert store after previous store if applicable. See note | |
2014 | below. */ | |
2015 | if (prev_edges) | |
2016 | ex = prev_edges->append_cond_position; | |
2017 | ||
2018 | loop_has_only_one_exit = single_pred_p (ex->dest); | |
2019 | ||
2020 | if (loop_has_only_one_exit) | |
2021 | ex = split_block_after_labels (ex->dest); | |
2022 | ||
2023 | old_dest = ex->dest; | |
2024 | new_bb = split_edge (ex); | |
2025 | then_bb = create_empty_bb (new_bb); | |
2026 | if (current_loops && new_bb->loop_father) | |
2027 | add_bb_to_loop (then_bb, new_bb->loop_father); | |
2028 | ||
2029 | gsi = gsi_start_bb (new_bb); | |
2030 | stmt = gimple_build_cond (NE_EXPR, flag, boolean_false_node, | |
2031 | NULL_TREE, NULL_TREE); | |
2032 | gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING); | |
2033 | ||
2034 | gsi = gsi_start_bb (then_bb); | |
2035 | /* Insert actual store. */ | |
2036 | stmt = gimple_build_assign (unshare_expr (mem), tmp_var); | |
2037 | gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING); | |
2038 | ||
2039 | make_edge (new_bb, then_bb, EDGE_TRUE_VALUE); | |
2040 | make_edge (new_bb, old_dest, EDGE_FALSE_VALUE); | |
2041 | then_old_edge = make_edge (then_bb, old_dest, EDGE_FALLTHRU); | |
2042 | ||
2043 | set_immediate_dominator (CDI_DOMINATORS, then_bb, new_bb); | |
2044 | ||
2045 | if (prev_edges) | |
2046 | { | |
2047 | basic_block prevbb = prev_edges->last_cond_fallthru->src; | |
2048 | redirect_edge_succ (prev_edges->last_cond_fallthru, new_bb); | |
2049 | set_immediate_dominator (CDI_DOMINATORS, new_bb, prevbb); | |
2050 | set_immediate_dominator (CDI_DOMINATORS, old_dest, | |
2051 | recompute_dominator (CDI_DOMINATORS, old_dest)); | |
2052 | } | |
2053 | ||
2054 | /* ?? Because stores may alias, they must happen in the exact | |
2055 | sequence in which they originally happened. Save the position right after
2056 | the (_lsm) store we just created so we can continue appending after | |
2057 | it and maintain the original order. */ | |
2058 | { | |
2059 | struct prev_flag_edges *p; | |
2060 | ||
2061 | if (orig_ex->aux) | |
2062 | orig_ex->aux = NULL; | |
2063 | alloc_aux_for_edge (orig_ex, sizeof (struct prev_flag_edges)); | |
2064 | p = (struct prev_flag_edges *) orig_ex->aux; | |
2065 | p->append_cond_position = then_old_edge; | |
2066 | p->last_cond_fallthru = find_edge (new_bb, old_dest); | |
2067 | orig_ex->aux = (void *) p; | |
2068 | } | |
2069 | ||
2070 | if (!loop_has_only_one_exit) | |
2071 | for (gsi = gsi_start_phis (old_dest); !gsi_end_p (gsi); gsi_next (&gsi)) | |
2072 | { | |
2073 | gimple phi = gsi_stmt (gsi); | |
2074 | unsigned i; | |
2075 | ||
2076 | for (i = 0; i < gimple_phi_num_args (phi); i++) | |
2077 | if (gimple_phi_arg_edge (phi, i)->src == new_bb) | |
2078 | { | |
2079 | tree arg = gimple_phi_arg_def (phi, i); | |
60d535d2 | 2080 | add_phi_arg (phi, arg, then_old_edge, UNKNOWN_LOCATION); |
61025ec0 | 2081 | update_stmt (phi); |
2082 | } | |
2083 | } | |
2084 | /* Remove the original fall through edge. This was the | |
2085 | single_succ_edge (new_bb). */ | |
2086 | EDGE_SUCC (new_bb, 0)->flags &= ~EDGE_FALLTHRU; | |
2087 | } | |
2088 | ||
2089 | /* Helper function for execute_sm. On every location where REF is | |
2090 | stored to, set an appropriate flag indicating the store. */
2091 | ||
2092 | static tree | |
2093 | execute_sm_if_changed_flag_set (struct loop *loop, mem_ref_p ref) | |
2094 | { | |
2095 | unsigned i; | |
2096 | mem_ref_loc_p loc; | |
2097 | tree flag; | |
2098 | VEC (mem_ref_loc_p, heap) *locs = NULL; | |
2099 | char *str = get_lsm_tmp_name (ref->mem, ~0); | |
2100 | ||
2101 | lsm_tmp_name_add ("_flag"); | |
072f7ab1 | 2102 | flag = create_tmp_reg (boolean_type_node, str); |
61025ec0 | 2103 | get_all_locs_in_loop (loop, ref, &locs); |
2104 | FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc) | |
2105 | { | |
2106 | gimple_stmt_iterator gsi; | |
2107 | gimple stmt; | |
2108 | ||
b0bfec50 | 2109 | /* Only set the flag for writes. */ |
2110 | if (is_gimple_assign (loc->stmt) | |
2111 | && gimple_assign_lhs_ptr (loc->stmt) == loc->ref) | |
2112 | { | |
2113 | gsi = gsi_for_stmt (loc->stmt); | |
2114 | stmt = gimple_build_assign (flag, boolean_true_node); | |
2115 | gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING); | |
2116 | } | |
61025ec0 | 2117 | } |
2118 | VEC_free (mem_ref_loc_p, heap, locs); | |
2119 | return flag; | |
2120 | } | |
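/* E.g. if REF is *p, every location in the loop that stores to it,

     *p = v;

   gets a statement

     p_lsm_flag = true;

   inserted right after it (the name is illustrative; the store itself is
   rewritten to the lsm temporary only later, by rewrite_mem_refs).  Loads of
   REF are skipped, hence the check that LOC is the left-hand side of an
   assignment.  */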
2121 | ||
063a8bce | 2122 | /* Executes store motion of memory reference REF from LOOP. |
749ea85f | 2123 | Exits from the LOOP are stored in EXITS. The initialization of the |
2124 | temporary variable is put in the preheader of the loop, and assignments
2125 | to the reference from the temporary variable are emitted on the exits. */
7d23383d | 2126 | |
2127 | static void | |
063a8bce | 2128 | execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref) |
7d23383d | 2129 | { |
61025ec0 | 2130 | tree tmp_var, store_flag; |
7d23383d | 2131 | unsigned i; |
61025ec0 | 2132 | gimple load; |
99e96094 | 2133 | struct fmt_data fmt_data; |
61025ec0 | 2134 | edge ex, latch_edge; |
75a70cf9 | 2135 | struct lim_aux_data *lim_data; |
61025ec0 | 2136 | bool multi_threaded_model_p = false; |
7d23383d | 2137 | |
69154f26 | 2138 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2139 | { | |
2140 | fprintf (dump_file, "Executing store motion of "); | |
063a8bce | 2141 | print_generic_expr (dump_file, ref->mem, 0); |
69154f26 | 2142 | fprintf (dump_file, " from loop %d\n", loop->num); |
2143 | } | |
2144 | ||
072f7ab1 | 2145 | tmp_var = create_tmp_reg (TREE_TYPE (ref->mem), |
063a8bce | 2146 | get_lsm_tmp_name (ref->mem, ~0)); |
7d23383d | 2147 | |
99e96094 | 2148 | fmt_data.loop = loop; |
2149 | fmt_data.orig_loop = loop; | |
063a8bce | 2150 | for_each_index (&ref->mem, force_move_till, &fmt_data); |
7d23383d | 2151 | |
25c30572 | 2152 | if (block_in_transaction (loop_preheader_edge (loop)->src) |
61025ec0 | 2153 | || !PARAM_VALUE (PARAM_ALLOW_STORE_DATA_RACES)) |
2154 | multi_threaded_model_p = true; | |
2155 | ||
2156 | if (multi_threaded_model_p) | |
2157 | store_flag = execute_sm_if_changed_flag_set (loop, ref); | |
2158 | ||
063a8bce | 2159 | rewrite_mem_refs (loop, ref, tmp_var); |
7d23383d | 2160 | |
61025ec0 | 2161 | /* Emit the load code into the latch, so that we are sure it will |
2162 | be processed after all dependencies. */ | |
2163 | latch_edge = loop_latch_edge (loop); | |
2164 | ||
2165 | /* FIXME/TODO: For the multi-threaded variant, we could avoid this | |
2166 | load altogether, since the store is predicated by a flag. We | |
2167 | could do the load only if it was originally in the loop. */
75a70cf9 | 2168 | load = gimple_build_assign (tmp_var, unshare_expr (ref->mem)); |
2169 | lim_data = init_lim_data (load); | |
2170 | lim_data->max_loop = loop; | |
2171 | lim_data->tgt_loop = loop; | |
61025ec0 | 2172 | gsi_insert_on_edge (latch_edge, load); |
7d23383d | 2173 | |
61025ec0 | 2174 | if (multi_threaded_model_p) |
7d23383d | 2175 | { |
61025ec0 | 2176 | load = gimple_build_assign (store_flag, boolean_false_node); |
2177 | lim_data = init_lim_data (load); | |
2178 | lim_data->max_loop = loop; | |
2179 | lim_data->tgt_loop = loop; | |
2180 | gsi_insert_on_edge (latch_edge, load); | |
7d23383d | 2181 | } |
61025ec0 | 2182 | |
2183 | /* Sink the store to every exit from the loop. */ | |
2184 | FOR_EACH_VEC_ELT (edge, exits, i, ex) | |
2185 | if (!multi_threaded_model_p) | |
2186 | { | |
2187 | gimple store; | |
2188 | store = gimple_build_assign (unshare_expr (ref->mem), tmp_var); | |
2189 | gsi_insert_on_edge (ex, store); | |
2190 | } | |
2191 | else | |
2192 | execute_sm_if_changed (ex, ref->mem, tmp_var, store_flag); | |
7d23383d | 2193 | } |
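/* A sketch of the net effect in the single-threaded model:

     for (...)
       *p = f (*p);

   becomes

     p_lsm = *p;
     for (...)
       p_lsm = f (p_lsm);
     *p = p_lsm;

   The load is emitted on the latch edge but is tagged with lim_data so that
   move_computations later hoists it to the preheader; the stores are sunk
   onto every exit edge.  In the multi-threaded model the final store is
   instead guarded by the flag, as described before execute_sm_if_changed.  */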
2194 | ||
063a8bce | 2195 | /* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit |
2196 | edges of the LOOP. */ | |
7d23383d | 2197 | |
2198 | static void | |
063a8bce | 2199 | hoist_memory_references (struct loop *loop, bitmap mem_refs, |
2200 | VEC (edge, heap) *exits) | |
7d23383d | 2201 | { |
063a8bce | 2202 | mem_ref_p ref; |
2203 | unsigned i; | |
2204 | bitmap_iterator bi; | |
69154f26 | 2205 | |
063a8bce | 2206 | EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi) |
7d23383d | 2207 | { |
063a8bce | 2208 | ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); |
2209 | execute_sm (loop, exits, ref); | |
7d23383d | 2210 | } |
55a03692 | 2211 | } |
2212 | ||
307f7fda | 2213 | /* Returns true if REF is always accessed in LOOP. If STORED_P is true,
2214 | make sure REF is always stored to in LOOP. */ | |
7d23383d | 2215 | |
2216 | static bool | |
307f7fda | 2217 | ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p) |
7d23383d | 2218 | { |
063a8bce | 2219 | VEC (mem_ref_loc_p, heap) *locs = NULL; |
7d23383d | 2220 | unsigned i; |
063a8bce | 2221 | mem_ref_loc_p loc; |
2222 | bool ret = false; | |
2223 | struct loop *must_exec; | |
307f7fda | 2224 | tree base; |
2225 | ||
2226 | base = get_base_address (ref->mem); | |
182cf5a9 | 2227 | if (INDIRECT_REF_P (base) |
2228 | || TREE_CODE (base) == MEM_REF) | |
307f7fda | 2229 | base = TREE_OPERAND (base, 0); |
7d23383d | 2230 | |
063a8bce | 2231 | get_all_locs_in_loop (loop, ref, &locs); |
48148244 | 2232 | FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc) |
063a8bce | 2233 | { |
75a70cf9 | 2234 | if (!get_lim_data (loc->stmt)) |
063a8bce | 2235 | continue; |
7d23383d | 2236 | |
307f7fda | 2237 | /* If we require an always executed store make sure the statement |
2238 | stores to the reference. */ | |
2239 | if (stored_p) | |
2240 | { | |
2241 | tree lhs; | |
2242 | if (!gimple_get_lhs (loc->stmt)) | |
2243 | continue; | |
2244 | lhs = get_base_address (gimple_get_lhs (loc->stmt)); | |
2245 | if (!lhs) | |
2246 | continue; | |
182cf5a9 | 2247 | if (INDIRECT_REF_P (lhs) |
2248 | || TREE_CODE (lhs) == MEM_REF) | |
307f7fda | 2249 | lhs = TREE_OPERAND (lhs, 0); |
2250 | if (lhs != base) | |
2251 | continue; | |
2252 | } | |
2253 | ||
75a70cf9 | 2254 | must_exec = get_lim_data (loc->stmt)->always_executed_in; |
063a8bce | 2255 | if (!must_exec) |
2256 | continue; | |
7d23383d | 2257 | |
063a8bce | 2258 | if (must_exec == loop |
2259 | || flow_loop_nested_p (must_exec, loop)) | |
2260 | { | |
2261 | ret = true; | |
2262 | break; | |
2263 | } | |
2264 | } | |
2265 | VEC_free (mem_ref_loc_p, heap, locs); | |
55a03692 | 2266 | |
063a8bce | 2267 | return ret; |
55a03692 | 2268 | } |
2269 | ||
063a8bce | 2270 | /* Returns true if REF1 and REF2 are independent. */ |
55a03692 | 2271 | |
063a8bce | 2272 | static bool |
2273 | refs_independent_p (mem_ref_p ref1, mem_ref_p ref2) | |
55a03692 | 2274 | { |
063a8bce | 2275 | if (ref1 == ref2 |
2276 | || bitmap_bit_p (ref1->indep_ref, ref2->id)) | |
2277 | return true; | |
2278 | if (bitmap_bit_p (ref1->dep_ref, ref2->id)) | |
2279 | return false; | |
0766b2c0 | 2280 | if (!MEM_ANALYZABLE (ref1) |
2281 | || !MEM_ANALYZABLE (ref2)) | |
2282 | return false; | |
55a03692 | 2283 | |
063a8bce | 2284 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2285 | fprintf (dump_file, "Querying dependency of refs %u and %u: ", | |
2286 | ref1->id, ref2->id); | |
2287 | ||
2288 | if (mem_refs_may_alias_p (ref1->mem, ref2->mem, | |
2289 | &memory_accesses.ttae_cache)) | |
2290 | { | |
2291 | bitmap_set_bit (ref1->dep_ref, ref2->id); | |
2292 | bitmap_set_bit (ref2->dep_ref, ref1->id); | |
2293 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2294 | fprintf (dump_file, "dependent.\n"); | |
2295 | return false; | |
2296 | } | |
2297 | else | |
2298 | { | |
2299 | bitmap_set_bit (ref1->indep_ref, ref2->id); | |
2300 | bitmap_set_bit (ref2->indep_ref, ref1->id); | |
2301 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2302 | fprintf (dump_file, "independent.\n"); | |
2303 | return true; | |
2304 | } | |
55a03692 | 2305 | } |
2306 | ||
063a8bce | 2307 | /* Records whether REF is independent of the other memory references
2308 | in LOOP (according to INDEP). */
55a03692 | 2309 | |
2310 | static void | |
063a8bce | 2311 | record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep) |
55a03692 | 2312 | { |
063a8bce | 2313 | if (indep) |
2314 | bitmap_set_bit (ref->indep_loop, loop->num); | |
2315 | else | |
2316 | bitmap_set_bit (ref->dep_loop, loop->num); | |
2317 | } | |
55a03692 | 2318 | |
063a8bce | 2319 | /* Returns true if REF is independent of all other memory references in
2320 | LOOP. */ | |
55a03692 | 2321 | |
063a8bce | 2322 | static bool |
2323 | ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref) | |
2324 | { | |
0766b2c0 | 2325 | bitmap refs_to_check; |
063a8bce | 2326 | unsigned i; |
2327 | bitmap_iterator bi; | |
2328 | bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num); | |
063a8bce | 2329 | mem_ref_p aref; |
2330 | ||
0766b2c0 | 2331 | if (stored) |
2332 | refs_to_check = VEC_index (bitmap, | |
2333 | memory_accesses.all_refs_in_loop, loop->num); | |
2334 | else | |
2335 | refs_to_check = VEC_index (bitmap, | |
2336 | memory_accesses.all_refs_stored_in_loop, | |
2337 | loop->num); | |
55a03692 | 2338 | |
063a8bce | 2339 | EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi) |
55a03692 | 2340 | { |
063a8bce | 2341 | aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); |
0766b2c0 | 2342 | if (!MEM_ANALYZABLE (aref) |
2343 | || !refs_independent_p (ref, aref)) | |
063a8bce | 2344 | { |
2345 | ret = false; | |
2346 | record_indep_loop (loop, aref, false); | |
2347 | break; | |
2348 | } | |
55a03692 | 2349 | } |
55a03692 | 2350 | |
063a8bce | 2351 | return ret; |
55a03692 | 2352 | } |
2353 | ||
063a8bce | 2354 | /* Returns true if REF is independent of all other memory references in
2355 | LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */ | |
55a03692 | 2356 | |
063a8bce | 2357 | static bool |
2358 | ref_indep_loop_p (struct loop *loop, mem_ref_p ref) | |
55a03692 | 2359 | { |
063a8bce | 2360 | bool ret; |
55a03692 | 2361 | |
063a8bce | 2362 | if (bitmap_bit_p (ref->indep_loop, loop->num)) |
2363 | return true; | |
2364 | if (bitmap_bit_p (ref->dep_loop, loop->num)) | |
2365 | return false; | |
55a03692 | 2366 | |
063a8bce | 2367 | ret = ref_indep_loop_p_1 (loop, ref); |
342ea212 | 2368 | |
063a8bce | 2369 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2370 | fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n", | |
2371 | ref->id, loop->num, ret ? "independent" : "dependent"); | |
2372 | ||
2373 | record_indep_loop (loop, ref, ret); | |
2374 | ||
2375 | return ret; | |
55a03692 | 2376 | } |
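/* The indep_loop/dep_loop bitmaps serve as a memoization cache, so each
   loop/reference query is computed at most once.  Note that when
   ref_indep_loop_p_1 finds a conflicting reference AREF, it records AREF as
   dependent in the loop right away: the conflict is with a reference that
   occurs in the loop, so the same negative answer holds for AREF.  */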
2377 | ||
063a8bce | 2378 | /* Returns true if we can perform store motion of REF from LOOP. */ |
55a03692 | 2379 | |
063a8bce | 2380 | static bool |
2381 | can_sm_ref_p (struct loop *loop, mem_ref_p ref) | |
55a03692 | 2382 | { |
307f7fda | 2383 | tree base; |
2384 | ||
0766b2c0 | 2385 | /* Can't hoist unanalyzable refs. */ |
2386 | if (!MEM_ANALYZABLE (ref)) | |
2387 | return false; | |
2388 | ||
063a8bce | 2389 | /* Unless the reference is stored in the loop, there is nothing to do. */ |
2390 | if (!bitmap_bit_p (ref->stored, loop->num)) | |
2391 | return false; | |
55a03692 | 2392 | |
063a8bce | 2393 | /* It should be movable. */ |
2394 | if (!is_gimple_reg_type (TREE_TYPE (ref->mem)) | |
2395 | || TREE_THIS_VOLATILE (ref->mem) | |
2396 | || !for_each_index (&ref->mem, may_move_till, loop)) | |
2397 | return false; | |
342ea212 | 2398 | |
16d9b5d8 | 2399 | /* If it can throw, fail; we do not properly update EH info. */
2400 | if (tree_could_throw_p (ref->mem)) | |
2401 | return false; | |
2402 | ||
307f7fda | 2403 | /* If it can trap, it must be always executed in LOOP. |
2404 | Readonly memory locations may trap when storing to them, but | |
2405 | tree_could_trap_p is a predicate for rvalues, so check that | |
2406 | explicitly. */ | |
2407 | base = get_base_address (ref->mem); | |
2408 | if ((tree_could_trap_p (ref->mem) | |
2409 | || (DECL_P (base) && TREE_READONLY (base))) | |
2410 | && !ref_always_accessed_p (loop, ref, true)) | |
063a8bce | 2411 | return false; |
342ea212 | 2412 | |
063a8bce | 2413 | /* And it must be independent of all other memory references
2414 | in LOOP. */ | |
2415 | if (!ref_indep_loop_p (loop, ref)) | |
2416 | return false; | |
342ea212 | 2417 | |
063a8bce | 2418 | return true; |
342ea212 | 2419 | } |
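/* For instance (illustrative):

     for (i = 0; i < n; i++)
       if (q[i])
         *p = q[i];

   Here *p may trap (p might not be dereferenceable), and the store is only
   conditionally executed, so executing it unconditionally after the loop
   could introduce a store -- and possibly a trap -- on runs that never
   stored to *p.  can_sm_ref_p therefore rejects such references unless the
   store is always executed in the loop.  */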
2420 | ||
063a8bce | 2421 | /* Marks in REFS_TO_SM the references in LOOP for which store motion
2422 | should be performed. SM_EXECUTED is the set of references for which
2423 | store motion was performed in one of the outer loops. */
342ea212 | 2424 | |
2425 | static void | |
063a8bce | 2426 | find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm) |
55a03692 | 2427 | { |
063a8bce | 2428 | bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, |
2429 | loop->num); | |
2430 | unsigned i; | |
2431 | bitmap_iterator bi; | |
2432 | mem_ref_p ref; | |
2433 | ||
2434 | EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi) | |
2435 | { | |
2436 | ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i); | |
2437 | if (can_sm_ref_p (loop, ref)) | |
2438 | bitmap_set_bit (refs_to_sm, i); | |
2439 | } | |
342ea212 | 2440 | } |
55a03692 | 2441 | |
063a8bce | 2442 | /* Checks whether LOOP (with exits stored in EXITS array) is suitable |
2443 | for a store motion optimization (i.e. whether we can insert statements
2444 | on its exits). */ | |
342ea212 | 2445 | |
063a8bce | 2446 | static bool |
2447 | loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, | |
2448 | VEC (edge, heap) *exits) | |
342ea212 | 2449 | { |
063a8bce | 2450 | unsigned i; |
2451 | edge ex; | |
55a03692 | 2452 | |
48148244 | 2453 | FOR_EACH_VEC_ELT (edge, exits, i, ex) |
9102193b | 2454 | if (ex->flags & (EDGE_ABNORMAL | EDGE_EH)) |
063a8bce | 2455 | return false; |
2456 | ||
2457 | return true; | |
55a03692 | 2458 | } |
2459 | ||
7d23383d | 2460 | /* Try to perform store motion for all memory references modified inside |
063a8bce | 2461 | LOOP. SM_EXECUTED is the bitmap of the memory references for which
2462 | store motion was executed in one of the outer loops. */
7d23383d | 2463 | |
2464 | static void | |
063a8bce | 2465 | store_motion_loop (struct loop *loop, bitmap sm_executed) |
7d23383d | 2466 | { |
749ea85f | 2467 | VEC (edge, heap) *exits = get_loop_exit_edges (loop); |
063a8bce | 2468 | struct loop *subloop; |
2469 | bitmap sm_in_loop = BITMAP_ALLOC (NULL); | |
7d23383d | 2470 | |
063a8bce | 2471 | if (loop_suitable_for_sm (loop, exits)) |
7d23383d | 2472 | { |
063a8bce | 2473 | find_refs_for_sm (loop, sm_executed, sm_in_loop); |
2474 | hoist_memory_references (loop, sm_in_loop, exits); | |
7d23383d | 2475 | } |
749ea85f | 2476 | VEC_free (edge, heap, exits); |
063a8bce | 2477 | |
2478 | bitmap_ior_into (sm_executed, sm_in_loop); | |
2479 | for (subloop = loop->inner; subloop != NULL; subloop = subloop->next) | |
2480 | store_motion_loop (subloop, sm_executed); | |
2481 | bitmap_and_compl_into (sm_executed, sm_in_loop); | |
2482 | BITMAP_FREE (sm_in_loop); | |
7d23383d | 2483 | } |
2484 | ||
2485 | /* Try to perform store motion for all memory references modified inside | |
7194de72 | 2486 | loops. */ |
7d23383d | 2487 | |
2488 | static void | |
063a8bce | 2489 | store_motion (void) |
7d23383d | 2490 | { |
2491 | struct loop *loop; | |
063a8bce | 2492 | bitmap sm_executed = BITMAP_ALLOC (NULL); |
dda28f7c | 2493 | |
063a8bce | 2494 | for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next) |
2495 | store_motion_loop (loop, sm_executed); | |
17519ba0 | 2496 | |
063a8bce | 2497 | BITMAP_FREE (sm_executed); |
75a70cf9 | 2498 | gsi_commit_edge_inserts (); |
7d23383d | 2499 | } |
2500 | ||
2501 | /* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e. | |
2502 | for each such basic block bb records the outermost loop such that execution
2503 | of its header implies execution of bb. CONTAINS_CALL is the bitmap of | |
2504 | blocks that contain a nonpure call. */ | |
2505 | ||
2506 | static void | |
2507 | fill_always_executed_in (struct loop *loop, sbitmap contains_call) | |
2508 | { | |
2509 | basic_block bb = NULL, *bbs, last = NULL; | |
2510 | unsigned i; | |
2511 | edge e; | |
2512 | struct loop *inn_loop = loop; | |
2513 | ||
2fd20c29 | 2514 | if (ALWAYS_EXECUTED_IN (loop->header) == NULL) |
7d23383d | 2515 | { |
2516 | bbs = get_loop_body_in_dom_order (loop); | |
2517 | ||
2518 | for (i = 0; i < loop->num_nodes; i++) | |
2519 | { | |
cd665a06 | 2520 | edge_iterator ei; |
7d23383d | 2521 | bb = bbs[i]; |
2522 | ||
2523 | if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) | |
2524 | last = bb; | |
2525 | ||
2526 | if (TEST_BIT (contains_call, bb->index)) | |
2527 | break; | |
2528 | ||
cd665a06 | 2529 | FOR_EACH_EDGE (e, ei, bb->succs) |
7d23383d | 2530 | if (!flow_bb_inside_loop_p (loop, e->dest)) |
2531 | break; | |
2532 | if (e) | |
2533 | break; | |
2534 | ||
2535 | /* A loop might be infinite (TODO use simple loop analysis | |
2536 | to disprove this if possible). */ | |
2537 | if (bb->flags & BB_IRREDUCIBLE_LOOP) | |
2538 | break; | |
2539 | ||
2540 | if (!flow_bb_inside_loop_p (inn_loop, bb)) | |
2541 | break; | |
2542 | ||
2543 | if (bb->loop_father->header == bb) | |
2544 | { | |
2545 | if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) | |
2546 | break; | |
2547 | ||
2548 | /* In a loop that is always entered we may proceed anyway. | |
2549 | But record that we entered it and stop once we leave it. */ | |
2550 | inn_loop = bb->loop_father; | |
2551 | } | |
2552 | } | |
2553 | ||
2554 | while (1) | |
2555 | { | |
2fd20c29 | 2556 | SET_ALWAYS_EXECUTED_IN (last, loop); |
7d23383d | 2557 | if (last == loop->header) |
2558 | break; | |
2559 | last = get_immediate_dominator (CDI_DOMINATORS, last); | |
2560 | } | |
2561 | ||
2562 | free (bbs); | |
2563 | } | |
2564 | ||
2565 | for (loop = loop->inner; loop; loop = loop->next) | |
2566 | fill_always_executed_in (loop, contains_call); | |
2567 | } | |
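/* E.g. for

     while (...)
       {
         x;             <-- always executed when the loop is entered
         if (c)
           y;           <-- executed only conditionally
       }

   ALWAYS_EXECUTED_IN of the block containing x is set to the loop, while the
   block containing y keeps NULL.  The walk is conservative: calls,
   irreducible regions and inner loops that are not always entered cut it
   short, as the breaks above show.  */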
2568 | ||
7194de72 | 2569 | /* Compute the global information needed by the loop invariant motion pass. */ |
7d23383d | 2570 | |
2571 | static void | |
7194de72 | 2572 | tree_ssa_lim_initialize (void) |
7d23383d | 2573 | { |
2574 | sbitmap contains_call = sbitmap_alloc (last_basic_block); | |
75a70cf9 | 2575 | gimple_stmt_iterator bsi; |
7d23383d | 2576 | struct loop *loop; |
2577 | basic_block bb; | |
2578 | ||
4fb07d00 | 2579 | bitmap_obstack_initialize (&lim_bitmap_obstack); |
2580 | ||
7d23383d | 2581 | sbitmap_zero (contains_call); |
2582 | FOR_EACH_BB (bb) | |
2583 | { | |
75a70cf9 | 2584 | for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi)) |
7d23383d | 2585 | { |
75a70cf9 | 2586 | if (nonpure_call_p (gsi_stmt (bsi))) |
7d23383d | 2587 | break; |
2588 | } | |
2589 | ||
75a70cf9 | 2590 | if (!gsi_end_p (bsi)) |
7d23383d | 2591 | SET_BIT (contains_call, bb->index); |
2592 | } | |
2593 | ||
7194de72 | 2594 | for (loop = current_loops->tree_root->inner; loop; loop = loop->next) |
7d23383d | 2595 | fill_always_executed_in (loop, contains_call); |
2596 | ||
2597 | sbitmap_free (contains_call); | |
75a70cf9 | 2598 | |
2599 | lim_aux_data_map = pointer_map_create (); | |
de60f90c | 2600 | |
2601 | if (flag_tm) | |
2602 | compute_transaction_bits (); | |
61025ec0 | 2603 | |
2604 | alloc_aux_for_edges (0); | |
7d23383d | 2605 | } |
2606 | ||
2607 | /* Cleans up after the invariant motion pass. */ | |
2608 | ||
2609 | static void | |
2610 | tree_ssa_lim_finalize (void) | |
2611 | { | |
2612 | basic_block bb; | |
063a8bce | 2613 | unsigned i; |
83b709f2 | 2614 | mem_ref_p ref; |
7d23383d | 2615 | |
61025ec0 | 2616 | free_aux_for_edges (); |
2617 | ||
7d23383d | 2618 | FOR_EACH_BB (bb) |
2fd20c29 | 2619 | SET_ALWAYS_EXECUTED_IN (bb, NULL); |
063a8bce | 2620 | |
4fb07d00 | 2621 | bitmap_obstack_release (&lim_bitmap_obstack); |
75a70cf9 | 2622 | pointer_map_destroy (lim_aux_data_map); |
2623 | ||
063a8bce | 2624 | htab_delete (memory_accesses.refs); |
2625 | ||
83b709f2 | 2626 | FOR_EACH_VEC_ELT (mem_ref_p, memory_accesses.refs_list, i, ref) |
2627 | memref_free (ref); | |
2628 | VEC_free (mem_ref_p, heap, memory_accesses.refs_list); | |
2629 | ||
063a8bce | 2630 | VEC_free (bitmap, heap, memory_accesses.refs_in_loop); |
063a8bce | 2631 | VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop); |
0766b2c0 | 2632 | VEC_free (bitmap, heap, memory_accesses.all_refs_stored_in_loop); |
063a8bce | 2633 | |
2634 | if (memory_accesses.ttae_cache) | |
78d53e33 | 2635 | free_affine_expand_cache (&memory_accesses.ttae_cache); |
7d23383d | 2636 | } |
2637 | ||
7194de72 | 2638 | /* Moves invariants from loops. Only "expensive" invariants are moved out -- |
7d23383d | 2639 | i.e. those that are likely to be a win regardless of the register pressure. */
2640 | ||
9bf0a3f9 | 2641 | unsigned int |
7194de72 | 2642 | tree_ssa_lim (void) |
7d23383d | 2643 | { |
9bf0a3f9 | 2644 | unsigned int todo; |
2645 | ||
7194de72 | 2646 | tree_ssa_lim_initialize (); |
7d23383d | 2647 | |
063a8bce | 2648 | /* Gathers information about memory accesses in the loops. */ |
2649 | analyze_memory_references (); | |
2650 | ||
7d23383d | 2651 | /* For each statement determine the outermost loop in that it is |
2652 | invariant and cost for computing the invariant. */ | |
2653 | determine_invariantness (); | |
2654 | ||
063a8bce | 2655 | /* Execute store motion. Force the necessary invariants to be moved |
2656 | out of the loops as well. */ | |
2657 | store_motion (); | |
7d23383d | 2658 | |
2659 | /* Move the expressions that are expensive enough. */ | |
9bf0a3f9 | 2660 | todo = move_computations (); |
7d23383d | 2661 | |
2662 | tree_ssa_lim_finalize (); | |
9bf0a3f9 | 2663 | |
2664 | return todo; | |
7d23383d | 2665 | } |