/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"

/* TODO:  Support for predicated code motion.  I.e.

     while (1)
       {
         if (cond)
           {
             a = inv;
             something;
           }
       }

   where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

     if (cond)
       a = inv;
     while (1)
       {
         if (cond)
           something;
       }  */

/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  gimple stmt;
  struct depend *next;
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in which the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of which we want to move the
				   invariant.  */

  struct loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must also be hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};

/* Maps statements to their lim_aux_data.  */

static struct pointer_map_t *lim_aux_data_map;

/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  gimple stmt;			/* The statement in which it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);

/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);

/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;			/* The memory itself.  */
  unsigned id;			/* ID assigned to the memory reference
				   (its index in memory_accesses.refs_list).  */
  hashval_t hash;		/* Its hash value.  */
  bitmap stored;		/* The set of loops in which this memory
				   location is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
				/* The locations of the accesses.  Vector
				   indexed by the loop number.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;		/* The set of loops in which the memory
				   reference is independent, meaning:
				   If it is stored in the loop, this store
				     is independent of all other loads and
				     stores.
				   If it is only loaded, then it is independent
				     of all stores in the loop.  */
  bitmap dep_loop;		/* The complement of INDEP_LOOP.  */

  bitmap indep_ref;		/* The set of memory references of which
				   this reference is independent.  */
  bitmap dep_ref;		/* The complement of INDEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);

/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of memory references stored in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_stored_in_loop;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;

static bool ref_indep_loop_p (struct loop *, mem_ref_p);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
#define SET_ALWAYS_EXECUTED_IN(BB, VAL) ((BB)->aux = (void *) (VAL))

/* Whether the reference was analyzable.  */
#define MEM_ANALYZABLE(REF) ((REF)->mem != error_mark_node)

/* Allocates new (zeroed) auxiliary data for STMT and returns it.  */

static struct lim_aux_data *
init_lim_data (gimple stmt)
{
  void **p = pointer_map_insert (lim_aux_data_map, stmt);

  *p = XCNEW (struct lim_aux_data);
  return (struct lim_aux_data *) *p;
}

/* Returns the auxiliary data recorded for STMT, or NULL if there is none.  */

static struct lim_aux_data *
get_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return NULL;

  return (struct lim_aux_data *) *p;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}

/* Releases and clears the auxiliary data recorded for STMT.  */

static void
clear_lim_data (gimple stmt)
{
  void **p = pointer_map_contains (lim_aux_data_map, stmt);
  if (!p)
    return;

  free_lim_aux_data ((struct lim_aux_data *) *p);
  *p = NULL;
}

/* Calls CBCK for each index in memory reference ADDR_P.  Two kinds of
   situations are handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MEM_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has varying offset, it behaves like an index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case STRING_CST:
	case RESULT_DECL:
	case VECTOR_CST:
	case COMPLEX_CST:
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return true;

	case ADDR_EXPR:
	  gcc_assert (is_gimple_min_invariant (*addr_p));
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX2 (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}
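
/* Illustration only, not part of the original sources: for a reference
   such as a[i].f[j], for_each_index invokes the callback on the indices
   I and J (and on the variable offset of the COMPONENT_REF, if any).
   may_move_till and force_move_till below are the callbacks actually
   used by this pass.  */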

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (gimple stmt)
{
  tree lhs;
  enum move_pos ret = MOVE_POSSIBLE;

  if (flag_unswitch_loops
      && gimple_code (stmt) == GIMPLE_COND)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (gimple_code (stmt) == GIMPLE_PHI
      && gimple_phi_num_args (stmt) <= 2
      && is_gimple_reg (gimple_phi_result (stmt))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt)))
    return MOVE_POSSIBLE;

  if (gimple_get_lhs (stmt) == NULL_TREE)
    return MOVE_IMPOSSIBLE;

  if (gimple_vdef (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_has_side_effects (stmt)
      || stmt_could_throw_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (is_gimple_call (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	   char *s = something ();

	   while (1)
	     {
	       if (s)
		 t = strlen (s);
	       else
		 t = 0;
	     }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      ret = MOVE_PRESERVE_EXECUTION;
      lhs = gimple_call_lhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || gimple_could_trap_p (stmt))
    return MOVE_PRESERVE_EXECUTION;

  /* Non-local loads in a transaction cannot be hoisted out.  Well,
     unless the load happens on every path out of the loop, but we
     don't take this into account yet.  */
  if (flag_tm
      && gimple_in_transaction (stmt)
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      if (DECL_P (rhs) && is_global_var (rhs))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "Cannot hoist conditional load of ");
	      print_generic_expr (dump_file, rhs, TDF_SLIM);
	      fprintf (dump_file, " because it is in a transaction.\n");
	    }
	  return MOVE_IMPOSSIBLE;
	}
    }

  return ret;
}
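
/* Illustration only, not part of the original sources: in

     for (i = 0; i < n; i++)
       x_1 = a_2 / b_3;

   the integer division is invariant but may trap if b_3 == 0, so
   movement_possibility returns MOVE_PRESERVE_EXECUTION; a non-trapping
   invariant such as "x_1 = a_2 + b_3" yields MOVE_POSSIBLE, while a
   statement with a virtual definition (a store) is MOVE_IMPOSSIBLE
   here and is left to store motion instead.  */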

/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  gimple def_stmt;
  basic_block def_bb;
  struct loop *max_loop;
  struct lim_aux_data *lim_data;

  if (!def)
    return superloop_at_depth (loop, 1);

  if (TREE_CODE (def) != SSA_NAME)
    {
      gcc_assert (is_gimple_min_invariant (def));
      return superloop_at_depth (loop, 1);
    }

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  lim_data = get_lim_data (def_stmt);
  if (lim_data != NULL && lim_data->max_loop != NULL)
    max_loop = find_common_loop (max_loop,
				 loop_outer (lim_data->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}
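
/* Illustration only, not part of the original sources: if DEF is defined
   before loop1 in

     loop1
       loop2
	 use of DEF

   then outermost_invariant_loop (DEF, loop2) returns loop1; it returns
   NULL when DEF is not invariant even within loop2 itself.  */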

/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined in the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  struct loop *max_loop;
  struct depend *dep;
  struct lim_aux_data *def_data;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += def_data->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}

/* Returns an estimate of the cost of statement STMT.  The values here
   are just ad-hoc constants, similar to costs for inlining.  */

static unsigned
stmt_cost (gimple stmt)
{
  /* Always try to create possibilities for unswitching.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_PHI)
    return LIM_EXPENSIVE;

  /* We should be hoisting calls if possible.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
	return 0;

      return LIM_EXPENSIVE;
    }

  /* Hoisting memory references out should almost surely be a win.  */
  if (gimple_references_memory_p (stmt))
    return LIM_EXPENSIVE;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return 1;

  switch (gimple_assign_rhs_code (stmt))
    {
    case MULT_EXPR:
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case DOT_PROD_EXPR:
    case FMA_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      return LIM_EXPENSIVE;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case WIDEN_LSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Shifts and rotates are usually expensive.  */
      return LIM_EXPENSIVE;

    case CONSTRUCTOR:
      /* Make vector construction cost proportional to the number
	 of elements.  */
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    case SSA_NAME:
    case PAREN_EXPR:
      /* Whether or not something is wrapped inside a PAREN_EXPR
	 should not change move cost.  Nor should an intermediate
	 unpropagated SSA name copy.  */
      return 0;

    default:
      return 1;
    }
}
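
/* Illustration only, not part of the original sources, and assuming the
   default value of the lim-expensive parameter (20 at the time this code
   was current): a single invariant division or multiplication already
   reaches LIM_EXPENSIVE and gets its profitable hoisting level set on
   its own, whereas a cost-1 addition is only moved when some expensive
   statement depends on it.  */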

/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP,
   NULL is returned instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
	&& ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}

/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to
   whether it is a store or a load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
{
  tree *lhs;
  enum tree_code code;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return NULL;

  code = gimple_assign_rhs_code (stmt);

  lhs = gimple_assign_lhs_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	  || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
	return NULL;

      *is_store = false;
      return gimple_assign_rhs1_ptr (stmt);
    }
  else if (code == SSA_NAME
	   || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
	       && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}
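
/* Illustration only, not part of the original sources: for the GIMPLE
   assignment "x_1 = a[i]" simple_mem_ref_in_stmt returns the address of
   the rhs operand and sets *IS_STORE to false; for "a[i] = x_1" (or
   "a[i] = 42") it returns the address of the lhs and sets *IS_STORE to
   true.  Anything else, e.g. a call or a register-only computation,
   yields NULL.  */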

/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (gimple stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}

/* From a controlling predicate in DOM determine the arguments from
   the PHI node PHI that are chosen if the predicate evaluates to
   true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if
   they are non-NULL.  Returns true if the arguments can be determined,
   else returns false.  */

static bool
extract_true_false_args_from_phi (basic_block dom, gimple phi,
				  tree *true_arg_p, tree *false_arg_p)
{
  basic_block bb = gimple_bb (phi);
  edge true_edge, false_edge, tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;

  /* We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  */
  extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
  tem = EDGE_PRED (bb, 0);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
  else
    return false;
  tem = EDGE_PRED (bb, 1);
  if (tem == true_edge
      || (single_pred_p (true_edge->dest)
	  && (tem->src == true_edge->dest
	      || dominated_by_p (CDI_DOMINATORS,
				 tem->src, true_edge->dest))))
    arg0 = PHI_ARG_DEF (phi, tem->dest_idx);
  else if (tem == false_edge
	   || (single_pred_p (false_edge->dest)
	       && (tem->src == false_edge->dest
		   || dominated_by_p (CDI_DOMINATORS,
				      tem->src, false_edge->dest))))
    arg1 = PHI_ARG_DEF (phi, tem->dest_idx);
  else
    return false;
  if (!arg0 || !arg1)
    return false;

  if (true_arg_p)
    *true_arg_p = arg0;
  if (false_arg_p)
    *false_arg_p = arg1;

  return true;
}
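
/* Illustration only, not part of the original sources: given the diamond

     if (cond)			<- predicate block DOM
       x_1 = ...;  (true arm)
     else
       x_2 = ...;  (false arm)
     x_3 = PHI <x_1, x_2>

   the function stores x_1 to *TRUE_ARG_P and x_2 to *FALSE_ARG_P and
   returns true.  */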

/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose a loop such that
   we preserve whether STMT is executed.  The function also fills in other
   related information in LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (gimple stmt, bool must_preserve_exec)
{
  basic_block bb = gimple_bb (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = get_lim_data (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      use_operand_p use_p;
      unsigned min_cost = UINT_MAX;
      unsigned total_cost = 0;
      struct lim_aux_data *def_data;

      /* We will end up promoting dependencies to be unconditionally
	 evaluated.  For this reason the PHI cost (and thus the
	 cost we remove from the loop by doing the invariant motion)
	 is that of the cheapest PHI argument dependency chain.  */
      FOR_EACH_PHI_ARG (use_p, stmt, iter, SSA_OP_USE)
	{
	  val = USE_FROM_PTR (use_p);
	  if (TREE_CODE (val) != SSA_NAME)
	    continue;
	  if (!add_dependency (val, lim_data, loop, false))
	    return false;
	  def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
	  if (def_data)
	    {
	      min_cost = MIN (min_cost, def_data->cost);
	      total_cost += def_data->cost;
	    }
	}

      lim_data->cost += min_cost;

      if (gimple_phi_num_args (stmt) > 1)
	{
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
	  gimple cond;
	  if (gsi_end_p (gsi_last_bb (dom)))
	    return false;
	  cond = gsi_stmt (gsi_last_bb (dom));
	  if (gimple_code (cond) != GIMPLE_COND)
	    return false;
	  /* Verify that this is an extended form of a diamond and
	     the PHI arguments are completely controlled by the
	     predicate in DOM.  */
	  if (!extract_true_false_args_from_phi (dom, stmt, NULL, NULL))
	    return false;

	  /* Fold in dependencies and cost of the condition.  */
	  FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	      def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
	      if (def_data)
		total_cost += def_data->cost;
	    }

	  /* We want to avoid unconditionally executing very expensive
	     operations.  As costs for our dependencies cannot be
	     negative, just claim we are not invariant for this case.
	     We also are not sure whether the control-flow inside the
	     loop will vanish.  */
	  if (total_cost - min_cost >= 2 * LIM_EXPENSIVE
	      && !(min_cost != 0
		   && total_cost / min_cost <= 2))
	    return false;

	  /* Assume that the control-flow in the loop will vanish.
	     ??? We should verify this and not artificially increase
	     the cost if that is not the case.  */
	  lim_data->cost += stmt_cost (stmt);
	}

      return true;
    }
  else
    FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
      if (!add_dependency (val, lim_data, loop, true))
	return false;

  if (gimple_vuse (stmt))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
	{
	  lim_data->max_loop
	    = outermost_indep_loop (lim_data->max_loop, loop, ref);
	  if (!lim_data->max_loop)
	    return false;
	}
      else
	{
	  if ((val = gimple_vuse (stmt)) != NULL_TREE)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	    }
	}
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}
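
/* Worked example for the cost guard above; illustration only, not part
   of the original sources.  Assume LIM_EXPENSIVE == 20.  PHI argument
   chains costing 25 and 30 give total_cost == 55 and min_cost == 25;
   since 55 - 25 < 40, the PHI is still treated as invariant.  Chains
   costing 1 and 60 give 61 - 1 >= 40 with 61 / 1 > 2, so hoisting is
   refused: it would unconditionally evaluate an expensive dependency
   that the original loop evaluated on only one branch.  */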

/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
  struct depend *dep;
  struct lim_aux_data *lim_data;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  lim_data = get_lim_data (stmt);
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (lim_data->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
	      || flow_loop_nested_p (lim_data->max_loop, level));

  lim_data->tgt_loop = level;
  for (dep = lim_data->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}

/* Determines the outermost loop out of which we want to hoist the
   statement STMT.  For now we choose the outermost possible loop.
   TODO -- use profiling information to set it more sanely.  */

static void
set_profitable_level (gimple stmt)
{
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}

/* Returns true if STMT is a call that has side effects.  */

static bool
nonpure_call_p (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  return gimple_has_side_effects (stmt);
}

/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static gimple
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
  gimple stmt, stmt1, stmt2;
  tree var, name, lhs, type;
  tree real_one;
  gimple_stmt_iterator gsi;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);

  var = create_tmp_reg (type, "reciptmp");

  real_one = build_one_cst (type);

  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
					var, real_one,
					gimple_assign_rhs2 (stmt));
  name = make_ssa_name (var, stmt1);
  gimple_assign_set_lhs (stmt1, name);

  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
					gimple_assign_rhs1 (stmt));

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  gsi = *bsi;
  gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
  gsi_replace (&gsi, stmt2, true);

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
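
/* Illustration only, not part of the original sources: with B invariant,

     for (i = 0; i < n; i++)
       x[i] = a[i] / b;

   becomes "reciptmp = 1.0 / b;  x[i] = a[i] * reciptmp;", and the
   reciprocal statement is then hoisted out of the loop.  This is only
   done under -funsafe-math-optimizations with -fno-trapping-math (see
   the caller in determine_invariantness_stmt).  */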

/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static gimple
rewrite_bittest (gimple_stmt_iterator *bsi)
{
  gimple stmt, use_stmt, stmt1, stmt2;
  tree lhs, var, name, t, a, b;
  use_operand_p use;

  stmt = gsi_stmt (*bsi);
  lhs = gimple_assign_lhs (stmt);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || gimple_code (use_stmt) != GIMPLE_COND)
    return stmt;
  if (gimple_cond_lhs (use_stmt) != lhs
      || (gimple_cond_code (use_stmt) != NE_EXPR
	  && gimple_cond_code (use_stmt) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (use_stmt)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
    {
      t = gimple_assign_rhs1 (stmt1);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
	return stmt;
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
    return stmt;

  a = gimple_assign_rhs1 (stmt1);
  b = gimple_assign_rhs2 (stmt1);

  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
    {
      gimple_stmt_iterator rsi;

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt1);
      gimple_assign_set_lhs (stmt1, name);

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = gimple_build_assign (var, t);
      name = make_ssa_name (var, stmt2);
      gimple_assign_set_lhs (stmt2, name);

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));

      /* Don't use gsi_replace here, none of the new assignments sets
	 the variable originally set in stmt.  Move bsi to stmt1, and
	 then remove the original stmt, so that we get a chance to
	 retain debug info for it.  */
      rsi = *bsi;
      gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
      gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
      gsi_remove (&rsi, true);

      return stmt1;
    }

  return stmt;
}
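
/* Illustration only, not part of the original sources: with B invariant,

     for (i = 0; i < n; i++)
       if ((a[i] >> b) & 1) ...

   becomes "shifttmp = 1 << b;  ... if (a[i] & shifttmp) ...", so the
   shift is computed once before the loop instead of on every
   iteration.  */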

/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them in the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  gimple_stmt_iterator bsi;
  gimple stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  /* Look at PHI nodes, but only if there are at most two.
     ??? We could relax this further by post-processing the inserted
     code and transforming adjacent cond-exprs with the same predicate
     to control flow again.  */
  bsi = gsi_start_phis (bb);
  if (!gsi_end_p (bsi)
      && ((gsi_next (&bsi), gsi_end_p (bsi))
	  || (gsi_next (&bsi), gsi_end_p (bsi))))
    for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
      {
	stmt = gsi_stmt (bsi);

	pos = movement_possibility (stmt);
	if (pos == MOVE_IMPOSSIBLE)
	  continue;

	lim_data = init_lim_data (stmt);
	lim_data->always_executed_in = outermost;

	if (!determine_max_movement (stmt, false))
	  {
	    lim_data->max_loop = NULL;
	    continue;
	  }

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    print_gimple_stmt (dump_file, stmt, 2, 0);
	    fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		     loop_depth (lim_data->max_loop),
		     lim_data->cost);
	  }

	if (lim_data->cost >= LIM_EXPENSIVE)
	  set_profitable_level (stmt);
      }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      stmt = gsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  /* Make sure to note always_executed_in for stores to make
	     store-motion work.  */
	  else if (stmt_makes_single_store (stmt))
	    {
	      struct lim_aux_data *lim_data = init_lim_data (stmt);
	      lim_data->always_executed_in = outermost;
	    }
	  continue;
	}

      if (is_gimple_assign (stmt)
	  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
	      == GIMPLE_BINARY_RHS))
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree op1 = gimple_assign_rhs2 (stmt);
	  struct loop *ol1 = outermost_invariant_loop (op1,
					loop_containing_stmt (stmt));

	  /* If the divisor is invariant, convert a/b to a*(1/b), allowing
	     the reciprocal to be hoisted out of the loop, saving an
	     expensive divide.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && ol1 != NULL
	      && outermost_invariant_loop (op0, ol1) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B), allowing the bit mask to be hoisted out of the
	     loop, saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
	      && integer_onep (op1)
	      && TREE_CODE (op0) == SSA_NAME
	      && has_single_use (op0))
	    stmt = rewrite_bittest (&bsi);
	}

      lim_data = init_lim_data (stmt);
      lim_data->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  lim_data->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_gimple_stmt (dump_file, stmt, 2, 0);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (lim_data->max_loop),
		   lim_data->cost);
	}

      if (lim_data->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}

/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends, and the cost of the computation.
   This information is stored in the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.
   Callback for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data,
			basic_block bb)
{
  struct loop *level;
  gimple_stmt_iterator bsi;
  gimple stmt;
  unsigned cost = 0;
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
    {
      gimple new_stmt;
      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving PHI node\n");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      if (gimple_phi_num_args (stmt) == 1)
	{
	  tree arg = PHI_ARG_DEF (stmt, 0);
	  new_stmt = gimple_build_assign_with_ops (TREE_CODE (arg),
						   gimple_phi_result (stmt),
						   arg, NULL_TREE);
	  SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
	}
      else
	{
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
	  gimple cond = gsi_stmt (gsi_last_bb (dom));
	  tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
	  /* Get the PHI arguments corresponding to the true and false
	     edges of COND.  */
	  extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1);
	  gcc_assert (arg0 && arg1);
	  t = build2 (gimple_cond_code (cond), boolean_type_node,
		      gimple_cond_lhs (cond), gimple_cond_rhs (cond));
	  new_stmt = gimple_build_assign_with_ops3 (COND_EXPR,
						    gimple_phi_result (stmt),
						    t, arg0, arg1);
	  SSA_NAME_DEF_STMT (gimple_phi_result (stmt)) = new_stmt;
	  *((unsigned int *)(dw_data->global_data)) |= TODO_cleanup_cfg;
	}
      gsi_insert_on_edge (loop_preheader_edge (level), new_stmt);
      remove_phi_node (&bsi, false);
    }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
    {
      edge e;

      stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed them here to force their operands to be moved if necessary.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      e = loop_preheader_edge (level);
      gcc_assert (!gimple_vdef (stmt));
      if (gimple_vuse (stmt))
	{
	  /* The new VUSE is the one from the virtual PHI in the loop
	     header or the one already present.  */
	  gimple_stmt_iterator gsi2;
	  for (gsi2 = gsi_start_phis (e->dest);
	       !gsi_end_p (gsi2); gsi_next (&gsi2))
	    {
	      gimple phi = gsi_stmt (gsi2);
	      if (!is_gimple_reg (gimple_phi_result (phi)))
		{
		  gimple_set_vuse (stmt, PHI_ARG_DEF_FROM_EDGE (phi, e));
		  break;
		}
	    }
	}
      gsi_remove (&bsi, false);
      gsi_insert_on_edge (e, stmt);
    }
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static unsigned int
move_computations (void)
{
  struct dom_walk_data walk_data;
  unsigned int todo = 0;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.global_data = &todo;
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  gsi_commit_edge_inserts ();
  if (need_ssa_update_p (cfun))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);

  return todo;
}

/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      max_loop = outermost_invariant_loop (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}

/* If OP is an SSA name, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which OP is used.  */

static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
  gimple stmt;

  if (!op
      || is_gimple_min_invariant (op))
    return;

  gcc_assert (TREE_CODE (op) == SSA_NAME);

  stmt = SSA_NAME_DEF_STMT (op);
  if (gimple_nop_p (stmt))
    return;

  set_level (stmt, orig_loop, loop);
}

/* Forces statements defining invariants in REF (and *INDEX) to be moved out
   of the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback
   for for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = TREE_OPERAND (ref, 3);
      tree lbound = TREE_OPERAND (ref, 2);

      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);

  return true;
}

/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  const struct mem_ref *const mem = (const struct mem_ref *) obj;

  return mem->hash;
}

/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}

/* Releases list of memory reference locations ACCS.  */

static void
free_mem_ref_locs (mem_ref_locs_p accs)
{
  unsigned i;
  mem_ref_loc_p loc;

  if (!accs)
    return;

  FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
    free (loc);
  VEC_free (mem_ref_loc_p, heap, accs->locs);
  free (accs);
}

/* A function to free the mem_ref object OBJ.  */

static void
memref_free (void *obj)
{
  struct mem_ref *const mem = (struct mem_ref *) obj;
  unsigned i;
  mem_ref_locs_p accs;

  BITMAP_FREE (mem->stored);
  BITMAP_FREE (mem->indep_loop);
  BITMAP_FREE (mem->dep_loop);
  BITMAP_FREE (mem->indep_ref);
  BITMAP_FREE (mem->dep_ref);

  FOR_EACH_VEC_ELT (mem_ref_locs_p, mem->accesses_in_loop, i, accs)
    free_mem_ref_locs (accs);
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);

  free (mem);
}

/* Allocates and returns a memory reference description for MEM whose hash
   value is HASH and id is ID.  */

static mem_ref_p
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
  mem_ref_p ref = XNEW (struct mem_ref);
  ref->mem = mem;
  ref->id = id;
  ref->hash = hash;
  ref->stored = BITMAP_ALLOC (NULL);
  ref->indep_loop = BITMAP_ALLOC (NULL);
  ref->dep_loop = BITMAP_ALLOC (NULL);
  ref->indep_ref = BITMAP_ALLOC (NULL);
  ref->dep_ref = BITMAP_ALLOC (NULL);
  ref->accesses_in_loop = NULL;

  return ref;
}

/* Allocates and returns the new list of locations.  */

static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
  accs->locs = NULL;
  return accs;
}

/* Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  */

static void
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
{
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
  mem_ref_locs_p accs;
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      <= (unsigned) loop->num)
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
			   loop->num + 1);
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
  if (!accs)
    {
      accs = mem_ref_locs_alloc ();
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
    }

  aref->stmt = stmt;
  aref->ref = loc;

  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
  bitmap_set_bit (ril, ref->id);
}

/* Marks reference REF as stored in LOOP.  */

static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
  for (;
       loop != current_loops->tree_root
       && !bitmap_bit_p (ref->stored, loop->num);
       loop = loop_outer (loop))
    bitmap_set_bit (ref->stored, loop->num);
}

/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   well.  */

static void
gather_mem_refs_stmt (struct loop *loop, gimple stmt)
{
  tree *mem = NULL;
  hashval_t hash;
  PTR *slot;
  mem_ref_p ref;
  bool is_stored;
  unsigned id;

  if (!gimple_vuse (stmt))
    return;

  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
  if (!mem)
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (error_mark_node, 0, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Unanalyzed memory reference %u: ", id);
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}
      if (gimple_vdef (stmt))
	mark_ref_stored (ref, loop);
      record_mem_ref_loc (ref, loop, stmt, mem);
      return;
    }

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);

  if (*slot)
    {
      ref = (mem_ref_p) *slot;
      id = ref->id;
    }
  else
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (*mem, hash, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      *slot = ref;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Memory reference %u: ", id);
	  print_generic_expr (dump_file, ref->mem, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  if (is_stored)
    mark_ref_stored (ref, loop);

  record_mem_ref_loc (ref, loop, stmt, mem);
  return;
}

/* Gathers memory references in loops.  */

static void
gather_mem_refs_in_loops (void)
{
  gimple_stmt_iterator bsi;
  basic_block bb;
  struct loop *loop;
  loop_iterator li;
  bitmap lrefs, alrefs, alrefso;

  FOR_EACH_BB (bb)
    {
      loop = bb->loop_father;
      if (loop == current_loops->tree_root)
	continue;

      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
	gather_mem_refs_stmt (loop, gsi_stmt (bsi));
    }

  /* Propagate the information about accessed memory references up
     the loop hierarchy.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
      bitmap_ior_into (alrefs, lrefs);

      if (loop_outer (loop) == current_loops->tree_root)
	continue;

      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop_outer (loop)->num);
      bitmap_ior_into (alrefso, alrefs);
    }
}

/* Create a mapping from virtual operands to references that touch them
   in LOOP.  */

static void
create_vop_ref_mapping_loop (struct loop *loop)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
  struct loop *sloop;
  bitmap_iterator bi;
  unsigned i;
  mem_ref_p ref;

  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      for (sloop = loop; sloop != current_loops->tree_root;
	   sloop = loop_outer (sloop))
	if (bitmap_bit_p (ref->stored, loop->num))
	  {
	    bitmap refs_stored
	      = VEC_index (bitmap, memory_accesses.all_refs_stored_in_loop,
			   sloop->num);
	    bitmap_set_bit (refs_stored, ref->id);
	  }
    }
}

/* For each non-clobbered virtual operand and each loop, record the memory
   references in this loop that touch the operand.  */

static void
create_vop_ref_mapping (void)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, 0)
    {
      create_vop_ref_mapping_loop (loop);
    }
}

/* Gathers information about memory accesses in the loops.  */

static void
analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;

  memory_accesses.refs
    = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
					    number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
						number_of_loops ());
  memory_accesses.all_refs_stored_in_loop = VEC_alloc (bitmap, heap,
						       number_of_loops ());

  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_stored_in_loop, empty);
    }

  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}
72425608
ZD
1763/* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in
1764 tree_to_aff_combination_expand. */
1765
1766static bool
1767mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
1768{
1769 /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
1770 object and their offsets differ in such a way that the locations cannot
1771 overlap, then they cannot alias. */
72425608 1772 double_int size1, size2;
1842e4d4 1773 aff_tree off1, off2;
72425608 1774
1842e4d4
RG
1775 /* Perform basic offset and type-based disambiguation. */
1776 if (!refs_may_alias_p (mem1, mem2))
72425608 1777 return false;
a7e5372d 1778
72425608
ZD
1779 /* The expansion of addresses may be a bit expensive, thus we only do
1780 the check at -O2 and higher optimization levels. */
1781 if (optimize < 2)
1782 return true;
1783
1784 get_inner_reference_aff (mem1, &off1, &size1);
1785 get_inner_reference_aff (mem2, &off2, &size2);
1786 aff_combination_expand (&off1, ttae_cache);
1787 aff_combination_expand (&off2, ttae_cache);
1788 aff_combination_scale (&off1, double_int_minus_one);
1789 aff_combination_add (&off2, &off1);
1790
02f5d6c5 1791 if (aff_comb_cannot_overlap_p (&off2, size1, size2))
72425608
ZD
1792 return false;
1793
1794 return true;
1795}
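/* For instance, for the accesses a[i] and a[i + 1] the difference of the
   expanded offsets is exactly one element size, which is at least SIZE1,
   so aff_comb_cannot_overlap_p concludes that the two locations cannot
   overlap and the references are reported as non-aliasing.  */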
1796
1797/* Rewrites the location LOC to use the variable TMP_VAR instead. */
1798
1799static void
1800rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
1801{
72425608
ZD
1802 *loc->ref = tmp_var;
1803 update_stmt (loc->stmt);
1804}
1805
1806/* Adds all locations of REF in LOOP and its subloops to LOCS. */
1807
1808static void
1809get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
1810 VEC (mem_ref_loc_p, heap) **locs)
1811{
1812 mem_ref_locs_p accs;
1813 unsigned i;
1814 mem_ref_loc_p loc;
1815 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
1816 loop->num);
1817 struct loop *subloop;
1818
1819 if (!bitmap_bit_p (refs, ref->id))
1820 return;
1821
1822 if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
1823 > (unsigned) loop->num)
1824 {
1825 accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
1826 if (accs)
1827 {
ac47786e 1828 FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
72425608
ZD
1829 VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
1830 }
1831 }
1832
1833 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
1834 get_all_locs_in_loop (subloop, ref, locs);
1835}
1836
1837/* Rewrites all accesses to REF in LOOP and its subloops to use the variable TMP_VAR. */
1838
1839static void
1840rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
1841{
1842 unsigned i;
1843 mem_ref_loc_p loc;
1844 VEC (mem_ref_loc_p, heap) *locs = NULL;
1845
1846 get_all_locs_in_loop (loop, ref, &locs);
ac47786e 1847 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
72425608
ZD
1848 rewrite_mem_ref_loc (loc, tmp_var);
1849 VEC_free (mem_ref_loc_p, heap, locs);
a7e5372d
ZD
1850}
1851
d28cbb07
ZD
1852/* The name and the length of the variable currently being generated
1853 for load/store motion (lsm). */
1854#define MAX_LSM_NAME_LENGTH 40
1855static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
1856static int lsm_tmp_name_length;
1857
1858/* Adds S to lsm_tmp_name. */
1859
1860static void
1861lsm_tmp_name_add (const char *s)
1862{
1863 int l = strlen (s) + lsm_tmp_name_length;
1864 if (l > MAX_LSM_NAME_LENGTH)
1865 return;
1866
1867 strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
1868 lsm_tmp_name_length = l;
1869}
1870
1871/* Stores the name for the temporary variable that replaces REF into
1872 lsm_tmp_name. */
1873
1874static void
1875gen_lsm_tmp_name (tree ref)
1876{
1877 const char *name;
1878
1879 switch (TREE_CODE (ref))
1880 {
70f34814 1881 case MEM_REF:
d5fed62d 1882 case TARGET_MEM_REF:
d28cbb07
ZD
1883 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1884 lsm_tmp_name_add ("_");
1885 break;
1886
70f34814
RG
1887 case ADDR_EXPR:
1888 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1889 break;
1890
d28cbb07
ZD
1891 case BIT_FIELD_REF:
1892 case VIEW_CONVERT_EXPR:
1893 case ARRAY_RANGE_REF:
1894 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1895 break;
1896
1897 case REALPART_EXPR:
1898 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1899 lsm_tmp_name_add ("_RE");
1900 break;
b8698a0f 1901
d28cbb07
ZD
1902 case IMAGPART_EXPR:
1903 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1904 lsm_tmp_name_add ("_IM");
1905 break;
1906
1907 case COMPONENT_REF:
1908 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1909 lsm_tmp_name_add ("_");
1910 name = get_name (TREE_OPERAND (ref, 1));
1911 if (!name)
1912 name = "F";
d28cbb07 1913 lsm_tmp_name_add (name);
cbe80ff8 1914 break;
d28cbb07
ZD
1915
1916 case ARRAY_REF:
1917 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1918 lsm_tmp_name_add ("_I");
1919 break;
1920
1921 case SSA_NAME:
1922 ref = SSA_NAME_VAR (ref);
1923 /* Fallthru. */
1924
1925 case VAR_DECL:
1926 case PARM_DECL:
1927 name = get_name (ref);
1928 if (!name)
1929 name = "D";
1930 lsm_tmp_name_add (name);
1931 break;
1932
1933 case STRING_CST:
1934 lsm_tmp_name_add ("S");
1935 break;
1936
1937 case RESULT_DECL:
1938 lsm_tmp_name_add ("R");
1939 break;
1940
150e3929
RG
1941 case INTEGER_CST:
1942 /* Nothing. */
1943 break;
1944
d28cbb07
ZD
1945 default:
1946 gcc_unreachable ();
1947 }
1948}
1949
1950/* Determines the name for the temporary variable that replaces REF.
bbc8a8dc
ZD
1951 The name is accumulated into the lsm_tmp_name variable.
1952 N is appended to the name of the temporary. */
d28cbb07 1953
bbc8a8dc
ZD
1954char *
1955get_lsm_tmp_name (tree ref, unsigned n)
d28cbb07 1956{
bbc8a8dc
ZD
1957 char ns[2];
1958
d28cbb07
ZD
1959 lsm_tmp_name_length = 0;
1960 gen_lsm_tmp_name (ref);
1961 lsm_tmp_name_add ("_lsm");
bbc8a8dc
ZD
1962 if (n < 10)
1963 {
1964 ns[0] = '0' + n;
1965 ns[1] = 0;
1966 lsm_tmp_name_add (ns);
1967 }
d28cbb07
ZD
1968 return lsm_tmp_name;
1969}
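/* E.g. for a reference such as s.x[i] this produces a name along the
   lines of "s_x_I_lsm0" (with N == 0); components without a name fall
   back to the single-letter codes above.  */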
1970
039496da
AH
1971struct prev_flag_edges {
1972 /* Edge to insert new flag comparison code. */
1973 edge append_cond_position;
1974
1975 /* Edge for fall through from previous flag comparison. */
1976 edge last_cond_fallthru;
1977};
1978
1979/* Helper function for execute_sm. Emit code to store TMP_VAR into
1980 MEM along edge EX.
1981
1982 The store is only done if MEM has changed. We do this so no
1983 changes to MEM occur on code paths that did not originally store
1984 into it.
1985
1986 The common case for execute_sm will transform:
1987
1988 for (...) {
1989 if (foo)
1990 stuff;
1991 else
1992 MEM = TMP_VAR;
1993 }
1994
1995 into:
1996
1997 lsm = MEM;
1998 for (...) {
1999 if (foo)
2000 stuff;
2001 else
2002 lsm = TMP_VAR;
2003 }
2004 MEM = lsm;
2005
2006 This function will generate:
2007
2008 lsm = MEM;
2009
2010 lsm_flag = false;
2011 ...
2012 for (...) {
2013 if (foo)
2014 stuff;
2015 else {
2016 lsm = TMP_VAR;
2017 lsm_flag = true;
2018 }
2019 }
2020 if (lsm_flag) <--
2021 MEM = lsm; <--
2022*/
2023
2024static void
2025execute_sm_if_changed (edge ex, tree mem, tree tmp_var, tree flag)
2026{
2027 basic_block new_bb, then_bb, old_dest;
2028 bool loop_has_only_one_exit;
2029 edge then_old_edge, orig_ex = ex;
2030 gimple_stmt_iterator gsi;
2031 gimple stmt;
2032 struct prev_flag_edges *prev_edges = (struct prev_flag_edges *) ex->aux;
2033
2034 /* ?? Insert store after previous store if applicable. See note
2035 below. */
2036 if (prev_edges)
2037 ex = prev_edges->append_cond_position;
2038
2039 loop_has_only_one_exit = single_pred_p (ex->dest);
2040
2041 if (loop_has_only_one_exit)
2042 ex = split_block_after_labels (ex->dest);
2043
2044 old_dest = ex->dest;
2045 new_bb = split_edge (ex);
2046 then_bb = create_empty_bb (new_bb);
2047 if (current_loops && new_bb->loop_father)
2048 add_bb_to_loop (then_bb, new_bb->loop_father);
2049
2050 gsi = gsi_start_bb (new_bb);
2051 stmt = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
2052 NULL_TREE, NULL_TREE);
2053 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
2054
2055 gsi = gsi_start_bb (then_bb);
2056 /* Insert actual store. */
2057 stmt = gimple_build_assign (unshare_expr (mem), tmp_var);
2058 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
2059
2060 make_edge (new_bb, then_bb, EDGE_TRUE_VALUE);
2061 make_edge (new_bb, old_dest, EDGE_FALSE_VALUE);
2062 then_old_edge = make_edge (then_bb, old_dest, EDGE_FALLTHRU);
2063
2064 set_immediate_dominator (CDI_DOMINATORS, then_bb, new_bb);
2065
2066 if (prev_edges)
2067 {
2068 basic_block prevbb = prev_edges->last_cond_fallthru->src;
2069 redirect_edge_succ (prev_edges->last_cond_fallthru, new_bb);
2070 set_immediate_dominator (CDI_DOMINATORS, new_bb, prevbb);
2071 set_immediate_dominator (CDI_DOMINATORS, old_dest,
2072 recompute_dominator (CDI_DOMINATORS, old_dest));
2073 }
2074
2075 /* ?? Because stores may alias, they must happen in the exact
2076 order in which they originally happened. Save the position right after
2077 the (_lsm) store we just created so we can continue appending after
2078 it and maintain the original order. */
2079 {
2080 struct prev_flag_edges *p;
2081
2082 if (orig_ex->aux)
2083 orig_ex->aux = NULL;
2084 alloc_aux_for_edge (orig_ex, sizeof (struct prev_flag_edges));
2085 p = (struct prev_flag_edges *) orig_ex->aux;
2086 p->append_cond_position = then_old_edge;
2087 p->last_cond_fallthru = find_edge (new_bb, old_dest);
2088 orig_ex->aux = (void *) p;
2089 }
2090
2091 if (!loop_has_only_one_exit)
2092 for (gsi = gsi_start_phis (old_dest); !gsi_end_p (gsi); gsi_next (&gsi))
2093 {
2094 gimple phi = gsi_stmt (gsi);
2095 unsigned i;
2096
2097 for (i = 0; i < gimple_phi_num_args (phi); i++)
2098 if (gimple_phi_arg_edge (phi, i)->src == new_bb)
2099 {
2100 tree arg = gimple_phi_arg_def (phi, i);
9e227d60 2101 add_phi_arg (phi, arg, then_old_edge, UNKNOWN_LOCATION);
039496da
AH
2102 update_stmt (phi);
2103 }
2104 }
2105 /* Remove the original fall through edge. This was the
2106 single_succ_edge (new_bb). */
2107 EDGE_SUCC (new_bb, 0)->flags &= ~EDGE_FALLTHRU;
2108}
2109
2110/* Helper function for execute_sm. At every location where REF is
2111 stored to, set an appropriate flag indicating the store. */
2112
2113static tree
2114execute_sm_if_changed_flag_set (struct loop *loop, mem_ref_p ref)
2115{
2116 unsigned i;
2117 mem_ref_loc_p loc;
2118 tree flag;
2119 VEC (mem_ref_loc_p, heap) *locs = NULL;
2120 char *str = get_lsm_tmp_name (ref->mem, ~0);
2121
2122 lsm_tmp_name_add ("_flag");
7cc434a3 2123 flag = create_tmp_reg (boolean_type_node, str);
039496da
AH
2124 get_all_locs_in_loop (loop, ref, &locs);
2125 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
2126 {
2127 gimple_stmt_iterator gsi;
2128 gimple stmt;
2129
2130 gsi = gsi_for_stmt (loc->stmt);
2131 stmt = gimple_build_assign (flag, boolean_true_node);
2132 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
2133 }
2134 VEC_free (mem_ref_loc_p, heap, locs);
2135 return flag;
2136}
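/* I.e. after this transformation each original store to REF inside LOOP,
   say

     *p = val;

   is immediately followed by

     flag = true;

   where flag stands for the boolean temporary created above; the store
   itself is only rewritten to the lsm temporary later, in execute_sm.  */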
2137
72425608 2138/* Executes store motion of memory reference REF from LOOP.
ca83d385
ZD
2139 Exits from the LOOP are stored in EXITS. The initialization of the
2140 temporary variable is put into the preheader of the loop, and assignments
2141 to the reference from the temporary variable are emitted on the exits. */
a7e5372d
ZD
2142
2143static void
72425608 2144execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
a7e5372d 2145{
039496da 2146 tree tmp_var, store_flag;
a7e5372d 2147 unsigned i;
039496da 2148 gimple load;
b4042a03 2149 struct fmt_data fmt_data;
039496da 2150 edge ex, latch_edge;
726a989a 2151 struct lim_aux_data *lim_data;
039496da 2152 bool multi_threaded_model_p = false;
a7e5372d 2153
a3631d97
ZD
2154 if (dump_file && (dump_flags & TDF_DETAILS))
2155 {
2156 fprintf (dump_file, "Executing store motion of ");
72425608 2157 print_generic_expr (dump_file, ref->mem, 0);
a3631d97
ZD
2158 fprintf (dump_file, " from loop %d\n", loop->num);
2159 }
2160
7cc434a3 2161 tmp_var = create_tmp_reg (TREE_TYPE (ref->mem),
72425608 2162 get_lsm_tmp_name (ref->mem, ~0));
a7e5372d 2163
b4042a03
ZD
2164 fmt_data.loop = loop;
2165 fmt_data.orig_loop = loop;
72425608 2166 for_each_index (&ref->mem, force_move_till, &fmt_data);
a7e5372d 2167
874a3589 2168 if (block_in_transaction (loop_preheader_edge (loop)->src)
039496da
AH
2169 || !PARAM_VALUE (PARAM_ALLOW_STORE_DATA_RACES))
2170 multi_threaded_model_p = true;
2171
2172 if (multi_threaded_model_p)
2173 store_flag = execute_sm_if_changed_flag_set (loop, ref);
2174
72425608 2175 rewrite_mem_refs (loop, ref, tmp_var);
a7e5372d 2176
039496da
AH
2177 /* Emit the load code into the latch, so that we are sure it will
2178 be processed after all dependencies. */
2179 latch_edge = loop_latch_edge (loop);
2180
2181 /* FIXME/TODO: For the multi-threaded variant, we could avoid this
2182 load altogether, since the store is predicated by a flag. We
2183 could do the load only if it was originally in the loop. */
726a989a
RB
2184 load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
2185 lim_data = init_lim_data (load);
2186 lim_data->max_loop = loop;
2187 lim_data->tgt_loop = loop;
039496da 2188 gsi_insert_on_edge (latch_edge, load);
a7e5372d 2189
039496da 2190 if (multi_threaded_model_p)
a7e5372d 2191 {
039496da
AH
2192 load = gimple_build_assign (store_flag, boolean_false_node);
2193 lim_data = init_lim_data (load);
2194 lim_data->max_loop = loop;
2195 lim_data->tgt_loop = loop;
2196 gsi_insert_on_edge (latch_edge, load);
a7e5372d 2197 }
039496da
AH
2198
2199 /* Sink the store to every exit from the loop. */
2200 FOR_EACH_VEC_ELT (edge, exits, i, ex)
2201 if (!multi_threaded_model_p)
2202 {
2203 gimple store;
2204 store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
2205 gsi_insert_on_edge (ex, store);
2206 }
2207 else
2208 execute_sm_if_changed (ex, ref->mem, tmp_var, store_flag);
a7e5372d
ZD
2209}
2210
72425608
ZD
2211/* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit
2212 edges of the LOOP. */
a7e5372d
ZD
2213
2214static void
72425608
ZD
2215hoist_memory_references (struct loop *loop, bitmap mem_refs,
2216 VEC (edge, heap) *exits)
a7e5372d 2217{
72425608
ZD
2218 mem_ref_p ref;
2219 unsigned i;
2220 bitmap_iterator bi;
a3631d97 2221
72425608 2222 EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
a7e5372d 2223 {
72425608
ZD
2224 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
2225 execute_sm (loop, exits, ref);
a7e5372d 2226 }
01fd257a
ZD
2227}
2228
58adb739
RG
2229/* Returns true if REF is always accessed in LOOP. If STORED_P is true,
2230 make sure REF is always stored to in LOOP. */
a7e5372d
ZD
2231
2232static bool
58adb739 2233ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
a7e5372d 2234{
72425608 2235 VEC (mem_ref_loc_p, heap) *locs = NULL;
a7e5372d 2236 unsigned i;
72425608
ZD
2237 mem_ref_loc_p loc;
2238 bool ret = false;
2239 struct loop *must_exec;
58adb739
RG
2240 tree base;
2241
2242 base = get_base_address (ref->mem);
70f34814
RG
2243 if (INDIRECT_REF_P (base)
2244 || TREE_CODE (base) == MEM_REF)
58adb739 2245 base = TREE_OPERAND (base, 0);
a7e5372d 2246
72425608 2247 get_all_locs_in_loop (loop, ref, &locs);
ac47786e 2248 FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
72425608 2249 {
726a989a 2250 if (!get_lim_data (loc->stmt))
72425608 2251 continue;
a7e5372d 2252
58adb739
RG
2253 /* If we require an always executed store, make sure the statement
2254 stores to the reference. */
2255 if (stored_p)
2256 {
2257 tree lhs;
2258 if (!gimple_get_lhs (loc->stmt))
2259 continue;
2260 lhs = get_base_address (gimple_get_lhs (loc->stmt));
2261 if (!lhs)
2262 continue;
70f34814
RG
2263 if (INDIRECT_REF_P (lhs)
2264 || TREE_CODE (lhs) == MEM_REF)
58adb739
RG
2265 lhs = TREE_OPERAND (lhs, 0);
2266 if (lhs != base)
2267 continue;
2268 }
2269
726a989a 2270 must_exec = get_lim_data (loc->stmt)->always_executed_in;
72425608
ZD
2271 if (!must_exec)
2272 continue;
a7e5372d 2273
72425608
ZD
2274 if (must_exec == loop
2275 || flow_loop_nested_p (must_exec, loop))
2276 {
2277 ret = true;
2278 break;
2279 }
2280 }
2281 VEC_free (mem_ref_loc_p, heap, locs);
01fd257a 2282
72425608 2283 return ret;
01fd257a
ZD
2284}
2285
72425608 2286/* Returns true if REF1 and REF2 are independent. */
01fd257a 2287
72425608
ZD
2288static bool
2289refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
01fd257a 2290{
72425608
ZD
2291 if (ref1 == ref2
2292 || bitmap_bit_p (ref1->indep_ref, ref2->id))
2293 return true;
2294 if (bitmap_bit_p (ref1->dep_ref, ref2->id))
2295 return false;
546d314c
RG
2296 if (!MEM_ANALYZABLE (ref1)
2297 || !MEM_ANALYZABLE (ref2))
2298 return false;
01fd257a 2299
72425608
ZD
2300 if (dump_file && (dump_flags & TDF_DETAILS))
2301 fprintf (dump_file, "Querying dependency of refs %u and %u: ",
2302 ref1->id, ref2->id);
2303
2304 if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
2305 &memory_accesses.ttae_cache))
2306 {
2307 bitmap_set_bit (ref1->dep_ref, ref2->id);
2308 bitmap_set_bit (ref2->dep_ref, ref1->id);
2309 if (dump_file && (dump_flags & TDF_DETAILS))
2310 fprintf (dump_file, "dependent.\n");
2311 return false;
2312 }
2313 else
2314 {
2315 bitmap_set_bit (ref1->indep_ref, ref2->id);
2316 bitmap_set_bit (ref2->indep_ref, ref1->id);
2317 if (dump_file && (dump_flags & TDF_DETAILS))
2318 fprintf (dump_file, "independent.\n");
2319 return true;
2320 }
01fd257a
ZD
2321}
2322
72425608
ZD
2323/* Records whether REF is independent of the other memory references
2324 in LOOP (according to INDEP). */
01fd257a
ZD
2325
2326static void
72425608 2327record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
01fd257a 2328{
72425608
ZD
2329 if (indep)
2330 bitmap_set_bit (ref->indep_loop, loop->num);
2331 else
2332 bitmap_set_bit (ref->dep_loop, loop->num);
2333}
01fd257a 2334
72425608
ZD
2335/* Returns true if REF is independent of all other memory references in
2336 LOOP. */
01fd257a 2337
72425608
ZD
2338static bool
2339ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
2340{
546d314c 2341 bitmap refs_to_check;
72425608
ZD
2342 unsigned i;
2343 bitmap_iterator bi;
2344 bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
72425608
ZD
2345 mem_ref_p aref;
2346
546d314c
RG
2347 if (stored)
2348 refs_to_check = VEC_index (bitmap,
2349 memory_accesses.all_refs_in_loop, loop->num);
2350 else
2351 refs_to_check = VEC_index (bitmap,
2352 memory_accesses.all_refs_stored_in_loop,
2353 loop->num);
01fd257a 2354
72425608 2355 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
01fd257a 2356 {
72425608 2357 aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
546d314c
RG
2358 if (!MEM_ANALYZABLE (aref)
2359 || !refs_independent_p (ref, aref))
72425608
ZD
2360 {
2361 ret = false;
2362 record_indep_loop (loop, aref, false);
2363 break;
2364 }
01fd257a 2365 }
01fd257a 2366
72425608 2367 return ret;
01fd257a
ZD
2368}
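/* Note the asymmetry above: a reference that is stored in LOOP has to be
   independent of all other references in the loop, whereas a reference
   that is only loaded needs to be checked against the stored ones only.  */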
2369
72425608
ZD
2370/* Returns true if REF is independent of all other memory references in
2371 LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */
01fd257a 2372
72425608
ZD
2373static bool
2374ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
01fd257a 2375{
72425608 2376 bool ret;
01fd257a 2377
72425608
ZD
2378 if (bitmap_bit_p (ref->indep_loop, loop->num))
2379 return true;
2380 if (bitmap_bit_p (ref->dep_loop, loop->num))
2381 return false;
01fd257a 2382
72425608 2383 ret = ref_indep_loop_p_1 (loop, ref);
ed9c043b 2384
72425608
ZD
2385 if (dump_file && (dump_flags & TDF_DETAILS))
2386 fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
2387 ref->id, loop->num, ret ? "independent" : "dependent");
2388
2389 record_indep_loop (loop, ref, ret);
2390
2391 return ret;
01fd257a
ZD
2392}
2393
72425608 2394/* Returns true if we can perform store motion of REF from LOOP. */
01fd257a 2395
72425608
ZD
2396static bool
2397can_sm_ref_p (struct loop *loop, mem_ref_p ref)
01fd257a 2398{
58adb739
RG
2399 tree base;
2400
546d314c
RG
2401 /* Can't hoist unanalyzable refs. */
2402 if (!MEM_ANALYZABLE (ref))
2403 return false;
2404
72425608
ZD
2405 /* Unless the reference is stored in the loop, there is nothing to do. */
2406 if (!bitmap_bit_p (ref->stored, loop->num))
2407 return false;
01fd257a 2408
72425608
ZD
2409 /* It should be movable. */
2410 if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
2411 || TREE_THIS_VOLATILE (ref->mem)
2412 || !for_each_index (&ref->mem, may_move_till, loop))
2413 return false;
ed9c043b 2414
9939e416
RG
2415 /* If it can throw, fail -- we do not properly update EH info. */
2416 if (tree_could_throw_p (ref->mem))
2417 return false;
2418
58adb739
RG
2419 /* If it can trap, it must always be executed in LOOP.
2420 Readonly memory locations may trap when storing to them, but
2421 tree_could_trap_p is a predicate for rvalues, so check that
2422 explicitly. */
2423 base = get_base_address (ref->mem);
2424 if ((tree_could_trap_p (ref->mem)
2425 || (DECL_P (base) && TREE_READONLY (base)))
2426 && !ref_always_accessed_p (loop, ref, true))
72425608 2427 return false;
ed9c043b 2428
72425608
ZD
2429 /* And it must be independent of all other memory references
2430 in LOOP. */
2431 if (!ref_indep_loop_p (loop, ref))
2432 return false;
ed9c043b 2433
72425608 2434 return true;
ed9c043b
ZD
2435}
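/* E.g. a reference that passes all of the checks above is the store to
   glob in

     for (i = 0; i < n; i++)
       glob = i;

   assuming that glob cannot be accessed through an alias inside the
   loop.  */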
2436
72425608
ZD
2437/* Marks in REFS_TO_SM the references in LOOP for which store motion should
2438 be performed. SM_EXECUTED is the set of references for which store
2439 motion was already performed in one of the outer loops. */
ed9c043b
ZD
2440
2441static void
72425608 2442find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
01fd257a 2443{
72425608
ZD
2444 bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
2445 loop->num);
2446 unsigned i;
2447 bitmap_iterator bi;
2448 mem_ref_p ref;
2449
2450 EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
2451 {
2452 ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
2453 if (can_sm_ref_p (loop, ref))
2454 bitmap_set_bit (refs_to_sm, i);
2455 }
ed9c043b 2456}
01fd257a 2457
72425608
ZD
2458/* Checks whether LOOP (with exits stored in EXITS array) is suitable
2459 for a store motion optimization (i.e. whether we can insert statements
2460 on its exit edges). */
ed9c043b 2461
72425608
ZD
2462static bool
2463loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
2464 VEC (edge, heap) *exits)
ed9c043b 2465{
72425608
ZD
2466 unsigned i;
2467 edge ex;
01fd257a 2468
ac47786e 2469 FOR_EACH_VEC_ELT (edge, exits, i, ex)
6391db68 2470 if (ex->flags & (EDGE_ABNORMAL | EDGE_EH))
72425608
ZD
2471 return false;
2472
2473 return true;
01fd257a
ZD
2474}
2475
a7e5372d 2476/* Try to perform store motion for all memory references modified inside
72425608
ZD
2477 LOOP. SM_EXECUTED is the bitmap of the memory references for which
2478 store motion was already executed in one of the outer loops. */
a7e5372d
ZD
2479
2480static void
72425608 2481store_motion_loop (struct loop *loop, bitmap sm_executed)
a7e5372d 2482{
ca83d385 2483 VEC (edge, heap) *exits = get_loop_exit_edges (loop);
72425608
ZD
2484 struct loop *subloop;
2485 bitmap sm_in_loop = BITMAP_ALLOC (NULL);
a7e5372d 2486
72425608 2487 if (loop_suitable_for_sm (loop, exits))
a7e5372d 2488 {
72425608
ZD
2489 find_refs_for_sm (loop, sm_executed, sm_in_loop);
2490 hoist_memory_references (loop, sm_in_loop, exits);
a7e5372d 2491 }
ca83d385 2492 VEC_free (edge, heap, exits);
72425608
ZD
2493
2494 bitmap_ior_into (sm_executed, sm_in_loop);
2495 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
2496 store_motion_loop (subloop, sm_executed);
2497 bitmap_and_compl_into (sm_executed, sm_in_loop);
2498 BITMAP_FREE (sm_in_loop);
a7e5372d
ZD
2499}
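/* Note that while the subloops are processed, SM_EXECUTED also contains
   the references moved out of LOOP itself (SM_IN_LOOP), so that the
   subloops do not try to move them again; those bits are removed again
   before returning to the caller.  */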
2500
2501/* Try to perform store motion for all memory references modified inside
d73be268 2502 loops. */
a7e5372d
ZD
2503
2504static void
72425608 2505store_motion (void)
a7e5372d
ZD
2506{
2507 struct loop *loop;
72425608 2508 bitmap sm_executed = BITMAP_ALLOC (NULL);
d16464bb 2509
72425608
ZD
2510 for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
2511 store_motion_loop (loop, sm_executed);
42fd6772 2512
72425608 2513 BITMAP_FREE (sm_executed);
726a989a 2514 gsi_commit_edge_inserts ();
a7e5372d
ZD
2515}
2516
2517/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
2518 for each such basic block bb records the outermost loop for which execution
2519 of its header implies execution of bb. CONTAINS_CALL is the bitmap of
2520 blocks that contain a nonpure call. */
2521
2522static void
2523fill_always_executed_in (struct loop *loop, sbitmap contains_call)
2524{
2525 basic_block bb = NULL, *bbs, last = NULL;
2526 unsigned i;
2527 edge e;
2528 struct loop *inn_loop = loop;
2529
8a519095 2530 if (ALWAYS_EXECUTED_IN (loop->header) == NULL)
a7e5372d
ZD
2531 {
2532 bbs = get_loop_body_in_dom_order (loop);
2533
2534 for (i = 0; i < loop->num_nodes; i++)
2535 {
628f6a4e 2536 edge_iterator ei;
a7e5372d
ZD
2537 bb = bbs[i];
2538
2539 if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
2540 last = bb;
2541
2542 if (TEST_BIT (contains_call, bb->index))
2543 break;
2544
628f6a4e 2545 FOR_EACH_EDGE (e, ei, bb->succs)
a7e5372d
ZD
2546 if (!flow_bb_inside_loop_p (loop, e->dest))
2547 break;
2548 if (e)
2549 break;
2550
2551 /* A loop might be infinite (TODO use simple loop analysis
2552 to disprove this if possible). */
2553 if (bb->flags & BB_IRREDUCIBLE_LOOP)
2554 break;
2555
2556 if (!flow_bb_inside_loop_p (inn_loop, bb))
2557 break;
2558
2559 if (bb->loop_father->header == bb)
2560 {
2561 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
2562 break;
2563
2564 /* In a loop that is always entered we may proceed anyway.
2565 But record that we entered it and stop once we leave it. */
2566 inn_loop = bb->loop_father;
2567 }
2568 }
2569
2570 while (1)
2571 {
8a519095 2572 SET_ALWAYS_EXECUTED_IN (last, loop);
a7e5372d
ZD
2573 if (last == loop->header)
2574 break;
2575 last = get_immediate_dominator (CDI_DOMINATORS, last);
2576 }
2577
2578 free (bbs);
2579 }
2580
2581 for (loop = loop->inner; loop; loop = loop->next)
2582 fill_always_executed_in (loop, contains_call);
2583}
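/* For example, in

     for (;;)
       {
         a;
         if (cond)
           break;
         b;
       }

   the header block containing A and the test is executed whenever the
   loop is entered, so ALWAYS_EXECUTED_IN is set for it, while the block
   containing B is not marked, since the loop may be left before reaching
   it.  */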
2584
d73be268 2585/* Compute the global information needed by the loop invariant motion pass. */
a7e5372d
ZD
2586
2587static void
d73be268 2588tree_ssa_lim_initialize (void)
a7e5372d
ZD
2589{
2590 sbitmap contains_call = sbitmap_alloc (last_basic_block);
726a989a 2591 gimple_stmt_iterator bsi;
a7e5372d
ZD
2592 struct loop *loop;
2593 basic_block bb;
2594
2595 sbitmap_zero (contains_call);
2596 FOR_EACH_BB (bb)
2597 {
726a989a 2598 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
a7e5372d 2599 {
726a989a 2600 if (nonpure_call_p (gsi_stmt (bsi)))
a7e5372d
ZD
2601 break;
2602 }
2603
726a989a 2604 if (!gsi_end_p (bsi))
a7e5372d
ZD
2605 SET_BIT (contains_call, bb->index);
2606 }
2607
d73be268 2608 for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
a7e5372d
ZD
2609 fill_always_executed_in (loop, contains_call);
2610
2611 sbitmap_free (contains_call);
726a989a
RB
2612
2613 lim_aux_data_map = pointer_map_create ();
19c0d7df
AH
2614
2615 if (flag_tm)
2616 compute_transaction_bits ();
039496da
AH
2617
2618 alloc_aux_for_edges (0);
a7e5372d
ZD
2619}
2620
2621/* Cleans up after the invariant motion pass. */
2622
2623static void
2624tree_ssa_lim_finalize (void)
2625{
2626 basic_block bb;
72425608
ZD
2627 unsigned i;
2628 bitmap b;
a7e5372d 2629
039496da
AH
2630 free_aux_for_edges ();
2631
a7e5372d 2632 FOR_EACH_BB (bb)
8a519095 2633 SET_ALWAYS_EXECUTED_IN (bb, NULL);
72425608 2634
726a989a
RB
2635 pointer_map_destroy (lim_aux_data_map);
2636
72425608
ZD
2637 VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
2638 htab_delete (memory_accesses.refs);
2639
ac47786e 2640 FOR_EACH_VEC_ELT (bitmap, memory_accesses.refs_in_loop, i, b)
72425608
ZD
2641 BITMAP_FREE (b);
2642 VEC_free (bitmap, heap, memory_accesses.refs_in_loop);
2643
ac47786e 2644 FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_in_loop, i, b)
72425608
ZD
2645 BITMAP_FREE (b);
2646 VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);
2647
546d314c 2648 FOR_EACH_VEC_ELT (bitmap, memory_accesses.all_refs_stored_in_loop, i, b)
72425608 2649 BITMAP_FREE (b);
546d314c 2650 VEC_free (bitmap, heap, memory_accesses.all_refs_stored_in_loop);
72425608
ZD
2651
2652 if (memory_accesses.ttae_cache)
2653 pointer_map_destroy (memory_accesses.ttae_cache);
a7e5372d
ZD
2654}
2655
d73be268 2656/* Moves invariants from loops. Only "expensive" invariants are moved out --
a7e5372d
ZD
2657 i.e. those that are likely to be a win regardless of the register pressure. */
2658
e3bdfed6 2659unsigned int
d73be268 2660tree_ssa_lim (void)
a7e5372d 2661{
e3bdfed6
RG
2662 unsigned int todo;
2663
d73be268 2664 tree_ssa_lim_initialize ();
a7e5372d 2665
72425608
ZD
2666 /* Gathers information about memory accesses in the loops. */
2667 analyze_memory_references ();
2668
a7e5372d
ZD
2669 /* For each statement determine the outermost loop in which it is
2670 invariant and the cost of computing the invariant. */
2671 determine_invariantness ();
2672
72425608
ZD
2673 /* Execute store motion. Force the necessary invariants to be moved
2674 out of the loops as well. */
2675 store_motion ();
a7e5372d
ZD
2676
2677 /* Move the expressions that are expensive enough. */
e3bdfed6 2678 todo = move_computations ();
a7e5372d
ZD
2679
2680 tree_ssa_lim_finalize ();
e3bdfed6
RG
2681
2682 return todo;
a7e5372d 2683}