/* Loop invariant motion.
   Copyright (C) 2003-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "tree-affine.h"
#include "tree-ssa-propagate.h"
#include "trans-mem.h"
#include "gimple-fold.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "alias.h"
#include "builtins.h"
#include "tree-dfa.h"

/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
	 {
	   a = inv;
	   something;
	 }
     }

   Where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
	 something;
     }  */

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  class loop *max_loop;		/* The outermost loop in which the statement
				   is invariant.  */

  class loop *tgt_loop;		/* The loop out of which we want to move the
				   invariant.  */

  class loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  unsigned ref;			/* The simple_mem_ref in this stmt or 0.  */

  vec<gimple *> depends;	/* Vector of statements that must also be
				   hoisted out of the loop when this statement
				   is hoisted; i.e. those that define the
				   operands of the statement and are inside of
				   the MAX_LOOP loop.  */
};

/* Maps statements to their lim_aux_data.  */

static hash_map<gimple *, lim_aux_data *> *lim_aux_data_map;

/* Description of a memory reference location.  */

struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  gimple *stmt;			/* The statement in which it occurs.  */
};

/* Description of a memory reference.  */

class im_mem_ref
{
public:
  unsigned id : 30;		/* ID assigned to the memory reference
				   (its index in memory_accesses.refs_list)  */
  unsigned ref_canonical : 1;	/* Whether mem.ref was canonicalized.  */
  unsigned ref_decomposed : 1;	/* Whether the ref was hashed from mem.  */
  hashval_t hash;		/* Its hash value.  */

  /* The memory access itself and associated caching of alias-oracle
     query meta-data.  */
  ao_ref mem;

  bitmap stored;		/* The set of loops in which this memory
				   location is stored to.  */
  vec<mem_ref_loc> accesses_in_loop;
				/* The locations of the accesses.  Vector
				   indexed by the loop number.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap_head indep_loop;	/* The set of loops in which the memory
				   reference is independent, meaning:
				   If it is stored in the loop, this store
				   is independent of all other loads and
				   stores.
				   If it is only loaded, then it is independent
				   of all stores in the loop.  */
  bitmap_head dep_loop;		/* The complement of INDEP_LOOP.  */
};

/* We use two bits per loop in the ref->{in,}dep_loop bitmaps, the first
   to record (in)dependence against stores in the loop and its subloops, the
   second to record (in)dependence against all references in the loop
   and its subloops.  */
#define LOOP_DEP_BIT(loopnum, storedp) (2 * (loopnum) + (storedp ? 1 : 0))

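/* For example, loop number 3 owns bits 6 and 7 of these bitmaps:
   LOOP_DEP_BIT (3, false) == 6 and LOOP_DEP_BIT (3, true) == 7, i.e. each
   loop's pair of bits starts at twice its loop number.  */
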
/* Mem_ref hashtable helpers.  */

struct mem_ref_hasher : nofree_ptr_hash <im_mem_ref>
{
  typedef ao_ref *compare_type;
  static inline hashval_t hash (const im_mem_ref *);
  static inline bool equal (const im_mem_ref *, const ao_ref *);
};

/* A hash function for class im_mem_ref object MEM.  */

inline hashval_t
mem_ref_hasher::hash (const im_mem_ref *mem)
{
  return mem->hash;
}

/* An equality function for class im_mem_ref object MEM1 with
   memory reference OBJ2.  */

inline bool
mem_ref_hasher::equal (const im_mem_ref *mem1, const ao_ref *obj2)
{
  if (obj2->max_size_known_p ())
    return (mem1->ref_decomposed
	    && operand_equal_p (mem1->mem.base, obj2->base, 0)
	    && known_eq (mem1->mem.offset, obj2->offset)
	    && known_eq (mem1->mem.size, obj2->size)
	    && known_eq (mem1->mem.max_size, obj2->max_size)
	    && mem1->mem.volatile_p == obj2->volatile_p
	    && (mem1->mem.ref_alias_set == obj2->ref_alias_set
		/* We are not canonicalizing alias-sets but for the
		   special-case where we didn't canonicalize yet and
		   the incoming ref is an alias-set zero MEM we pick
		   the correct one already.  */
		|| (!mem1->ref_canonical
		    && (TREE_CODE (obj2->ref) == MEM_REF
			|| TREE_CODE (obj2->ref) == TARGET_MEM_REF)
		    && obj2->ref_alias_set == 0)
		/* Likewise if there's a canonical ref with alias-set zero.  */
		|| (mem1->ref_canonical && mem1->mem.ref_alias_set == 0))
	    && types_compatible_p (TREE_TYPE (mem1->mem.ref),
				   TREE_TYPE (obj2->ref)));
  else
    return operand_equal_p (mem1->mem.ref, obj2->ref, 0);
}

/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  hash_table<mem_ref_hasher> *refs;

  /* The list of memory references.  */
  vec<im_mem_ref *> refs_list;

  /* The set of memory references accessed in each loop.  */
  vec<bitmap_head> refs_in_loop;

  /* The set of memory references stored in each loop.  */
  vec<bitmap_head> refs_stored_in_loop;

  /* The set of memory references stored in each loop, including subloops.  */
  vec<bitmap_head> all_refs_stored_in_loop;

  /* Cache for expanding memory addresses.  */
  hash_map<tree, name_expansion *> *ttae_cache;
} memory_accesses;

/* Obstack for the bitmaps in the above data structures.  */
static bitmap_obstack lim_bitmap_obstack;
static obstack mem_ref_obstack;

static bool ref_indep_loop_p (class loop *, im_mem_ref *);
static bool ref_always_accessed_p (class loop *, im_mem_ref *, bool);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) param_lim_expensive)

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((class loop *) (BB)->aux)
#define SET_ALWAYS_EXECUTED_IN(BB, VAL) ((BB)->aux = (void *) (VAL))

/* ID of the shared unanalyzable mem.  */
#define UNANALYZABLE_MEM_ID 0

/* Whether the reference was analyzable.  */
#define MEM_ANALYZABLE(REF) ((REF)->id != UNANALYZABLE_MEM_ID)

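/* Note that MEM_ANALYZABLE is false exactly for the one shared reference
   at memory_accesses.refs_list[UNANALYZABLE_MEM_ID], which
   gather_mem_refs_stmt below uses for every access it cannot analyze.  */
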
static struct lim_aux_data *
init_lim_data (gimple *stmt)
{
  lim_aux_data *p = XCNEW (struct lim_aux_data);
  lim_aux_data_map->put (stmt, p);

  return p;
}

static struct lim_aux_data *
get_lim_data (gimple *stmt)
{
  lim_aux_data **p = lim_aux_data_map->get (stmt);
  if (!p)
    return NULL;

  return *p;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  data->depends.release ();
  free (data);
}

static void
clear_lim_data (gimple *stmt)
{
  lim_aux_data **p = lim_aux_data_map->get (stmt);
  if (!p)
    return;

  free_lim_aux_data (*p);
  *p = NULL;
}

/* The possibilities of statement movement.  */
enum move_pos
  {
    MOVE_IMPOSSIBLE,		/* No movement -- side effect expression.  */
    MOVE_PRESERVE_EXECUTION,	/* Must not cause the non-executed statement
				   to become executed -- memory accesses, ...  */
    MOVE_POSSIBLE		/* Unlimited movement.  */
  };

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (gimple *stmt)
{
  tree lhs;
  enum move_pos ret = MOVE_POSSIBLE;

  if (flag_unswitch_loops
      && gimple_code (stmt) == GIMPLE_COND)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (gimple_code (stmt) == GIMPLE_PHI
      && gimple_phi_num_args (stmt) <= 2
      && !virtual_operand_p (gimple_phi_result (stmt))
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt)))
    return MOVE_POSSIBLE;

  if (gimple_get_lhs (stmt) == NULL_TREE)
    return MOVE_IMPOSSIBLE;

  if (gimple_vdef (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt)
      || gimple_has_volatile_ops (stmt)
      || gimple_has_side_effects (stmt)
      || stmt_could_throw_p (cfun, stmt))
    return MOVE_IMPOSSIBLE;

  if (is_gimple_call (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      ret = MOVE_PRESERVE_EXECUTION;
      lhs = gimple_call_lhs (stmt);
    }
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || gimple_could_trap_p (stmt))
    return MOVE_PRESERVE_EXECUTION;

  /* Non local loads in a transaction cannot be hoisted out.  Well,
     unless the load happens on every path out of the loop, but we
     don't take this into account yet.  */
  if (flag_tm
      && gimple_in_transaction (stmt)
      && gimple_assign_single_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      if (DECL_P (rhs) && is_global_var (rhs))
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "Cannot hoist conditional load of ");
	      print_generic_expr (dump_file, rhs, TDF_SLIM);
	      fprintf (dump_file, " because it is in a transaction.\n");
	    }
	  return MOVE_IMPOSSIBLE;
	}
    }

  return ret;
}
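
/* For illustration (made-up SSA names): a store like *p_1 = x_2 carries a
   virtual definition and is MOVE_IMPOSSIBLE here (store motion handles it
   separately), an integer division t_3 = a_4 / b_5 that may trap is
   MOVE_PRESERVE_EXECUTION, and plain arithmetic like t_6 = a_4 + 1 is
   MOVE_POSSIBLE.  */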

/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static class loop *
outermost_invariant_loop (tree def, class loop *loop)
{
  gimple *def_stmt;
  basic_block def_bb;
  class loop *max_loop;
  struct lim_aux_data *lim_data;

  if (!def)
    return superloop_at_depth (loop, 1);

  if (TREE_CODE (def) != SSA_NAME)
    {
      gcc_assert (is_gimple_min_invariant (def));
      return superloop_at_depth (loop, 1);
    }

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = gimple_bb (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  lim_data = get_lim_data (def_stmt);
  if (lim_data != NULL && lim_data->max_loop != NULL)
    max_loop = find_common_loop (max_loop,
				 loop_outer (lim_data->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}

/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined to the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, class loop *loop,
		bool add_cost)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = gimple_bb (def_stmt);
  class loop *max_loop;
  struct lim_aux_data *def_data;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  def_data = get_lim_data (def_stmt);
  if (!def_data)
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += def_data->cost;

  data->depends.safe_push (def_stmt);

  return true;
}

/* Returns an estimate of the cost of statement STMT.  The values here
   are just ad-hoc constants, similar to costs for inlining.  */

static unsigned
stmt_cost (gimple *stmt)
{
  /* Always try to create possibilities for unswitching.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_PHI)
    return LIM_EXPENSIVE;

  /* We should be hoisting calls if possible.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_CONSTANT_P))
	return 0;

      return LIM_EXPENSIVE;
    }

  /* Hoisting memory references out should almost surely be a win.  */
  if (gimple_references_memory_p (stmt))
    return LIM_EXPENSIVE;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return 1;

  switch (gimple_assign_rhs_code (stmt))
    {
    case MULT_EXPR:
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case DOT_PROD_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      return LIM_EXPENSIVE;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case WIDEN_LSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Shifts and rotates are usually expensive.  */
      return LIM_EXPENSIVE;

    case CONSTRUCTOR:
      /* Make vector construction cost proportional to the number
	 of elements.  */
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    case SSA_NAME:
    case PAREN_EXPR:
      /* Whether or not something is wrapped inside a PAREN_EXPR
	 should not change move cost.  Nor should an intermediate
	 unpropagated SSA name copy.  */
      return 0;

    default:
      return 1;
    }
}

/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP, NULL
   is returned instead.  */

static class loop *
outermost_indep_loop (class loop *outer, class loop *loop, im_mem_ref *ref)
{
  class loop *aloop;

  if (ref->stored && bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if ((!ref->stored || !bitmap_bit_p (ref->stored, aloop->num))
	&& ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}

/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (gimple *stmt, bool *is_store)
{
  tree *lhs, *rhs;

  /* Recognize SSA_NAME = MEM and MEM = (SSA_NAME | invariant) patterns.  */
  if (!gimple_assign_single_p (stmt))
    return NULL;

  lhs = gimple_assign_lhs_ptr (stmt);
  rhs = gimple_assign_rhs1_ptr (stmt);

  if (TREE_CODE (*lhs) == SSA_NAME && gimple_vuse (stmt))
    {
      *is_store = false;
      return rhs;
    }
  else if (gimple_vdef (stmt)
	   && (TREE_CODE (*rhs) == SSA_NAME || is_gimple_min_invariant (*rhs)))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}
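
/* For illustration (made-up SSA names): x_1 = a[i_2] is recognized as a
   simple load (*IS_STORE set to false, the returned pointer naming the
   a[i_2] reference), a[i_2] = x_1 as a simple store, while a two-operand
   statement like x_1 = y_2 + z_3 is not a single assignment from or to
   memory and yields NULL.  */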

/* From a controlling predicate in DOM determine the arguments from
   the PHI node PHI that are chosen if the predicate evaluates to
   true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if
   they are non-NULL.  Returns true if the arguments can be determined,
   else return false.  */

static bool
extract_true_false_args_from_phi (basic_block dom, gphi *phi,
				  tree *true_arg_p, tree *false_arg_p)
{
  edge te, fe;
  if (! extract_true_false_controlled_edges (dom, gimple_bb (phi),
					     &te, &fe))
    return false;

  if (true_arg_p)
    *true_arg_p = PHI_ARG_DEF (phi, te->dest_idx);
  if (false_arg_p)
    *false_arg_p = PHI_ARG_DEF (phi, fe->dest_idx);

  return true;
}
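
/* For illustration (made-up names), given the diamond

     if (cond_1)		<- last stmt of DOM
       x_2 = ...;
     else
       x_3 = ...;
     # x_4 = PHI <x_2, x_3>

   the PHI argument reached through the true edge (x_2) is stored to
   *TRUE_ARG_P and the one reached through the false edge (x_3) to
   *FALSE_ARG_P.  */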

/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  It also fills other related
   information to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (gimple *stmt, bool must_preserve_exec)
{
  basic_block bb = gimple_bb (stmt);
  class loop *loop = bb->loop_father;
  class loop *level;
  struct lim_aux_data *lim_data = get_lim_data (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  if (gphi *phi = dyn_cast <gphi *> (stmt))
    {
      use_operand_p use_p;
      unsigned min_cost = UINT_MAX;
      unsigned total_cost = 0;
      struct lim_aux_data *def_data;

      /* We will end up promoting dependencies to be unconditionally
	 evaluated.  For this reason the PHI cost (and thus the
	 cost we remove from the loop by doing the invariant motion)
	 is that of the cheapest PHI argument dependency chain.  */
      FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE)
	{
	  val = USE_FROM_PTR (use_p);

	  if (TREE_CODE (val) != SSA_NAME)
	    {
	      /* Assign const 1 to constants.  */
	      min_cost = MIN (min_cost, 1);
	      total_cost += 1;
	      continue;
	    }
	  if (!add_dependency (val, lim_data, loop, false))
	    return false;

	  gimple *def_stmt = SSA_NAME_DEF_STMT (val);
	  if (gimple_bb (def_stmt)
	      && gimple_bb (def_stmt)->loop_father == loop)
	    {
	      def_data = get_lim_data (def_stmt);
	      if (def_data)
		{
		  min_cost = MIN (min_cost, def_data->cost);
		  total_cost += def_data->cost;
		}
	    }
	}

      min_cost = MIN (min_cost, total_cost);
      lim_data->cost += min_cost;

      if (gimple_phi_num_args (phi) > 1)
	{
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
	  gimple *cond;
	  if (gsi_end_p (gsi_last_bb (dom)))
	    return false;
	  cond = gsi_stmt (gsi_last_bb (dom));
	  if (gimple_code (cond) != GIMPLE_COND)
	    return false;
	  /* Verify that this is an extended form of a diamond and
	     the PHI arguments are completely controlled by the
	     predicate in DOM.  */
	  if (!extract_true_false_args_from_phi (dom, phi, NULL, NULL))
	    return false;

	  /* Fold in dependencies and cost of the condition.  */
	  FOR_EACH_SSA_TREE_OPERAND (val, cond, iter, SSA_OP_USE)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	      def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
	      if (def_data)
		lim_data->cost += def_data->cost;
	    }

	  /* We want to avoid unconditionally executing very expensive
	     operations.  As costs for our dependencies cannot be
	     negative just claim we are not invariant for this case.
	     We also are not sure whether the control-flow inside the
	     loop will vanish.  */
	  if (total_cost - min_cost >= 2 * LIM_EXPENSIVE
	      && !(min_cost != 0
		   && total_cost / min_cost <= 2))
	    return false;

	  /* Assume that the control-flow in the loop will vanish.
	     ??? We should verify this and not artificially increase
	     the cost if that is not the case.  */
	  lim_data->cost += stmt_cost (stmt);
	}

      return true;
    }
  else
    FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
      if (!add_dependency (val, lim_data, loop, true))
	return false;

  if (gimple_vuse (stmt))
    {
      im_mem_ref *ref
	= lim_data ? memory_accesses.refs_list[lim_data->ref] : NULL;
      if (ref
	  && MEM_ANALYZABLE (ref))
	{
	  lim_data->max_loop = outermost_indep_loop (lim_data->max_loop,
						     loop, ref);
	  if (!lim_data->max_loop)
	    return false;
	}
      else if (! add_dependency (gimple_vuse (stmt), lim_data, loop, false))
	return false;
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}
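
/* A small worked example of the PHI cost logic above: for a two-argument
   PHI whose argument dependency chains cost 4 and 20, total_cost is 24 and
   min_cost 4, so only the cheapest chain's cost is credited to the PHI.
   Assuming the default of 20 for --param lim-expensive, the bail-out test
   then reads 24 - 4 >= 40 and is false, so the PHI remains a candidate.  */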

/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (gimple *stmt, class loop *orig_loop, class loop *level)
{
  class loop *stmt_loop = gimple_bb (stmt)->loop_father;
  struct lim_aux_data *lim_data;
  gimple *dep_stmt;
  unsigned i;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  lim_data = get_lim_data (stmt);
  if (lim_data != NULL && lim_data->tgt_loop != NULL)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (lim_data->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (level == lim_data->max_loop
	      || flow_loop_nested_p (lim_data->max_loop, level));

  lim_data->tgt_loop = level;
  FOR_EACH_VEC_ELT (lim_data->depends, i, dep_stmt)
    set_level (dep_stmt, orig_loop, level);
}

/* Determines an outermost loop from which we want to hoist the statement STMT.
   For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (gimple *stmt)
{
  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}

/* Returns true if STMT is a call that has side effects.  */

static bool
nonpure_call_p (gimple *stmt)
{
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;

  return gimple_has_side_effects (stmt);
}

/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static gimple *
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
  gassign *stmt, *stmt1, *stmt2;
  tree name, lhs, type;
  tree real_one;
  gimple_stmt_iterator gsi;

  stmt = as_a <gassign *> (gsi_stmt (*bsi));
  lhs = gimple_assign_lhs (stmt);
  type = TREE_TYPE (lhs);

  real_one = build_one_cst (type);

  name = make_temp_ssa_name (type, NULL, "reciptmp");
  stmt1 = gimple_build_assign (name, RDIV_EXPR, real_one,
			       gimple_assign_rhs2 (stmt));
  stmt2 = gimple_build_assign (lhs, MULT_EXPR, name,
			       gimple_assign_rhs1 (stmt));

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  gsi = *bsi;
  gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
  gsi_replace (&gsi, stmt2, true);

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
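
/* For illustration, the rewrite above turns

     lhs = a / b;

   into

     reciptmp_1 = 1.0 / b;	<- invariant, returned for hoisting
     lhs = reciptmp_1 * a;

   so the expensive division can be moved out when B is invariant even
   though A is not.  */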

/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static gimple *
rewrite_bittest (gimple_stmt_iterator *bsi)
{
  gassign *stmt;
  gimple *stmt1;
  gassign *stmt2;
  gimple *use_stmt;
  gcond *cond_stmt;
  tree lhs, name, t, a, b;
  use_operand_p use;

  stmt = as_a <gassign *> (gsi_stmt (*bsi));
  lhs = gimple_assign_lhs (stmt);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt))
    return stmt;
  cond_stmt = dyn_cast <gcond *> (use_stmt);
  if (!cond_stmt)
    return stmt;
  if (gimple_cond_lhs (cond_stmt) != lhs
      || (gimple_cond_code (cond_stmt) != NE_EXPR
	  && gimple_cond_code (cond_stmt) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (cond_stmt)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt1)))
    {
      t = gimple_assign_rhs1 (stmt1);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
	return stmt;
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
    return stmt;

  a = gimple_assign_rhs1 (stmt1);
  b = gimple_assign_rhs2 (stmt1);

  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
    {
      gimple_stmt_iterator rsi;

      /* 1 << B */
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp");
      stmt1 = gimple_build_assign (name, t);

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      name = make_temp_ssa_name (TREE_TYPE (a), NULL, "shifttmp");
      stmt2 = gimple_build_assign (name, t);

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      gimple_cond_set_rhs (cond_stmt,
			   build_int_cst_type (TREE_TYPE (name),
					       0));

      /* Don't use gsi_replace here, none of the new assignments sets
	 the variable originally set in stmt.  Move bsi to stmt1, and
	 then remove the original stmt, so that we get a chance to
	 retain debug info for it.  */
      rsi = *bsi;
      gsi_insert_before (bsi, stmt1, GSI_NEW_STMT);
      gsi_insert_before (&rsi, stmt2, GSI_SAME_STMT);
      gimple *to_release = gsi_stmt (rsi);
      gsi_remove (&rsi, true);
      release_defs (to_release);

      return stmt1;
    }

  return stmt;
}
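
/* For illustration (made-up SSA names), the rewrite above turns

     tmp_1 = a_2 >> b_3;
     tmp_4 = tmp_1 & 1;
     if (tmp_4 != 0) ...

   into

     shifttmp_5 = 1 << b_3;	<- invariant, returned for hoisting
     shifttmp_6 = a_2 & shifttmp_5;
     if (shifttmp_6 != 0) ...

   so the shift by invariant B is moved out while A stays in the loop.  */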

/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

class invariantness_dom_walker : public dom_walker
{
public:
  invariantness_dom_walker (cdi_direction direction)
    : dom_walker (direction) {}

  virtual edge before_dom_children (basic_block);
};

/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for dom_walker.  */

edge
invariantness_dom_walker::before_dom_children (basic_block bb)
{
  enum move_pos pos;
  gimple_stmt_iterator bsi;
  gimple *stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  class loop *outermost = ALWAYS_EXECUTED_IN (bb);
  struct lim_aux_data *lim_data;

  if (!loop_outer (bb->loop_father))
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  /* Look at PHI nodes, but only if there are at most two.
     ??? We could relax this further by post-processing the inserted
     code and transforming adjacent cond-exprs with the same predicate
     to control flow again.  */
  bsi = gsi_start_phis (bb);
  if (!gsi_end_p (bsi)
      && ((gsi_next (&bsi), gsi_end_p (bsi))
	  || (gsi_next (&bsi), gsi_end_p (bsi))))
    for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
      {
	stmt = gsi_stmt (bsi);

	pos = movement_possibility (stmt);
	if (pos == MOVE_IMPOSSIBLE)
	  continue;

	lim_data = get_lim_data (stmt);
	if (! lim_data)
	  lim_data = init_lim_data (stmt);
	lim_data->always_executed_in = outermost;

	if (!determine_max_movement (stmt, false))
	  {
	    lim_data->max_loop = NULL;
	    continue;
	  }

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    print_gimple_stmt (dump_file, stmt, 2);
	    fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		     loop_depth (lim_data->max_loop),
		     lim_data->cost);
	  }

	if (lim_data->cost >= LIM_EXPENSIVE)
	  set_profitable_level (stmt);
      }

  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      stmt = gsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  /* Make sure to note always_executed_in for stores to make
	     store-motion work.  */
	  else if (stmt_makes_single_store (stmt))
	    {
	      struct lim_aux_data *lim_data = get_lim_data (stmt);
	      if (! lim_data)
		lim_data = init_lim_data (stmt);
	      lim_data->always_executed_in = outermost;
	    }
	  continue;
	}

      if (is_gimple_assign (stmt)
	  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
	      == GIMPLE_BINARY_RHS))
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree op1 = gimple_assign_rhs2 (stmt);
	  class loop *ol1
	    = outermost_invariant_loop (op1, loop_containing_stmt (stmt));

	  /* If divisor is invariant, convert a/b to a*(1/b), allowing
	     reciprocal to be hoisted out of loop, saving expensive divide.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && ol1 != NULL
	      && outermost_invariant_loop (op0, ol1) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B) allowing the bit mask to be hoisted out of the loop
	     saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
	      && integer_onep (op1)
	      && TREE_CODE (op0) == SSA_NAME
	      && has_single_use (op0))
	    stmt = rewrite_bittest (&bsi);
	}

      lim_data = get_lim_data (stmt);
      if (! lim_data)
	lim_data = init_lim_data (stmt);
      lim_data->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  lim_data->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_gimple_stmt (dump_file, stmt, 2);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (lim_data->max_loop),
		   lim_data->cost);
	}

      if (lim_data->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
  return NULL;
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

unsigned int
move_computations_worker (basic_block bb)
{
  class loop *level;
  unsigned cost = 0;
  struct lim_aux_data *lim_data;
  unsigned int todo = 0;

  if (!loop_outer (bb->loop_father))
    return todo;

  for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
    {
      gassign *new_stmt;
      gphi *stmt = bsi.phi ();

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving PHI node\n");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      if (gimple_phi_num_args (stmt) == 1)
	{
	  tree arg = PHI_ARG_DEF (stmt, 0);
	  new_stmt = gimple_build_assign (gimple_phi_result (stmt),
					  TREE_CODE (arg), arg);
	}
      else
	{
	  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
	  gimple *cond = gsi_stmt (gsi_last_bb (dom));
	  tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
	  /* Get the PHI arguments corresponding to the true and false
	     edges of COND.  */
	  extract_true_false_args_from_phi (dom, stmt, &arg0, &arg1);
	  gcc_assert (arg0 && arg1);
	  t = build2 (gimple_cond_code (cond), boolean_type_node,
		      gimple_cond_lhs (cond), gimple_cond_rhs (cond));
	  new_stmt = gimple_build_assign (gimple_phi_result (stmt),
					  COND_EXPR, t, arg0, arg1);
	  todo |= TODO_cleanup_cfg;
	}
      if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (new_stmt)))
	  && (!ALWAYS_EXECUTED_IN (bb)
	      || (ALWAYS_EXECUTED_IN (bb) != level
		  && !flow_loop_nested_p (ALWAYS_EXECUTED_IN (bb), level))))
	{
	  tree lhs = gimple_assign_lhs (new_stmt);
	  SSA_NAME_RANGE_INFO (lhs) = NULL;
	}
      gsi_insert_on_edge (loop_preheader_edge (level), new_stmt);
      remove_phi_node (&bsi, false);
    }

  for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
    {
      edge e;

      gimple *stmt = gsi_stmt (bsi);

      lim_data = get_lim_data (stmt);
      if (lim_data == NULL)
	{
	  gsi_next (&bsi);
	  continue;
	}

      cost = lim_data->cost;
      level = lim_data->tgt_loop;
      clear_lim_data (stmt);

      if (!level)
	{
	  gsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed it here to force its operands to be moved if necessary.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      e = loop_preheader_edge (level);
      gcc_assert (!gimple_vdef (stmt));
      if (gimple_vuse (stmt))
	{
	  /* The new VUSE is the one from the virtual PHI in the loop
	     header or the one already present.  */
	  gphi_iterator gsi2;
	  for (gsi2 = gsi_start_phis (e->dest);
	       !gsi_end_p (gsi2); gsi_next (&gsi2))
	    {
	      gphi *phi = gsi2.phi ();
	      if (virtual_operand_p (gimple_phi_result (phi)))
		{
		  gimple_set_vuse (stmt, PHI_ARG_DEF_FROM_EDGE (phi, e));
		  break;
		}
	    }
	}
      gsi_remove (&bsi, false);
      if (gimple_has_lhs (stmt)
	  && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
	  && INTEGRAL_TYPE_P (TREE_TYPE (gimple_get_lhs (stmt)))
	  && (!ALWAYS_EXECUTED_IN (bb)
	      || !(ALWAYS_EXECUTED_IN (bb) == level
		   || flow_loop_nested_p (ALWAYS_EXECUTED_IN (bb), level))))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  SSA_NAME_RANGE_INFO (lhs) = NULL;
	}
      /* In case this is a stmt that is not unconditionally executed
	 when the target loop header is executed and the stmt may
	 invoke undefined integer or pointer overflow rewrite it to
	 unsigned arithmetic.  */
      if (is_gimple_assign (stmt)
	  && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (stmt)))
	  && arith_code_with_undefined_signed_overflow
	       (gimple_assign_rhs_code (stmt))
	  && (!ALWAYS_EXECUTED_IN (bb)
	      || !(ALWAYS_EXECUTED_IN (bb) == level
		   || flow_loop_nested_p (ALWAYS_EXECUTED_IN (bb), level))))
	gsi_insert_seq_on_edge (e, rewrite_to_defined_overflow (stmt));
      else
	gsi_insert_on_edge (e, stmt);
    }

  return todo;
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static unsigned int
move_computations (void)
{
  int *rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL, rpo, false);
  unsigned todo = 0;

  for (int i = 0; i < n; ++i)
    todo |= move_computations_worker (BASIC_BLOCK_FOR_FN (cfun, rpo[i]));

  free (rpo);

  gsi_commit_edge_inserts ();
  if (need_ssa_update_p (cfun))
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);

  return todo;
}

1291
1292/* Checks whether the statement defining variable *INDEX can be hoisted
1293 out of the loop passed in DATA. Callback for for_each_index. */
1294
1295static bool
1296may_move_till (tree ref, tree *index, void *data)
1297{
99b1c316 1298 class loop *loop = (class loop *) data, *max_loop;
a7e5372d
ZD
1299
1300 /* If REF is an array reference, check also that the step and the lower
1301 bound is invariant in LOOP. */
1302 if (TREE_CODE (ref) == ARRAY_REF)
1303 {
726a989a
RB
1304 tree step = TREE_OPERAND (ref, 3);
1305 tree lbound = TREE_OPERAND (ref, 2);
a7e5372d 1306
726a989a 1307 max_loop = outermost_invariant_loop (step, loop);
a7e5372d
ZD
1308 if (!max_loop)
1309 return false;
1310
726a989a 1311 max_loop = outermost_invariant_loop (lbound, loop);
a7e5372d
ZD
1312 if (!max_loop)
1313 return false;
1314 }
1315
1316 max_loop = outermost_invariant_loop (*index, loop);
1317 if (!max_loop)
1318 return false;
1319
1320 return true;
1321}
1322
726a989a 1323/* If OP is SSA NAME, force the statement that defines it to be
b4042a03 1324 moved out of the LOOP. ORIG_LOOP is the loop in that EXPR is used. */
a7e5372d
ZD
1325
1326static void
99b1c316 1327force_move_till_op (tree op, class loop *orig_loop, class loop *loop)
a7e5372d 1328{
355fe088 1329 gimple *stmt;
a7e5372d 1330
726a989a
RB
1331 if (!op
1332 || is_gimple_min_invariant (op))
1333 return;
a7e5372d 1334
726a989a 1335 gcc_assert (TREE_CODE (op) == SSA_NAME);
b8698a0f 1336
726a989a
RB
1337 stmt = SSA_NAME_DEF_STMT (op);
1338 if (gimple_nop_p (stmt))
a7e5372d
ZD
1339 return;
1340
726a989a 1341 set_level (stmt, orig_loop, loop);
a7e5372d
ZD
1342}
1343
1344/* Forces statement defining invariants in REF (and *INDEX) to be moved out of
b4042a03
ZD
1345 the LOOP. The reference REF is used in the loop ORIG_LOOP. Callback for
1346 for_each_index. */
1347
1348struct fmt_data
1349{
99b1c316
MS
1350 class loop *loop;
1351 class loop *orig_loop;
b4042a03 1352};
a7e5372d
ZD
1353
1354static bool
1355force_move_till (tree ref, tree *index, void *data)
1356{
c22940cd 1357 struct fmt_data *fmt_data = (struct fmt_data *) data;
a7e5372d
ZD
1358
1359 if (TREE_CODE (ref) == ARRAY_REF)
1360 {
726a989a
RB
1361 tree step = TREE_OPERAND (ref, 3);
1362 tree lbound = TREE_OPERAND (ref, 2);
a7e5372d 1363
726a989a
RB
1364 force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
1365 force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
a7e5372d
ZD
1366 }
1367
726a989a 1368 force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);
a7e5372d
ZD
1369
1370 return true;
1371}
1372
72425608
ZD
1373/* A function to free the mem_ref object OBJ. */
1374
1375static void
99b1c316 1376memref_free (class im_mem_ref *mem)
72425608 1377{
9771b263 1378 mem->accesses_in_loop.release ();
72425608
ZD
1379}
1380
1381/* Allocates and returns a memory reference description for MEM whose hash
1382 value is HASH and id is ID. */
1383
61baec41 1384static im_mem_ref *
1c852d1d 1385mem_ref_alloc (ao_ref *mem, unsigned hash, unsigned id)
72425608 1386{
99b1c316 1387 im_mem_ref *ref = XOBNEW (&mem_ref_obstack, class im_mem_ref);
1c852d1d
RB
1388 if (mem)
1389 ref->mem = *mem;
1390 else
1391 ao_ref_init (&ref->mem, error_mark_node);
72425608 1392 ref->id = id;
1c852d1d 1393 ref->ref_canonical = false;
a8aa828b 1394 ref->ref_decomposed = false;
72425608 1395 ref->hash = hash;
32500433 1396 ref->stored = NULL;
374001cb
RB
1397 bitmap_initialize (&ref->indep_loop, &lim_bitmap_obstack);
1398 bitmap_initialize (&ref->dep_loop, &lim_bitmap_obstack);
32500433 1399 ref->accesses_in_loop.create (1);
72425608
ZD
1400
1401 return ref;
1402}
1403
72425608
ZD
1404/* Records memory reference location *LOC in LOOP to the memory reference
1405 description REF. The reference occurs in statement STMT. */
a7e5372d
ZD
1406
1407static void
355fe088 1408record_mem_ref_loc (im_mem_ref *ref, gimple *stmt, tree *loc)
a7e5372d 1409{
15d19bf8 1410 mem_ref_loc aref;
15d19bf8
RB
1411 aref.stmt = stmt;
1412 aref.ref = loc;
32500433
RB
1413 ref->accesses_in_loop.safe_push (aref);
1414}
1415
1416/* Set the LOOP bit in REF stored bitmap and allocate that if
1417 necessary. Return whether a bit was changed. */
1418
1419static bool
99b1c316 1420set_ref_stored_in_loop (im_mem_ref *ref, class loop *loop)
32500433
RB
1421{
1422 if (!ref->stored)
1423 ref->stored = BITMAP_ALLOC (&lim_bitmap_obstack);
1424 return bitmap_set_bit (ref->stored, loop->num);
a7e5372d
ZD
1425}
1426
72425608 1427/* Marks reference REF as stored in LOOP. */
a7e5372d
ZD
1428
1429static void
99b1c316 1430mark_ref_stored (im_mem_ref *ref, class loop *loop)
a7e5372d 1431{
374001cb 1432 while (loop != current_loops->tree_root
32500433 1433 && set_ref_stored_in_loop (ref, loop))
374001cb 1434 loop = loop_outer (loop);
72425608
ZD
1435}
1436
1437/* Gathers memory references in statement STMT in LOOP, storing the
1438 information about them in the memory_accesses structure. Marks
1439 the vops accessed through unrecognized statements there as
1440 well. */
1441
1442static void
99b1c316 1443gather_mem_refs_stmt (class loop *loop, gimple *stmt)
72425608
ZD
1444{
1445 tree *mem = NULL;
1446 hashval_t hash;
46842bfe 1447 im_mem_ref **slot;
61baec41 1448 im_mem_ref *ref;
72425608 1449 bool is_stored;
72425608 1450 unsigned id;
a7e5372d 1451
5006671f 1452 if (!gimple_vuse (stmt))
72425608
ZD
1453 return;
1454
1455 mem = simple_mem_ref_in_stmt (stmt, &is_stored);
1456 if (!mem)
546d314c 1457 {
e6647190
RB
1458 /* We use the shared mem_ref for all unanalyzable refs. */
1459 id = UNANALYZABLE_MEM_ID;
1460 ref = memory_accesses.refs_list[id];
546d314c
RG
1461 if (dump_file && (dump_flags & TDF_DETAILS))
1462 {
1463 fprintf (dump_file, "Unanalyzed memory reference %u: ", id);
1464 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1465 }
e6647190 1466 is_stored = gimple_vdef (stmt);
72425608
ZD
1467 }
1468 else
a7e5372d 1469 {
1c852d1d
RB
1470 /* We are looking for equal refs that might differ in structure
1471 such as a.b vs. MEM[&a + 4]. So we key off the ao_ref but
1472 make sure we can canonicalize the ref in the hashtable if
1473 non-operand_equal_p refs are found. For the lookup we mark
1474 the case we want strict equality with aor.max_size == -1. */
1475 ao_ref aor;
1476 ao_ref_init (&aor, *mem);
1477 ao_ref_base (&aor);
1478 ao_ref_alias_set (&aor);
1479 HOST_WIDE_INT offset, size, max_size;
1480 poly_int64 saved_maxsize = aor.max_size, mem_off;
1481 tree mem_base;
a8aa828b 1482 bool ref_decomposed;
1c852d1d
RB
1483 if (aor.max_size_known_p ()
1484 && aor.offset.is_constant (&offset)
4c3643a5
RB
1485 && aor.size.is_constant (&size)
1486 && aor.max_size.is_constant (&max_size)
1c852d1d 1487 && size == max_size
4c3643a5 1488 && (size % BITS_PER_UNIT) == 0
79977c82
RB
1489 /* We're canonicalizing to a MEM where TYPE_SIZE specifies the
1490 size. Make sure this is consistent with the extraction. */
1491 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (*mem)))
1492 && known_eq (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (*mem))),
1493 aor.size)
1c852d1d
RB
1494 && (mem_base = get_addr_base_and_unit_offset (aor.ref, &mem_off)))
1495 {
a8aa828b 1496 ref_decomposed = true;
1c852d1d
RB
1497 hash = iterative_hash_expr (ao_ref_base (&aor), 0);
1498 hash = iterative_hash_host_wide_int (offset, hash);
1499 hash = iterative_hash_host_wide_int (size, hash);
1500 }
1501 else
1502 {
a8aa828b 1503 ref_decomposed = false;
1c852d1d
RB
1504 hash = iterative_hash_expr (aor.ref, 0);
1505 aor.max_size = -1;
1506 }
1507 slot = memory_accesses.refs->find_slot_with_hash (&aor, hash, INSERT);
1508 aor.max_size = saved_maxsize;
e6647190 1509 if (*slot)
72425608 1510 {
1c852d1d
RB
1511 if (!(*slot)->ref_canonical
1512 && !operand_equal_p (*mem, (*slot)->mem.ref, 0))
1513 {
1514 /* If we didn't yet canonicalize the hashtable ref (which
1515 we'll end up using for code insertion) and hit a second
1516 equal ref that is not structurally equivalent create
1517 a canonical ref which is a bare MEM_REF. */
1518 if (TREE_CODE (*mem) == MEM_REF
1519 || TREE_CODE (*mem) == TARGET_MEM_REF)
1520 {
1521 (*slot)->mem.ref = *mem;
1522 (*slot)->mem.base_alias_set = ao_ref_base_alias_set (&aor);
1523 }
1524 else
1525 {
1526 tree ref_alias_type = reference_alias_ptr_type (*mem);
1527 unsigned int ref_align = get_object_alignment (*mem);
1528 tree ref_type = TREE_TYPE (*mem);
1529 tree tmp = build_fold_addr_expr (unshare_expr (mem_base));
1530 if (TYPE_ALIGN (ref_type) != ref_align)
1531 ref_type = build_aligned_type (ref_type, ref_align);
1532 (*slot)->mem.ref
1533 = fold_build2 (MEM_REF, ref_type, tmp,
1534 build_int_cst (ref_alias_type, mem_off));
1535 if ((*slot)->mem.volatile_p)
1536 TREE_THIS_VOLATILE ((*slot)->mem.ref) = 1;
1537 gcc_checking_assert (TREE_CODE ((*slot)->mem.ref) == MEM_REF
1538 && is_gimple_mem_ref_addr
1539 (TREE_OPERAND ((*slot)->mem.ref,
1540 0)));
1541 (*slot)->mem.base_alias_set = (*slot)->mem.ref_alias_set;
1542 }
1543 (*slot)->ref_canonical = true;
1544 }
61baec41 1545 ref = *slot;
e6647190 1546 id = ref->id;
72425608 1547 }
e6647190
RB
1548 else
1549 {
1550 id = memory_accesses.refs_list.length ();
1c852d1d 1551 ref = mem_ref_alloc (&aor, hash, id);
a8aa828b 1552 ref->ref_decomposed = ref_decomposed;
e6647190
RB
1553 memory_accesses.refs_list.safe_push (ref);
1554 *slot = ref;
039496da 1555
e6647190
RB
1556 if (dump_file && (dump_flags & TDF_DETAILS))
1557 {
1558 fprintf (dump_file, "Memory reference %u: ", id);
1559 print_generic_expr (dump_file, ref->mem.ref, TDF_SLIM);
1560 fprintf (dump_file, "\n");
1561 }
1562 }
1563
32500433 1564 record_mem_ref_loc (ref, stmt, mem);
e6647190 1565 }
374001cb 1566 bitmap_set_bit (&memory_accesses.refs_in_loop[loop->num], ref->id);
72425608 1567 if (is_stored)
c00217fc 1568 {
374001cb 1569 bitmap_set_bit (&memory_accesses.refs_stored_in_loop[loop->num], ref->id);
c00217fc
RB
1570 mark_ref_stored (ref, loop);
1571 }
5164293a 1572 init_lim_data (stmt)->ref = ref->id;
72425608 1573 return;
a7e5372d
ZD
1574}
1575
5a2d2a79
RB
1576static unsigned *bb_loop_postorder;
1577
1578/* qsort sort function to sort blocks after their loop fathers postorder. */
1579
1580static int
8c228993
RB
1581sort_bbs_in_loop_postorder_cmp (const void *bb1_, const void *bb2_,
1582 void *bb_loop_postorder_)
5a2d2a79 1583{
8c228993 1584 unsigned *bb_loop_postorder = (unsigned *)bb_loop_postorder_;
06b4c6d2
AM
1585 basic_block bb1 = *(const basic_block *)bb1_;
1586 basic_block bb2 = *(const basic_block *)bb2_;
99b1c316
MS
1587 class loop *loop1 = bb1->loop_father;
1588 class loop *loop2 = bb2->loop_father;
5a2d2a79 1589 if (loop1->num == loop2->num)
5804f627 1590 return bb1->index - bb2->index;
5a2d2a79
RB
1591 return bb_loop_postorder[loop1->num] < bb_loop_postorder[loop2->num] ? -1 : 1;
1592}
1593
32500433
RB
1594/* qsort sort function to sort ref locs after their loop fathers postorder. */
1595
1596static int
8c228993
RB
1597sort_locs_in_loop_postorder_cmp (const void *loc1_, const void *loc2_,
1598 void *bb_loop_postorder_)
32500433 1599{
8c228993 1600 unsigned *bb_loop_postorder = (unsigned *)bb_loop_postorder_;
06b4c6d2
AM
1601 const mem_ref_loc *loc1 = (const mem_ref_loc *)loc1_;
1602 const mem_ref_loc *loc2 = (const mem_ref_loc *)loc2_;
99b1c316
MS
1603 class loop *loop1 = gimple_bb (loc1->stmt)->loop_father;
1604 class loop *loop2 = gimple_bb (loc2->stmt)->loop_father;
32500433
RB
1605 if (loop1->num == loop2->num)
1606 return 0;
1607 return bb_loop_postorder[loop1->num] < bb_loop_postorder[loop2->num] ? -1 : 1;
1608}
1609
72425608 1610/* Gathers memory references in loops. */
a7e5372d
ZD
1611
1612static void
374001cb 1613analyze_memory_references (void)
a7e5372d 1614{
726a989a 1615 gimple_stmt_iterator bsi;
5a2d2a79 1616 basic_block bb, *bbs;
99b1c316 1617 class loop *loop, *outer;
5a2d2a79 1618 unsigned i, n;
72425608 1619
5a2d2a79
RB
1620 /* Collect all basic blocks in loops and sort them by their
1621 loop father's postorder. */
1622 i = 0;
0cae8d31 1623 bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
11cd3bed 1624 FOR_EACH_BB_FN (bb, cfun)
5a2d2a79
RB
1625 if (bb->loop_father != current_loops->tree_root)
1626 bbs[i++] = bb;
1627 n = i;
8c228993
RB
1628 gcc_sort_r (bbs, n, sizeof (basic_block), sort_bbs_in_loop_postorder_cmp,
1629 bb_loop_postorder);
5a2d2a79
RB
1630
1631 /* Visit blocks in loop postorder and assign mem-ref IDs in that order.
1632 That results in better locality for all the bitmaps. */
1633 for (i = 0; i < n; ++i)
72425608 1634 {
5a2d2a79 1635 basic_block bb = bbs[i];
726a989a 1636 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
5a2d2a79 1637 gather_mem_refs_stmt (bb->loop_father, gsi_stmt (bsi));
72425608
ZD
1638 }
1639
32500433
RB
1640 /* Sort the location list of gathered memory references by their
1641 loop postorder number. */
46842bfe 1642 im_mem_ref *ref;
32500433 1643 FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref)
8c228993
RB
1644 ref->accesses_in_loop.sort (sort_locs_in_loop_postorder_cmp,
1645 bb_loop_postorder);
32500433 1646
5a2d2a79
RB
1647 free (bbs);
1648
546d314c
RG
1649 /* Propagate the information about accessed memory references up
1650 the loop hierarchy. */
f0bd40b1 1651 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
72425608 1652 {
c00217fc 1653 /* Finalize the overall touched references (including subloops). */
374001cb
RB
1654 bitmap_ior_into (&memory_accesses.all_refs_stored_in_loop[loop->num],
1655 &memory_accesses.refs_stored_in_loop[loop->num]);
c00217fc
RB
1656
1657 /* Propagate the information about accessed memory references up
1658 the loop hierarchy. */
1659 outer = loop_outer (loop);
1660 if (outer == current_loops->tree_root)
72425608
ZD
1661 continue;
1662
374001cb
RB
1663 bitmap_ior_into (&memory_accesses.all_refs_stored_in_loop[outer->num],
1664 &memory_accesses.all_refs_stored_in_loop[loop->num]);
72425608 1665 }
72425608
ZD
1666}
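/* Editor's illustration (not part of the pass): for a nest
   L1 { L2 { *p = x; } } the store is recorded in
   refs_stored_in_loop[L2->num] only while gathering; the innermost-first
   walk above then ORs it into all_refs_stored_in_loop for both L2
   and L1. */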
1667
72425608
ZD
1668/* Returns true if MEM1 and MEM2 may alias. TTAE_CACHE is used as a cache in
1669 tree_to_aff_combination_expand. */
1670
1671static bool
61baec41 1672mem_refs_may_alias_p (im_mem_ref *mem1, im_mem_ref *mem2,
39c8aaa4 1673 hash_map<tree, name_expansion *> **ttae_cache)
72425608
ZD
1674{
1675 /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
1676 object and their offsets differ in such a way that the locations cannot
1677 overlap, then they cannot alias. */
a85d87b2 1678 poly_widest_int size1, size2;
1842e4d4 1679 aff_tree off1, off2;
72425608 1680
1842e4d4 1681 /* Perform basic offset and type-based disambiguation. */
bdb01696 1682 if (!refs_may_alias_p_1 (&mem1->mem, &mem2->mem, true))
72425608 1683 return false;
a7e5372d 1684
72425608
ZD
1685 /* The expansion of addresses may be a bit expensive, thus we only do
1686 the check at -O2 and higher optimization levels. */
1687 if (optimize < 2)
1688 return true;
1689
bdb01696
RB
1690 get_inner_reference_aff (mem1->mem.ref, &off1, &size1);
1691 get_inner_reference_aff (mem2->mem.ref, &off2, &size2);
72425608
ZD
1692 aff_combination_expand (&off1, ttae_cache);
1693 aff_combination_expand (&off2, ttae_cache);
807e902e 1694 aff_combination_scale (&off1, -1);
72425608
ZD
1695 aff_combination_add (&off2, &off1);
1696
02f5d6c5 1697 if (aff_comb_cannot_overlap_p (&off2, size1, size2))
72425608
ZD
1698 return false;
1699
1700 return true;
1701}
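/* Editor's worked example (hypothetical code): in

     void f (int *p, int k, int n)
     {
       int *q = p + k;
       int *r = p + k + 1;
       for (int i = 0; i < n; ++i)
         {
           *q += i;
           *r += i;
         }
     }

   base+offset analysis alone cannot relate *q and *r, but after
   aff_combination_expand substitutes the definitions of q and r their
   byte offsets become 4*k and 4*k + 4; the difference is the constant 4
   and both access sizes are 4 bytes, so aff_comb_cannot_overlap_p proves
   the two locations disjoint and the refs are treated as independent. */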
1702
32500433
RB
1703/* Compare function for bsearch searching for reference locations
1704 in a loop. */
1705
1706static int
8c228993
RB
1707find_ref_loc_in_loop_cmp (const void *loop_, const void *loc_,
1708 void *bb_loop_postorder_)
32500433 1709{
8c228993 1710 unsigned *bb_loop_postorder = (unsigned *)bb_loop_postorder_;
99b1c316 1711 class loop *loop = (class loop *)const_cast<void *>(loop_);
32500433 1712 mem_ref_loc *loc = (mem_ref_loc *)const_cast<void *>(loc_);
99b1c316 1713 class loop *loc_loop = gimple_bb (loc->stmt)->loop_father;
32500433
RB
1714 if (loop->num == loc_loop->num
1715 || flow_loop_nested_p (loop, loc_loop))
1716 return 0;
1717 return (bb_loop_postorder[loop->num] < bb_loop_postorder[loc_loop->num]
1718 ? -1 : 1);
1719}
1720
15d19bf8
RB
1721/* Iterates over all locations of REF in LOOP and its subloops calling
1722 fn.operator() with the location as argument. When that operator
1723 returns true the iteration is stopped and true is returned.
1724 Otherwise false is returned. */
72425608 1725
15d19bf8
RB
1726template <typename FN>
1727static bool
99b1c316 1728for_all_locs_in_loop (class loop *loop, im_mem_ref *ref, FN fn)
72425608 1729{
72425608 1730 unsigned i;
61baec41 1731 mem_ref_loc *loc;
72425608 1732
32500433
RB
1733 /* Search for the cluster of locs in the accesses_in_loop vector
1734 which is sorted by the postorder index of the loop father. */
8c228993
RB
1735 loc = ref->accesses_in_loop.bsearch (loop, find_ref_loc_in_loop_cmp,
1736 bb_loop_postorder);
32500433
RB
1737 if (!loc)
1738 return false;
72425608 1739
32500433
RB
1740 /* We have found one location inside loop or its sub-loops. Iterate
1741 both forward and backward to cover the whole cluster. */
1742 i = loc - ref->accesses_in_loop.address ();
1743 while (i > 0)
1744 {
1745 --i;
61baec41 1746 mem_ref_loc *l = &ref->accesses_in_loop[i];
32500433
RB
1747 if (!flow_bb_inside_loop_p (loop, gimple_bb (l->stmt)))
1748 break;
1749 if (fn (l))
1750 return true;
1751 }
1752 for (i = loc - ref->accesses_in_loop.address ();
1753 i < ref->accesses_in_loop.length (); ++i)
1754 {
61baec41 1755 mem_ref_loc *l = &ref->accesses_in_loop[i];
32500433
RB
1756 if (!flow_bb_inside_loop_p (loop, gimple_bb (l->stmt)))
1757 break;
1758 if (fn (l))
1759 return true;
1760 }
15d19bf8
RB
1761
1762 return false;
1763}
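/* Editor's usage sketch (hypothetical functor): a visitor that merely
   counts the locations of REF in LOOP follows the same protocol as the
   classes below; returning false keeps the iteration going:

     struct count_locs
     {
       count_locs (unsigned *n_) : n (n_) {}
       bool operator () (mem_ref_loc *) { ++*n; return false; }
       unsigned *n;
     };

   invoked as for_all_locs_in_loop (loop, ref, count_locs (&n)). */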
1764
1765/* Rewrites the memory reference at location LOC with TMP_VAR. */
1766
6c1dae73 1767class rewrite_mem_ref_loc
15d19bf8 1768{
6c1dae73 1769public:
15d19bf8 1770 rewrite_mem_ref_loc (tree tmp_var_) : tmp_var (tmp_var_) {}
61baec41 1771 bool operator () (mem_ref_loc *loc);
15d19bf8
RB
1772 tree tmp_var;
1773};
1774
1775bool
61baec41 1776rewrite_mem_ref_loc::operator () (mem_ref_loc *loc)
15d19bf8
RB
1777{
1778 *loc->ref = tmp_var;
1779 update_stmt (loc->stmt);
1780 return false;
72425608
ZD
1781}
1782
1783/* Rewrites all occurrences of REF in LOOP with variable TMP_VAR. */
1784
1785static void
99b1c316 1786rewrite_mem_refs (class loop *loop, im_mem_ref *ref, tree tmp_var)
72425608 1787{
15d19bf8 1788 for_all_locs_in_loop (loop, ref, rewrite_mem_ref_loc (tmp_var));
a7e5372d
ZD
1789}
1790
9fd9ccf7
RB
1791/* Stores the first reference location in LOCP. */
1792
6c1dae73 1793class first_mem_ref_loc_1
9fd9ccf7 1794{
6c1dae73 1795public:
61baec41
TS
1796 first_mem_ref_loc_1 (mem_ref_loc **locp_) : locp (locp_) {}
1797 bool operator () (mem_ref_loc *loc);
1798 mem_ref_loc **locp;
9fd9ccf7
RB
1799};
1800
1801bool
61baec41 1802first_mem_ref_loc_1::operator () (mem_ref_loc *loc)
9fd9ccf7
RB
1803{
1804 *locp = loc;
1805 return true;
1806}
1807
1808/* Returns the first reference location to REF in LOOP. */
1809
61baec41 1810static mem_ref_loc *
99b1c316 1811first_mem_ref_loc (class loop *loop, im_mem_ref *ref)
9fd9ccf7 1812{
61baec41 1813 mem_ref_loc *locp = NULL;
9fd9ccf7
RB
1814 for_all_locs_in_loop (loop, ref, first_mem_ref_loc_1 (&locp));
1815 return locp;
1816}
1817
039496da
AH
1818struct prev_flag_edges {
1819 /* Edge to insert new flag comparison code. */
1820 edge append_cond_position;
1821
1822 /* Edge for fall through from previous flag comparison. */
1823 edge last_cond_fallthru;
1824};
1825
1826/* Helper function for execute_sm. Emit code to store TMP_VAR into
1827 MEM along edge EX.
1828
1829 The store is only done if MEM has changed. We do this so no
1830 changes to MEM occur on code paths that did not originally store
1831 into it.
1832
1833 The common case for execute_sm will transform:
1834
1835 for (...) {
1836 if (foo)
1837 stuff;
1838 else
1839 MEM = TMP_VAR;
1840 }
1841
1842 into:
1843
1844 lsm = MEM;
1845 for (...) {
1846 if (foo)
1847 stuff;
1848 else
1849 lsm = TMP_VAR;
1850 }
1851 MEM = lsm;
1852
1853 This function will generate:
1854
1855 lsm = MEM;
1856
1857 lsm_flag = false;
1858 ...
1859 for (...) {
1860 if (foo)
1861 stuff;
1862 else {
1863 lsm = TMP_VAR;
1864 lsm_flag = true;
1865 }
1866 }
1867 if (lsm_flag) <--
1868 MEM = lsm; <--
1869*/
1870
1871static void
d91561cb
JH
1872execute_sm_if_changed (edge ex, tree mem, tree tmp_var, tree flag,
1873 edge preheader, hash_set <basic_block> *flag_bbs)
039496da
AH
1874{
1875 basic_block new_bb, then_bb, old_dest;
1876 bool loop_has_only_one_exit;
1877 edge then_old_edge, orig_ex = ex;
1878 gimple_stmt_iterator gsi;
355fe088 1879 gimple *stmt;
039496da 1880 struct prev_flag_edges *prev_edges = (struct prev_flag_edges *) ex->aux;
22718afe 1881 bool irr = ex->flags & EDGE_IRREDUCIBLE_LOOP;
039496da 1882
d91561cb
JH
1883 profile_count count_sum = profile_count::zero ();
1884 int nbbs = 0, ncount = 0;
357067f2 1885 profile_probability flag_probability = profile_probability::uninitialized ();
d91561cb
JH
1886
1887 /* Flag is set in FLAG_BBS. Determine probability that flag will be true
1888 at loop exit.
1889
67914693 1890 This code may look fancy, but it cannot update the profile very realistically
d91561cb
JH
1891 because we do not know the probability that the flag will be true at a
1892 given loop exit.
1893
1894 We look for two interesting extremes:
1895 - when the exit is dominated by a block setting the flag, we know the
1896 flag will always be true. This is a common case.
1897 - when all blocks setting the flag have very low frequency, we know
1898 the flag will likely be false.
1899 In all other cases we default to 2/3 for the flag being true. */
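
 /* Editor's worked example (made-up counts): if the flag is set in a
 single block with count 10 while the preheader count is 100, the
 flag probability below becomes 10%; if that block dominates the
 exit it is forced to always (100%); and with a missing or
 inconsistent profile we fall back to the 2/3 cap. */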
1900
1901 for (hash_set<basic_block>::iterator it = flag_bbs->begin ();
1902 it != flag_bbs->end (); ++it)
1903 {
d91561cb
JH
1904 if ((*it)->count.initialized_p ())
1905 count_sum += (*it)->count, ncount ++;
1906 if (dominated_by_p (CDI_DOMINATORS, ex->src, *it))
357067f2 1907 flag_probability = profile_probability::always ();
d91561cb
JH
1908 nbbs++;
1909 }
1910
357067f2
JH
1911 profile_probability cap = profile_probability::always ().apply_scale (2, 3);
1912
1913 if (flag_probability.initialized_p ())
d91561cb 1914 ;
e7a74006
JH
1915 else if (ncount == nbbs
1916 && preheader->count () >= count_sum && preheader->count ().nonzero_p ())
d91561cb 1917 {
ef30ab83 1918 flag_probability = count_sum.probability_in (preheader->count ());
357067f2
JH
1919 if (flag_probability > cap)
1920 flag_probability = cap;
d91561cb 1921 }
e7a74006
JH
1922
1923 if (!flag_probability.initialized_p ())
357067f2 1924 flag_probability = cap;
d91561cb 1925
039496da
AH
1926 /* ?? Insert store after previous store if applicable. See note
1927 below. */
1928 if (prev_edges)
1929 ex = prev_edges->append_cond_position;
1930
1931 loop_has_only_one_exit = single_pred_p (ex->dest);
1932
1933 if (loop_has_only_one_exit)
1934 ex = split_block_after_labels (ex->dest);
4f912f91
RB
1935 else
1936 {
1937 for (gphi_iterator gpi = gsi_start_phis (ex->dest);
1938 !gsi_end_p (gpi); gsi_next (&gpi))
1939 {
1940 gphi *phi = gpi.phi ();
1941 if (virtual_operand_p (gimple_phi_result (phi)))
1942 continue;
1943
1944 /* When the destination has a non-virtual PHI node with multiple
1945 predecessors make sure we preserve the PHI structure by
1946 forcing a forwarder block so that hoisting of that PHI will
1947 still work. */
1948 split_edge (ex);
1949 break;
1950 }
1951 }
039496da
AH
1952
1953 old_dest = ex->dest;
1954 new_bb = split_edge (ex);
1955 then_bb = create_empty_bb (new_bb);
d91561cb 1956 then_bb->count = new_bb->count.apply_probability (flag_probability);
22718afe
RB
1957 if (irr)
1958 then_bb->flags = BB_IRREDUCIBLE_LOOP;
1959 add_bb_to_loop (then_bb, new_bb->loop_father);
039496da
AH
1960
1961 gsi = gsi_start_bb (new_bb);
1962 stmt = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
1963 NULL_TREE, NULL_TREE);
1964 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
1965
1966 gsi = gsi_start_bb (then_bb);
1967 /* Insert actual store. */
1968 stmt = gimple_build_assign (unshare_expr (mem), tmp_var);
1969 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
1970
d91561cb
JH
1971 edge e1 = single_succ_edge (new_bb);
1972 edge e2 = make_edge (new_bb, then_bb,
1973 EDGE_TRUE_VALUE | (irr ? EDGE_IRREDUCIBLE_LOOP : 0));
1974 e2->probability = flag_probability;
d91561cb
JH
1975
1976 e1->flags |= EDGE_FALSE_VALUE | (irr ? EDGE_IRREDUCIBLE_LOOP : 0);
1977 e1->flags &= ~EDGE_FALLTHRU;
1978
357067f2 1979 e1->probability = flag_probability.invert ();
d91561cb 1980
357067f2 1981 then_old_edge = make_single_succ_edge (then_bb, old_dest,
22718afe 1982 EDGE_FALLTHRU | (irr ? EDGE_IRREDUCIBLE_LOOP : 0));
039496da
AH
1983
1984 set_immediate_dominator (CDI_DOMINATORS, then_bb, new_bb);
1985
1986 if (prev_edges)
1987 {
1988 basic_block prevbb = prev_edges->last_cond_fallthru->src;
1989 redirect_edge_succ (prev_edges->last_cond_fallthru, new_bb);
1990 set_immediate_dominator (CDI_DOMINATORS, new_bb, prevbb);
1991 set_immediate_dominator (CDI_DOMINATORS, old_dest,
1992 recompute_dominator (CDI_DOMINATORS, old_dest));
1993 }
1994
1995 /* ?? Because stores may alias, they must happen in the exact
1996 sequence in which they originally happened. Save the position right after
1997 the (_lsm) store we just created so we can continue appending after
1998 it and maintain the original order. */
1999 {
2000 struct prev_flag_edges *p;
2001
2002 if (orig_ex->aux)
2003 orig_ex->aux = NULL;
2004 alloc_aux_for_edge (orig_ex, sizeof (struct prev_flag_edges));
2005 p = (struct prev_flag_edges *) orig_ex->aux;
2006 p->append_cond_position = then_old_edge;
2007 p->last_cond_fallthru = find_edge (new_bb, old_dest);
2008 orig_ex->aux = (void *) p;
2009 }
2010
2011 if (!loop_has_only_one_exit)
538dd0b7
DM
2012 for (gphi_iterator gpi = gsi_start_phis (old_dest);
2013 !gsi_end_p (gpi); gsi_next (&gpi))
039496da 2014 {
538dd0b7 2015 gphi *phi = gpi.phi ();
039496da
AH
2016 unsigned i;
2017
2018 for (i = 0; i < gimple_phi_num_args (phi); i++)
2019 if (gimple_phi_arg_edge (phi, i)->src == new_bb)
2020 {
2021 tree arg = gimple_phi_arg_def (phi, i);
9e227d60 2022 add_phi_arg (phi, arg, then_old_edge, UNKNOWN_LOCATION);
039496da
AH
2023 update_stmt (phi);
2024 }
2025 }
039496da
AH
2026}
2027
15d19bf8
RB
2028/* When REF is stored to at the location, set a flag indicating the store. */
2029
6c1dae73 2030class sm_set_flag_if_changed
15d19bf8 2031{
6c1dae73 2032public:
d91561cb
JH
2033 sm_set_flag_if_changed (tree flag_, hash_set <basic_block> *bbs_)
2034 : flag (flag_), bbs (bbs_) {}
61baec41 2035 bool operator () (mem_ref_loc *loc);
15d19bf8 2036 tree flag;
d91561cb 2037 hash_set <basic_block> *bbs;
15d19bf8
RB
2038};
2039
2040bool
61baec41 2041sm_set_flag_if_changed::operator () (mem_ref_loc *loc)
15d19bf8
RB
2042{
2043 /* Only set the flag for writes. */
2044 if (is_gimple_assign (loc->stmt)
2045 && gimple_assign_lhs_ptr (loc->stmt) == loc->ref)
2046 {
2047 gimple_stmt_iterator gsi = gsi_for_stmt (loc->stmt);
355fe088 2048 gimple *stmt = gimple_build_assign (flag, boolean_true_node);
15d19bf8 2049 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
d91561cb 2050 bbs->add (gimple_bb (stmt));
15d19bf8
RB
2051 }
2052 return false;
2053}
2054
039496da
AH
2055/* Helper function for execute_sm. On every location where REF is
2056 set, set an appropriate flag indicating the store. */
2057
2058static tree
99b1c316 2059execute_sm_if_changed_flag_set (class loop *loop, im_mem_ref *ref,
d91561cb 2060 hash_set <basic_block> *bbs)
039496da 2061{
039496da 2062 tree flag;
71343877 2063 char *str = get_lsm_tmp_name (ref->mem.ref, ~0, "_flag");
7cc434a3 2064 flag = create_tmp_reg (boolean_type_node, str);
d91561cb 2065 for_all_locs_in_loop (loop, ref, sm_set_flag_if_changed (flag, bbs));
039496da
AH
2066 return flag;
2067}
2068
72425608 2069/* Executes store motion of memory reference REF from LOOP.
ca83d385
ZD
2070 Exits from the LOOP are stored in EXITS. The initialization of the
2071 temporary variable is put into the preheader of the loop, and assignments
2072 to the reference from the temporary variable are emitted on the exits. */
a7e5372d
ZD
2073
2074static void
99b1c316 2075execute_sm (class loop *loop, vec<edge> exits, im_mem_ref *ref)
a7e5372d 2076{
32500433 2077 tree tmp_var, store_flag = NULL_TREE;
a7e5372d 2078 unsigned i;
538dd0b7 2079 gassign *load;
b4042a03 2080 struct fmt_data fmt_data;
9fd9ccf7 2081 edge ex;
726a989a 2082 struct lim_aux_data *lim_data;
039496da 2083 bool multi_threaded_model_p = false;
9fd9ccf7 2084 gimple_stmt_iterator gsi;
d91561cb 2085 hash_set<basic_block> flag_bbs;
a7e5372d 2086
a3631d97
ZD
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 {
2089 fprintf (dump_file, "Executing store motion of ");
ef6cb4c7 2090 print_generic_expr (dump_file, ref->mem.ref);
a3631d97
ZD
2091 fprintf (dump_file, " from loop %d\n", loop->num);
2092 }
2093
bdb01696
RB
2094 tmp_var = create_tmp_reg (TREE_TYPE (ref->mem.ref),
2095 get_lsm_tmp_name (ref->mem.ref, ~0));
a7e5372d 2096
b4042a03
ZD
2097 fmt_data.loop = loop;
2098 fmt_data.orig_loop = loop;
bdb01696 2099 for_each_index (&ref->mem.ref, force_move_till, &fmt_data);
a7e5372d 2100
7783daa7 2101 if (bb_in_transaction (loop_preheader_edge (loop)->src)
e622a32d 2102 || (! flag_store_data_races
2866f21d 2103 && ! ref_always_accessed_p (loop, ref, true)))
039496da
AH
2104 multi_threaded_model_p = true;
2105
2106 if (multi_threaded_model_p)
d91561cb 2107 store_flag = execute_sm_if_changed_flag_set (loop, ref, &flag_bbs);
039496da 2108
72425608 2109 rewrite_mem_refs (loop, ref, tmp_var);
a7e5372d 2110
9fd9ccf7
RB
2111 /* Emit the load code before the first location of the reference in
2112 the loop, so that we are sure it will be processed by
2113 move_computations after all dependencies. */
2114 gsi = gsi_for_stmt (first_mem_ref_loc (loop, ref)->stmt);
039496da
AH
2115
2116 /* FIXME/TODO: For the multi-threaded variant, we could avoid this
2117 load altogether, since the store is predicated by a flag. We
2118 could do the load only if it was originally in the loop. */
bdb01696 2119 load = gimple_build_assign (tmp_var, unshare_expr (ref->mem.ref));
726a989a
RB
2120 lim_data = init_lim_data (load);
2121 lim_data->max_loop = loop;
2122 lim_data->tgt_loop = loop;
9fd9ccf7 2123 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
a7e5372d 2124
039496da 2125 if (multi_threaded_model_p)
a7e5372d 2126 {
039496da
AH
2127 load = gimple_build_assign (store_flag, boolean_false_node);
2128 lim_data = init_lim_data (load);
2129 lim_data->max_loop = loop;
2130 lim_data->tgt_loop = loop;
9fd9ccf7 2131 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
a7e5372d 2132 }
039496da
AH
2133
2134 /* Sink the store to every exit from the loop. */
9771b263 2135 FOR_EACH_VEC_ELT (exits, i, ex)
039496da
AH
2136 if (!multi_threaded_model_p)
2137 {
538dd0b7 2138 gassign *store;
bdb01696 2139 store = gimple_build_assign (unshare_expr (ref->mem.ref), tmp_var);
039496da
AH
2140 gsi_insert_on_edge (ex, store);
2141 }
2142 else
d91561cb
JH
2143 execute_sm_if_changed (ex, ref->mem.ref, tmp_var, store_flag,
2144 loop_preheader_edge (loop), &flag_bbs);
a7e5372d
ZD
2145}
2146
72425608
ZD
2147/* Hoists memory references MEM_REFS out of LOOP. EXITS is the list of exit
2148 edges of the LOOP. */
a7e5372d
ZD
2149
2150static void
99b1c316 2151hoist_memory_references (class loop *loop, bitmap mem_refs,
9771b263 2152 vec<edge> exits)
a7e5372d 2153{
61baec41 2154 im_mem_ref *ref;
72425608
ZD
2155 unsigned i;
2156 bitmap_iterator bi;
a3631d97 2157
72425608 2158 EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
a7e5372d 2159 {
9771b263 2160 ref = memory_accesses.refs_list[i];
72425608 2161 execute_sm (loop, exits, ref);
a7e5372d 2162 }
01fd257a
ZD
2163}
2164
6c1dae73 2165class ref_always_accessed
15d19bf8 2166{
6c1dae73 2167public:
99b1c316 2168 ref_always_accessed (class loop *loop_, bool stored_p_)
32500433 2169 : loop (loop_), stored_p (stored_p_) {}
61baec41 2170 bool operator () (mem_ref_loc *loc);
99b1c316 2171 class loop *loop;
15d19bf8
RB
2172 bool stored_p;
2173};
a7e5372d 2174
15d19bf8 2175bool
61baec41 2176ref_always_accessed::operator () (mem_ref_loc *loc)
a7e5372d 2177{
99b1c316 2178 class loop *must_exec;
58adb739 2179
15d19bf8
RB
2180 if (!get_lim_data (loc->stmt))
2181 return false;
a7e5372d 2182
15d19bf8
RB
2183 /* If we require an always executed store make sure the statement
2184 stores to the reference. */
2185 if (stored_p)
72425608 2186 {
32500433
RB
2187 tree lhs = gimple_get_lhs (loc->stmt);
2188 if (!lhs
2189 || lhs != *loc->ref)
15d19bf8
RB
2190 return false;
2191 }
a7e5372d 2192
15d19bf8
RB
2193 must_exec = get_lim_data (loc->stmt)->always_executed_in;
2194 if (!must_exec)
2195 return false;
58adb739 2196
15d19bf8
RB
2197 if (must_exec == loop
2198 || flow_loop_nested_p (must_exec, loop))
2199 return true;
a7e5372d 2200
15d19bf8
RB
2201 return false;
2202}
01fd257a 2203
15d19bf8
RB
2204/* Returns true if REF is always accessed in LOOP. If STORED_P is true,
2205 additionally require that REF is always stored to in LOOP. */
2206
2207static bool
99b1c316 2208ref_always_accessed_p (class loop *loop, im_mem_ref *ref, bool stored_p)
15d19bf8 2209{
15d19bf8 2210 return for_all_locs_in_loop (loop, ref,
32500433 2211 ref_always_accessed (loop, stored_p));
01fd257a
ZD
2212}
2213
72425608 2214/* Returns true if REF1 and REF2 are independent. */
01fd257a 2215
72425608 2216static bool
61baec41 2217refs_independent_p (im_mem_ref *ref1, im_mem_ref *ref2)
01fd257a 2218{
bdb01696 2219 if (ref1 == ref2)
72425608 2220 return true;
bdb01696 2221
72425608
ZD
2222 if (dump_file && (dump_flags & TDF_DETAILS))
2223 fprintf (dump_file, "Querying dependency of refs %u and %u: ",
2224 ref1->id, ref2->id);
2225
bdb01696 2226 if (mem_refs_may_alias_p (ref1, ref2, &memory_accesses.ttae_cache))
72425608 2227 {
72425608
ZD
2228 if (dump_file && (dump_flags & TDF_DETAILS))
2229 fprintf (dump_file, "dependent.\n");
2230 return false;
2231 }
2232 else
2233 {
72425608
ZD
2234 if (dump_file && (dump_flags & TDF_DETAILS))
2235 fprintf (dump_file, "independent.\n");
2236 return true;
2237 }
01fd257a
ZD
2238}
2239
c00217fc
RB
2240/* Mark REF dependent on stores or loads (according to STORED_P) in LOOP
2241 and its super-loops. */
01fd257a
ZD
2242
2243static void
99b1c316 2244record_dep_loop (class loop *loop, im_mem_ref *ref, bool stored_p)
01fd257a 2245{
c00217fc
RB
2246 /* We can propagate dependent-in-loop bits up the loop
2247 hierarchy to all outer loops. */
2248 while (loop != current_loops->tree_root
374001cb 2249 && bitmap_set_bit (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
c00217fc 2250 loop = loop_outer (loop);
72425608 2251}
01fd257a 2252
ff4fa993 2253/* Returns true if REF is independent of all other memory
bcfaa720 2254 references in LOOP. */
01fd257a 2255
72425608 2256static bool
99b1c316 2257ref_indep_loop_p_1 (class loop *loop, im_mem_ref *ref, bool stored_p)
72425608 2258{
ff4fa993
YR
2259 stored_p |= (ref->stored && bitmap_bit_p (ref->stored, loop->num));
2260
ff4fa993 2261 bool indep_p = true;
546d314c 2262 bitmap refs_to_check;
72425608 2263
c00217fc 2264 if (stored_p)
374001cb 2265 refs_to_check = &memory_accesses.refs_in_loop[loop->num];
546d314c 2266 else
374001cb 2267 refs_to_check = &memory_accesses.refs_stored_in_loop[loop->num];
01fd257a 2268
e6647190 2269 if (bitmap_bit_p (refs_to_check, UNANALYZABLE_MEM_ID))
ff4fa993 2270 indep_p = false;
ff4fa993 2271 else
01fd257a 2272 {
ff4fa993
YR
2273 if (bitmap_bit_p (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
2274 return true;
2275 if (bitmap_bit_p (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
c00217fc 2276 return false;
01fd257a 2277
99b1c316 2278 class loop *inner = loop->inner;
ff4fa993
YR
2279 while (inner)
2280 {
bcfaa720 2281 if (!ref_indep_loop_p_1 (inner, ref, stored_p))
ff4fa993
YR
2282 {
2283 indep_p = false;
2284 break;
2285 }
2286 inner = inner->next;
2287 }
513d866d 2288
ff4fa993
YR
2289 if (indep_p)
2290 {
2291 unsigned i;
2292 bitmap_iterator bi;
2293 EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
2294 {
2295 im_mem_ref *aref = memory_accesses.refs_list[i];
2296 if (!refs_independent_p (ref, aref))
2297 {
2298 indep_p = false;
2299 break;
2300 }
2301 }
2302 }
c00217fc
RB
2303 }
2304
72425608
ZD
2305 if (dump_file && (dump_flags & TDF_DETAILS))
2306 fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
c00217fc 2307 ref->id, loop->num, indep_p ? "independent" : "dependent");
72425608 2308
c00217fc
RB
2309 /* Record the computed result in the cache. */
2310 if (indep_p)
2311 {
374001cb 2312 if (bitmap_set_bit (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p))
c00217fc
RB
2313 && stored_p)
2314 {
2315 /* If it's independent of all refs then it's independent
2316 of stores, too. */
374001cb 2317 bitmap_set_bit (&ref->indep_loop, LOOP_DEP_BIT (loop->num, false));
c00217fc
RB
2318 }
2319 }
2320 else
2321 {
2322 record_dep_loop (loop, ref, stored_p);
2323 if (!stored_p)
2324 {
2325 /* If it's dependent on stores it's dependent on
2326 all refs, too. */
2327 record_dep_loop (loop, ref, true);
2328 }
2329 }
72425608 2330
c00217fc
RB
2331 return indep_p;
2332}
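/* Editor's note on the cache encoding above: LOOP_DEP_BIT keys the
   {loop, stored_p} pair in the one-sided indep_loop/dep_loop bitmaps.
   Independence of all refs (stored_p true) implies independence of the
   stores alone, and dependence on the stores alone (stored_p false)
   implies dependence on all refs, which is why two bits are set at
   once in those cases. */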
2333
2334/* Returns true if REF is independent of all other memory references in
bcfaa720 2335 LOOP. */
c00217fc
RB
2336
2337static bool
99b1c316 2338ref_indep_loop_p (class loop *loop, im_mem_ref *ref)
c00217fc
RB
2339{
2340 gcc_checking_assert (MEM_ANALYZABLE (ref));
2341
bcfaa720 2342 return ref_indep_loop_p_1 (loop, ref, false);
01fd257a
ZD
2343}
2344
72425608 2345/* Returns true if we can perform store motion of REF from LOOP. */
01fd257a 2346
72425608 2347static bool
99b1c316 2348can_sm_ref_p (class loop *loop, im_mem_ref *ref)
01fd257a 2349{
58adb739
RG
2350 tree base;
2351
546d314c
RG
2352 /* Can't hoist unanalyzable refs. */
2353 if (!MEM_ANALYZABLE (ref))
2354 return false;
2355
72425608 2356 /* It should be movable. */
bdb01696
RB
2357 if (!is_gimple_reg_type (TREE_TYPE (ref->mem.ref))
2358 || TREE_THIS_VOLATILE (ref->mem.ref)
2359 || !for_each_index (&ref->mem.ref, may_move_till, loop))
72425608 2360 return false;
ed9c043b 2361
9939e416 2362 /* If it can throw, fail: we do not properly update EH info. */
bdb01696 2363 if (tree_could_throw_p (ref->mem.ref))
9939e416
RG
2364 return false;
2365
58adb739
RG
2366 /* If it can trap, it must always be executed in LOOP.
2367 Readonly memory locations may trap when storing to them, but
2368 tree_could_trap_p is a predicate for rvalues, so check that
2369 explicitly. */
bdb01696
RB
2370 base = get_base_address (ref->mem.ref);
2371 if ((tree_could_trap_p (ref->mem.ref)
58adb739
RG
2372 || (DECL_P (base) && TREE_READONLY (base)))
2373 && !ref_always_accessed_p (loop, ref, true))
72425608 2374 return false;
ed9c043b 2375
72425608
ZD
2376 /* And it must be independent of all other memory references
2377 in LOOP. */
bcfaa720 2378 if (!ref_indep_loop_p (loop, ref))
72425608 2379 return false;
ed9c043b 2380
72425608 2381 return true;
ed9c043b
ZD
2382}
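/* Editor's illustration (hypothetical code, simplified): in

     int g;
     void f (int n, float *p)
     {
       for (int i = 0; i < n; ++i)
         {
           g = i;        // passes: register type, cannot trap, independent
           if (i & 1)
             p[i] = i;   // fails: may trap and is not always executed
         }
     }

   only the reference to g satisfies the tests above and becomes a store
   motion candidate; the actual outcome of course depends on the alias
   and trap analyses. */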
2383
72425608
ZD
2384 /* Marks the references in LOOP for which store motion should be performed
2385 in REFS_TO_SM. SM_EXECUTED is the set of references for which store
2386 motion was performed in one of the outer loops. */
ed9c043b
ZD
2387
2388static void
99b1c316 2389find_refs_for_sm (class loop *loop, bitmap sm_executed, bitmap refs_to_sm)
01fd257a 2390{
374001cb 2391 bitmap refs = &memory_accesses.all_refs_stored_in_loop[loop->num];
72425608
ZD
2392 unsigned i;
2393 bitmap_iterator bi;
61baec41 2394 im_mem_ref *ref;
72425608
ZD
2395
2396 EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
2397 {
9771b263 2398 ref = memory_accesses.refs_list[i];
72425608
ZD
2399 if (can_sm_ref_p (loop, ref))
2400 bitmap_set_bit (refs_to_sm, i);
2401 }
ed9c043b 2402}
01fd257a 2403
72425608
ZD
2404/* Checks whether LOOP (with exits stored in EXITS array) is suitable
2405 for a store motion optimization (i.e. whether we can insert statements
2406 on its exits). */
ed9c043b 2407
72425608 2408static bool
99b1c316 2409loop_suitable_for_sm (class loop *loop ATTRIBUTE_UNUSED,
9771b263 2410 vec<edge> exits)
ed9c043b 2411{
72425608
ZD
2412 unsigned i;
2413 edge ex;
01fd257a 2414
9771b263 2415 FOR_EACH_VEC_ELT (exits, i, ex)
6391db68 2416 if (ex->flags & (EDGE_ABNORMAL | EDGE_EH))
72425608
ZD
2417 return false;
2418
2419 return true;
01fd257a
ZD
2420}
2421
a7e5372d 2422/* Try to perform store motion for all memory references modified inside
72425608
ZD
2423 LOOP. SM_EXECUTED is the bitmap of the memory references for which
2424 store motion was executed in one of the outer loops. */
a7e5372d
ZD
2425
2426static void
99b1c316 2427store_motion_loop (class loop *loop, bitmap sm_executed)
a7e5372d 2428{
9771b263 2429 vec<edge> exits = get_loop_exit_edges (loop);
99b1c316 2430 class loop *subloop;
cad1735b 2431 bitmap sm_in_loop = BITMAP_ALLOC (&lim_bitmap_obstack);
a7e5372d 2432
72425608 2433 if (loop_suitable_for_sm (loop, exits))
a7e5372d 2434 {
72425608
ZD
2435 find_refs_for_sm (loop, sm_executed, sm_in_loop);
2436 hoist_memory_references (loop, sm_in_loop, exits);
a7e5372d 2437 }
9771b263 2438 exits.release ();
72425608
ZD
2439
2440 bitmap_ior_into (sm_executed, sm_in_loop);
2441 for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
2442 store_motion_loop (subloop, sm_executed);
2443 bitmap_and_compl_into (sm_executed, sm_in_loop);
2444 BITMAP_FREE (sm_in_loop);
a7e5372d
ZD
2445}
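/* Editor's note (illustrative): SM_IN_LOOP is ORed into SM_EXECUTED
   before recursing so that subloops skip references already moved out
   of this loop, and subtracted again afterwards so that sibling loops
   are not affected. */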
2446
2447/* Try to perform store motion for all memory references modified inside
d73be268 2448 loops. */
a7e5372d
ZD
2449
2450static void
72425608 2451store_motion (void)
a7e5372d 2452{
99b1c316 2453 class loop *loop;
cad1735b 2454 bitmap sm_executed = BITMAP_ALLOC (&lim_bitmap_obstack);
d16464bb 2455
72425608
ZD
2456 for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
2457 store_motion_loop (loop, sm_executed);
42fd6772 2458
72425608 2459 BITMAP_FREE (sm_executed);
726a989a 2460 gsi_commit_edge_inserts ();
a7e5372d
ZD
2461}
2462
2463/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
2464 for each such basic block bb records the outermost loop for which execution
2465 of its header implies execution of bb. CONTAINS_CALL is the bitmap of
2466 blocks that contain a nonpure call. */
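
/* Editor's illustration (hypothetical CFG): in

     while (1)          // loop L
       {
         a = x;         // header: ALWAYS_EXECUTED_IN == L
         if (cond)
           b = y;       // conditional block: not always executed
         c = z;         // dominates the latch: also recorded as L
       }

   blocks that dominate the latch and are reached before any possible
   exit, nonpure call, or possibly infinite inner loop get
   ALWAYS_EXECUTED_IN set to L. */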
2467
2468static void
99b1c316 2469fill_always_executed_in_1 (class loop *loop, sbitmap contains_call)
a7e5372d
ZD
2470{
2471 basic_block bb = NULL, *bbs, last = NULL;
2472 unsigned i;
2473 edge e;
99b1c316 2474 class loop *inn_loop = loop;
a7e5372d 2475
8a519095 2476 if (ALWAYS_EXECUTED_IN (loop->header) == NULL)
a7e5372d
ZD
2477 {
2478 bbs = get_loop_body_in_dom_order (loop);
2479
2480 for (i = 0; i < loop->num_nodes; i++)
2481 {
628f6a4e 2482 edge_iterator ei;
a7e5372d
ZD
2483 bb = bbs[i];
2484
2485 if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
2486 last = bb;
2487
d7c028c0 2488 if (bitmap_bit_p (contains_call, bb->index))
a7e5372d
ZD
2489 break;
2490
628f6a4e 2491 FOR_EACH_EDGE (e, ei, bb->succs)
964ef24c
RB
2492 {
2493 /* If there is an exit from this BB. */
2494 if (!flow_bb_inside_loop_p (loop, e->dest))
2495 break;
2496 /* Or we enter a possibly non-finite loop. */
2497 if (flow_loop_nested_p (bb->loop_father,
2498 e->dest->loop_father)
2499 && ! finite_loop_p (e->dest->loop_father))
2500 break;
2501 }
a7e5372d
ZD
2502 if (e)
2503 break;
2504
2505 /* A loop might be infinite (TODO use simple loop analysis
2506 to disprove this if possible). */
2507 if (bb->flags & BB_IRREDUCIBLE_LOOP)
2508 break;
2509
2510 if (!flow_bb_inside_loop_p (inn_loop, bb))
2511 break;
2512
2513 if (bb->loop_father->header == bb)
2514 {
2515 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
2516 break;
2517
2518 /* In a loop that is always entered we may proceed anyway.
2519 But record that we entered it and stop once we leave it. */
2520 inn_loop = bb->loop_father;
2521 }
2522 }
2523
2524 while (1)
2525 {
8a519095 2526 SET_ALWAYS_EXECUTED_IN (last, loop);
a7e5372d
ZD
2527 if (last == loop->header)
2528 break;
2529 last = get_immediate_dominator (CDI_DOMINATORS, last);
2530 }
2531
2532 free (bbs);
2533 }
2534
2535 for (loop = loop->inner; loop; loop = loop->next)
374001cb 2536 fill_always_executed_in_1 (loop, contains_call);
a7e5372d
ZD
2537}
2538
374001cb
RB
2539/* Fills ALWAYS_EXECUTED_IN information for basic blocks, i.e.
2540 for each such basic block bb records the outermost loop for which execution
2541 of its header implies execution of bb. */
a7e5372d
ZD
2542
2543static void
374001cb 2544fill_always_executed_in (void)
a7e5372d 2545{
a7e5372d 2546 basic_block bb;
99b1c316 2547 class loop *loop;
3f9b14ff 2548
7ba9e72d 2549 auto_sbitmap contains_call (last_basic_block_for_fn (cfun));
f61e445a 2550 bitmap_clear (contains_call);
11cd3bed 2551 FOR_EACH_BB_FN (bb, cfun)
a7e5372d 2552 {
374001cb
RB
2553 gimple_stmt_iterator gsi;
2554 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a7e5372d 2555 {
374001cb 2556 if (nonpure_call_p (gsi_stmt (gsi)))
a7e5372d
ZD
2557 break;
2558 }
2559
374001cb 2560 if (!gsi_end_p (gsi))
d7c028c0 2561 bitmap_set_bit (contains_call, bb->index);
a7e5372d
ZD
2562 }
2563
d73be268 2564 for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
374001cb 2565 fill_always_executed_in_1 (loop, contains_call);
374001cb
RB
2566}
2567
2568
2569/* Compute the global information needed by the loop invariant motion pass. */
726a989a 2570
374001cb
RB
2571static void
2572tree_ssa_lim_initialize (void)
2573{
99b1c316 2574 class loop *loop;
374001cb
RB
2575 unsigned i;
2576
2577 bitmap_obstack_initialize (&lim_bitmap_obstack);
32500433 2578 gcc_obstack_init (&mem_ref_obstack);
355fe088 2579 lim_aux_data_map = new hash_map<gimple *, lim_aux_data *>;
19c0d7df
AH
2580
2581 if (flag_tm)
2582 compute_transaction_bits ();
039496da
AH
2583
2584 alloc_aux_for_edges (0);
374001cb 2585
c203e8a7 2586 memory_accesses.refs = new hash_table<mem_ref_hasher> (100);
374001cb
RB
2587 memory_accesses.refs_list.create (100);
2588 /* Allocate a special, unanalyzable mem-ref with ID zero. */
2589 memory_accesses.refs_list.quick_push
1c852d1d 2590 (mem_ref_alloc (NULL, 0, UNANALYZABLE_MEM_ID));
374001cb 2591
0fc822d0
RB
2592 memory_accesses.refs_in_loop.create (number_of_loops (cfun));
2593 memory_accesses.refs_in_loop.quick_grow (number_of_loops (cfun));
2594 memory_accesses.refs_stored_in_loop.create (number_of_loops (cfun));
2595 memory_accesses.refs_stored_in_loop.quick_grow (number_of_loops (cfun));
2596 memory_accesses.all_refs_stored_in_loop.create (number_of_loops (cfun));
2597 memory_accesses.all_refs_stored_in_loop.quick_grow (number_of_loops (cfun));
374001cb 2598
0fc822d0 2599 for (i = 0; i < number_of_loops (cfun); i++)
374001cb
RB
2600 {
2601 bitmap_initialize (&memory_accesses.refs_in_loop[i],
2602 &lim_bitmap_obstack);
2603 bitmap_initialize (&memory_accesses.refs_stored_in_loop[i],
2604 &lim_bitmap_obstack);
2605 bitmap_initialize (&memory_accesses.all_refs_stored_in_loop[i],
2606 &lim_bitmap_obstack);
2607 }
2608
2609 memory_accesses.ttae_cache = NULL;
32500433
RB
2610
2611 /* Initialize bb_loop_postorder with a mapping from loop->num to
2612 its postorder index. */
2613 i = 0;
2614 bb_loop_postorder = XNEWVEC (unsigned, number_of_loops (cfun));
2615 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
2616 bb_loop_postorder[loop->num] = i++;
a7e5372d
ZD
2617}
2618
2619/* Cleans up after the invariant motion pass. */
2620
2621static void
2622tree_ssa_lim_finalize (void)
2623{
2624 basic_block bb;
72425608 2625 unsigned i;
61baec41 2626 im_mem_ref *ref;
a7e5372d 2627
039496da
AH
2628 free_aux_for_edges ();
2629
11cd3bed 2630 FOR_EACH_BB_FN (bb, cfun)
8a519095 2631 SET_ALWAYS_EXECUTED_IN (bb, NULL);
72425608 2632
3f9b14ff 2633 bitmap_obstack_release (&lim_bitmap_obstack);
b787e7a2 2634 delete lim_aux_data_map;
726a989a 2635
c203e8a7
TS
2636 delete memory_accesses.refs;
2637 memory_accesses.refs = NULL;
72425608 2638
9771b263 2639 FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref)
f5843d08 2640 memref_free (ref);
9771b263 2641 memory_accesses.refs_list.release ();
32500433 2642 obstack_free (&mem_ref_obstack, NULL);
f5843d08 2643
9771b263 2644 memory_accesses.refs_in_loop.release ();
c00217fc 2645 memory_accesses.refs_stored_in_loop.release ();
9771b263 2646 memory_accesses.all_refs_stored_in_loop.release ();
72425608
ZD
2647
2648 if (memory_accesses.ttae_cache)
b6db991c 2649 free_affine_expand_cache (&memory_accesses.ttae_cache);
32500433
RB
2650
2651 free (bb_loop_postorder);
a7e5372d
ZD
2652}
2653
d73be268 2654/* Moves invariants from loops. Only "expensive" invariants are moved out --
a7e5372d
ZD
2655 i.e. those that are likely to be a win regardless of the register pressure. */
2656
9c37629f 2657static unsigned int
d73be268 2658tree_ssa_lim (void)
a7e5372d 2659{
e3bdfed6
RG
2660 unsigned int todo;
2661
d73be268 2662 tree_ssa_lim_initialize ();
a7e5372d 2663
72425608
ZD
2664 /* Gathers information about memory accesses in the loops. */
2665 analyze_memory_references ();
2666
374001cb
RB
2667 /* Fills ALWAYS_EXECUTED_IN information for basic blocks. */
2668 fill_always_executed_in ();
2669
a7e5372d
ZD
2670 /* For each statement determine the outermost loop in which it is
2671 invariant and the cost of computing the invariant. */
4d9192b5
TS
2672 invariantness_dom_walker (CDI_DOMINATORS)
2673 .walk (cfun->cfg->x_entry_block_ptr);
a7e5372d 2674
72425608
ZD
2675 /* Execute store motion. Force the necessary invariants to be moved
2676 out of the loops as well. */
2677 store_motion ();
a7e5372d
ZD
2678
2679 /* Move the expressions that are expensive enough. */
e3bdfed6 2680 todo = move_computations ();
a7e5372d
ZD
2681
2682 tree_ssa_lim_finalize ();
e3bdfed6
RG
2683
2684 return todo;
a7e5372d 2685}
71343877
AM
2686
2687/* Loop invariant motion pass. */
2688
71343877
AM
2689namespace {
2690
2691const pass_data pass_data_lim =
2692{
2693 GIMPLE_PASS, /* type */
2694 "lim", /* name */
2695 OPTGROUP_LOOP, /* optinfo_flags */
71343877
AM
2696 TV_LIM, /* tv_id */
2697 PROP_cfg, /* properties_required */
2698 0, /* properties_provided */
2699 0, /* properties_destroyed */
2700 0, /* todo_flags_start */
2701 0, /* todo_flags_finish */
2702};
2703
2704class pass_lim : public gimple_opt_pass
2705{
2706public:
2707 pass_lim (gcc::context *ctxt)
2708 : gimple_opt_pass (pass_data_lim, ctxt)
2709 {}
2710
2711 /* opt_pass methods: */
2712 opt_pass * clone () { return new pass_lim (m_ctxt); }
1a3d085c 2713 virtual bool gate (function *) { return flag_tree_loop_im != 0; }
be55bfe6 2714 virtual unsigned int execute (function *);
71343877
AM
2715
2716}; // class pass_lim
2717
be55bfe6
TS
2718unsigned int
2719pass_lim::execute (function *fun)
2720{
9c37629f
TV
2721 bool in_loop_pipeline = scev_initialized_p ();
2722 if (!in_loop_pipeline)
2723 loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
2724
be55bfe6
TS
2725 if (number_of_loops (fun) <= 1)
2726 return 0;
9c37629f 2727 unsigned int todo = tree_ssa_lim ();
be55bfe6 2728
9c37629f
TV
2729 if (!in_loop_pipeline)
2730 loop_optimizer_finalize ();
144a822f
RB
2731 else
2732 scev_reset ();
9c37629f 2733 return todo;
be55bfe6
TS
2734}
2735
71343877
AM
2736} // anon namespace
2737
2738gimple_opt_pass *
2739make_pass_lim (gcc::context *ctxt)
2740{
2741 return new pass_lim (ctxt);
2742}
2743
2744