]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ira-build.c
-fuse-caller-save - Collect register usage information
[thirdparty/gcc.git] / gcc / ira-build.c
CommitLineData
058e97ec 1/* Building internal representation for IRA.
23a5b65a 2 Copyright (C) 2006-2014 Free Software Foundation, Inc.
058e97ec
VM
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tm_p.h"
27#include "target.h"
28#include "regs.h"
29#include "flags.h"
30#include "hard-reg-set.h"
31#include "basic-block.h"
32#include "insn-config.h"
33#include "recog.h"
718f9c0f 34#include "diagnostic-core.h"
058e97ec
VM
35#include "params.h"
36#include "df.h"
058e97ec
VM
37#include "reload.h"
38#include "sparseset.h"
39#include "ira-int.h"
5936d944 40#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
058e97ec
VM
41
42static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
43 ira_loop_tree_node_t);
44
45/* The root of the loop tree corresponding to the all function. */
46ira_loop_tree_node_t ira_loop_tree_root;
47
48/* Height of the loop tree. */
49int ira_loop_tree_height;
50
51/* All nodes representing basic blocks are referred through the
52 following array. We can not use basic block member `aux' for this
53 because it is used for insertion of insns on edges. */
54ira_loop_tree_node_t ira_bb_nodes;
55
56/* All nodes representing loops are referred through the following
57 array. */
58ira_loop_tree_node_t ira_loop_nodes;
59
caff7edf
JJ
60/* And size of the ira_loop_nodes array. */
61unsigned int ira_loop_nodes_count;
62
b8698a0f 63/* Map regno -> allocnos with given regno (see comments for
058e97ec
VM
64 allocno member `next_regno_allocno'). */
65ira_allocno_t *ira_regno_allocno_map;
66
67/* Array of references to all allocnos. The order number of the
68 allocno corresponds to the index in the array. Removed allocnos
69 have NULL element value. */
70ira_allocno_t *ira_allocnos;
71
72/* Sizes of the previous array. */
73int ira_allocnos_num;
74
a49ae217
BS
75/* Count of conflict record structures we've created, used when creating
76 a new conflict id. */
77int ira_objects_num;
78
79/* Map a conflict id to its conflict record. */
80ira_object_t *ira_object_id_map;
058e97ec 81
3b6d1699
VM
82/* Array of references to all allocno preferences. The order number
83 of the preference corresponds to the index in the array. */
84ira_pref_t *ira_prefs;
85
86/* Size of the previous array. */
87int ira_prefs_num;
88
058e97ec
VM
89/* Array of references to all copies. The order number of the copy
90 corresponds to the index in the array. Removed copies have NULL
91 element value. */
92ira_copy_t *ira_copies;
93
94/* Size of the previous array. */
95int ira_copies_num;
96
97\f
98
99/* LAST_BASIC_BLOCK before generating additional insns because of live
100 range splitting. Emitting insns on a critical edge creates a new
101 basic block. */
102static int last_basic_block_before_change;
103
2608d841
VM
104/* Initialize some members in loop tree node NODE. Use LOOP_NUM for
105 the member loop_num. */
058e97ec 106static void
2608d841
VM
107init_loop_tree_node (struct ira_loop_tree_node *node, int loop_num)
108{
109 int max_regno = max_reg_num ();
110
111 node->regno_allocno_map
112 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
113 memset (node->regno_allocno_map, 0, sizeof (ira_allocno_t) * max_regno);
114 memset (node->reg_pressure, 0, sizeof (node->reg_pressure));
115 node->all_allocnos = ira_allocate_bitmap ();
116 node->modified_regnos = ira_allocate_bitmap ();
117 node->border_allocnos = ira_allocate_bitmap ();
118 node->local_copies = ira_allocate_bitmap ();
119 node->loop_num = loop_num;
120 node->children = NULL;
121 node->subloops = NULL;
122}
123
124
125/* The following function allocates the loop tree nodes. If
126 CURRENT_LOOPS is NULL, the nodes corresponding to the loops (except
127 the root which corresponds the all function) will be not allocated
128 but nodes will still be allocated for basic blocks. */
129static void
130create_loop_tree_nodes (void)
058e97ec
VM
131{
132 unsigned int i, j;
058e97ec
VM
133 bool skip_p;
134 edge_iterator ei;
135 edge e;
9771b263 136 vec<edge> edges;
058e97ec
VM
137 loop_p loop;
138
139 ira_bb_nodes
140 = ((struct ira_loop_tree_node *)
8b1c6fd7
DM
141 ira_allocate (sizeof (struct ira_loop_tree_node)
142 * last_basic_block_for_fn (cfun)));
143 last_basic_block_before_change = last_basic_block_for_fn (cfun);
144 for (i = 0; i < (unsigned int) last_basic_block_for_fn (cfun); i++)
058e97ec
VM
145 {
146 ira_bb_nodes[i].regno_allocno_map = NULL;
147 memset (ira_bb_nodes[i].reg_pressure, 0,
148 sizeof (ira_bb_nodes[i].reg_pressure));
49d988e7 149 ira_bb_nodes[i].all_allocnos = NULL;
058e97ec
VM
150 ira_bb_nodes[i].modified_regnos = NULL;
151 ira_bb_nodes[i].border_allocnos = NULL;
152 ira_bb_nodes[i].local_copies = NULL;
153 }
2608d841
VM
154 if (current_loops == NULL)
155 {
caff7edf 156 ira_loop_nodes_count = 1;
2608d841
VM
157 ira_loop_nodes = ((struct ira_loop_tree_node *)
158 ira_allocate (sizeof (struct ira_loop_tree_node)));
159 init_loop_tree_node (ira_loop_nodes, 0);
160 return;
161 }
0fc822d0 162 ira_loop_nodes_count = number_of_loops (cfun);
058e97ec
VM
163 ira_loop_nodes = ((struct ira_loop_tree_node *)
164 ira_allocate (sizeof (struct ira_loop_tree_node)
caff7edf 165 * ira_loop_nodes_count));
0fc822d0 166 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
058e97ec 167 {
661bc682 168 if (loop_outer (loop) != NULL)
058e97ec
VM
169 {
170 ira_loop_nodes[i].regno_allocno_map = NULL;
058e97ec
VM
171 skip_p = false;
172 FOR_EACH_EDGE (e, ei, loop->header->preds)
173 if (e->src != loop->latch
174 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
175 {
176 skip_p = true;
177 break;
178 }
179 if (skip_p)
180 continue;
181 edges = get_loop_exit_edges (loop);
9771b263 182 FOR_EACH_VEC_ELT (edges, j, e)
058e97ec
VM
183 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
184 {
185 skip_p = true;
186 break;
187 }
9771b263 188 edges.release ();
058e97ec
VM
189 if (skip_p)
190 continue;
191 }
2608d841 192 init_loop_tree_node (&ira_loop_nodes[i], loop->num);
058e97ec
VM
193 }
194}
195
196/* The function returns TRUE if there are more one allocation
197 region. */
198static bool
199more_one_region_p (void)
200{
201 unsigned int i;
202 loop_p loop;
203
2608d841 204 if (current_loops != NULL)
0fc822d0 205 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
2608d841
VM
206 if (ira_loop_nodes[i].regno_allocno_map != NULL
207 && ira_loop_tree_root != &ira_loop_nodes[i])
208 return true;
058e97ec
VM
209 return false;
210}
211
212/* Free the loop tree node of a loop. */
213static void
214finish_loop_tree_node (ira_loop_tree_node_t loop)
215{
216 if (loop->regno_allocno_map != NULL)
217 {
218 ira_assert (loop->bb == NULL);
219 ira_free_bitmap (loop->local_copies);
220 ira_free_bitmap (loop->border_allocnos);
221 ira_free_bitmap (loop->modified_regnos);
49d988e7 222 ira_free_bitmap (loop->all_allocnos);
058e97ec
VM
223 ira_free (loop->regno_allocno_map);
224 loop->regno_allocno_map = NULL;
225 }
226}
227
228/* Free the loop tree nodes. */
229static void
230finish_loop_tree_nodes (void)
231{
232 unsigned int i;
058e97ec 233
caff7edf
JJ
234 for (i = 0; i < ira_loop_nodes_count; i++)
235 finish_loop_tree_node (&ira_loop_nodes[i]);
058e97ec
VM
236 ira_free (ira_loop_nodes);
237 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
238 {
239 if (ira_bb_nodes[i].local_copies != NULL)
240 ira_free_bitmap (ira_bb_nodes[i].local_copies);
241 if (ira_bb_nodes[i].border_allocnos != NULL)
242 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
243 if (ira_bb_nodes[i].modified_regnos != NULL)
244 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
49d988e7
VM
245 if (ira_bb_nodes[i].all_allocnos != NULL)
246 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
058e97ec
VM
247 if (ira_bb_nodes[i].regno_allocno_map != NULL)
248 ira_free (ira_bb_nodes[i].regno_allocno_map);
249 }
250 ira_free (ira_bb_nodes);
251}
252
253\f
254
255/* The following recursive function adds LOOP to the loop tree
2608d841
VM
256 hierarchy. LOOP is added only once. If LOOP is NULL we adding
257 loop designating the whole function when CFG loops are not
258 built. */
058e97ec
VM
259static void
260add_loop_to_tree (struct loop *loop)
261{
2608d841 262 int loop_num;
058e97ec
VM
263 struct loop *parent;
264 ira_loop_tree_node_t loop_node, parent_node;
265
266 /* We can not use loop node access macros here because of potential
267 checking and because the nodes are not initialized enough
268 yet. */
2608d841 269 if (loop != NULL && loop_outer (loop) != NULL)
058e97ec 270 add_loop_to_tree (loop_outer (loop));
2608d841
VM
271 loop_num = loop != NULL ? loop->num : 0;
272 if (ira_loop_nodes[loop_num].regno_allocno_map != NULL
273 && ira_loop_nodes[loop_num].children == NULL)
058e97ec
VM
274 {
275 /* We have not added loop node to the tree yet. */
2608d841 276 loop_node = &ira_loop_nodes[loop_num];
058e97ec
VM
277 loop_node->loop = loop;
278 loop_node->bb = NULL;
2608d841
VM
279 if (loop == NULL)
280 parent = NULL;
281 else
282 {
283 for (parent = loop_outer (loop);
284 parent != NULL;
285 parent = loop_outer (parent))
286 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
287 break;
288 }
058e97ec
VM
289 if (parent == NULL)
290 {
291 loop_node->next = NULL;
292 loop_node->subloop_next = NULL;
293 loop_node->parent = NULL;
294 }
295 else
296 {
297 parent_node = &ira_loop_nodes[parent->num];
298 loop_node->next = parent_node->children;
299 parent_node->children = loop_node;
300 loop_node->subloop_next = parent_node->subloops;
301 parent_node->subloops = loop_node;
302 loop_node->parent = parent_node;
303 }
304 }
305}
306
307/* The following recursive function sets up levels of nodes of the
308 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
309 The function returns maximal value of level in the tree + 1. */
310static int
311setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
312{
313 int height, max_height;
314 ira_loop_tree_node_t subloop_node;
315
316 ira_assert (loop_node->bb == NULL);
317 loop_node->level = level;
318 max_height = level + 1;
319 for (subloop_node = loop_node->subloops;
320 subloop_node != NULL;
321 subloop_node = subloop_node->subloop_next)
322 {
323 ira_assert (subloop_node->bb == NULL);
324 height = setup_loop_tree_level (subloop_node, level + 1);
325 if (height > max_height)
326 max_height = height;
327 }
328 return max_height;
329}
330
331/* Create the loop tree. The algorithm is designed to provide correct
332 order of loops (they are ordered by their last loop BB) and basic
333 blocks in the chain formed by member next. */
334static void
335form_loop_tree (void)
336{
058e97ec
VM
337 basic_block bb;
338 struct loop *parent;
339 ira_loop_tree_node_t bb_node, loop_node;
058e97ec
VM
340
341 /* We can not use loop/bb node access macros because of potential
342 checking and because the nodes are not initialized enough
343 yet. */
11cd3bed 344 FOR_EACH_BB_FN (bb, cfun)
058e97ec
VM
345 {
346 bb_node = &ira_bb_nodes[bb->index];
347 bb_node->bb = bb;
348 bb_node->loop = NULL;
349 bb_node->subloops = NULL;
350 bb_node->children = NULL;
351 bb_node->subloop_next = NULL;
352 bb_node->next = NULL;
2608d841
VM
353 if (current_loops == NULL)
354 parent = NULL;
355 else
356 {
357 for (parent = bb->loop_father;
358 parent != NULL;
359 parent = loop_outer (parent))
360 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
361 break;
362 }
058e97ec 363 add_loop_to_tree (parent);
2608d841 364 loop_node = &ira_loop_nodes[parent == NULL ? 0 : parent->num];
058e97ec
VM
365 bb_node->next = loop_node->children;
366 bb_node->parent = loop_node;
367 loop_node->children = bb_node;
368 }
2608d841 369 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (0);
058e97ec
VM
370 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
371 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
372}
373
374\f
375
376/* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
377 tree nodes. */
378static void
379rebuild_regno_allocno_maps (void)
380{
381 unsigned int l;
382 int max_regno, regno;
383 ira_allocno_t a;
384 ira_loop_tree_node_t loop_tree_node;
385 loop_p loop;
386 ira_allocno_iterator ai;
387
2608d841 388 ira_assert (current_loops != NULL);
058e97ec 389 max_regno = max_reg_num ();
0fc822d0 390 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), l, loop)
058e97ec
VM
391 if (ira_loop_nodes[l].regno_allocno_map != NULL)
392 {
393 ira_free (ira_loop_nodes[l].regno_allocno_map);
394 ira_loop_nodes[l].regno_allocno_map
395 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
396 * max_regno);
397 memset (ira_loop_nodes[l].regno_allocno_map, 0,
398 sizeof (ira_allocno_t) * max_regno);
399 }
400 ira_free (ira_regno_allocno_map);
401 ira_regno_allocno_map
402 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
403 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
404 FOR_EACH_ALLOCNO (a, ai)
405 {
406 if (ALLOCNO_CAP_MEMBER (a) != NULL)
407 /* Caps are not in the regno allocno maps. */
408 continue;
409 regno = ALLOCNO_REGNO (a);
410 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
411 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
412 ira_regno_allocno_map[regno] = a;
413 if (loop_tree_node->regno_allocno_map[regno] == NULL)
414 /* Remember that we can create temporary allocnos to break
415 cycles in register shuffle. */
416 loop_tree_node->regno_allocno_map[regno] = a;
417 }
418}
058e97ec
VM
419\f
420
a49ae217
BS
421/* Pools for allocnos, allocno live ranges and objects. */
422static alloc_pool allocno_pool, live_range_pool, object_pool;
058e97ec
VM
423
424/* Vec containing references to all created allocnos. It is a
425 container of array allocnos. */
9771b263 426static vec<ira_allocno_t> allocno_vec;
058e97ec 427
a49ae217
BS
428/* Vec containing references to all created ira_objects. It is a
429 container of ira_object_id_map. */
9771b263 430static vec<ira_object_t> ira_object_id_map_vec;
058e97ec
VM
431
432/* Initialize data concerning allocnos. */
433static void
434initiate_allocnos (void)
435{
b14151b5
BS
436 live_range_pool
437 = create_alloc_pool ("live ranges",
438 sizeof (struct live_range), 100);
058e97ec
VM
439 allocno_pool
440 = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
a49ae217
BS
441 object_pool
442 = create_alloc_pool ("objects", sizeof (struct ira_object), 100);
9771b263 443 allocno_vec.create (max_reg_num () * 2);
058e97ec
VM
444 ira_allocnos = NULL;
445 ira_allocnos_num = 0;
a49ae217 446 ira_objects_num = 0;
9771b263 447 ira_object_id_map_vec.create (max_reg_num () * 2);
a49ae217 448 ira_object_id_map = NULL;
058e97ec 449 ira_regno_allocno_map
1756cb66
VM
450 = (ira_allocno_t *) ira_allocate (max_reg_num ()
451 * sizeof (ira_allocno_t));
058e97ec
VM
452 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
453}
454
a49ae217
BS
455/* Create and return an object corresponding to a new allocno A. */
456static ira_object_t
ac0ab4f7 457ira_create_object (ira_allocno_t a, int subword)
a49ae217 458{
1756cb66 459 enum reg_class aclass = ALLOCNO_CLASS (a);
a49ae217
BS
460 ira_object_t obj = (ira_object_t) pool_alloc (object_pool);
461
462 OBJECT_ALLOCNO (obj) = a;
ac0ab4f7 463 OBJECT_SUBWORD (obj) = subword;
a49ae217
BS
464 OBJECT_CONFLICT_ID (obj) = ira_objects_num;
465 OBJECT_CONFLICT_VEC_P (obj) = false;
466 OBJECT_CONFLICT_ARRAY (obj) = NULL;
467 OBJECT_NUM_CONFLICTS (obj) = 0;
468 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
469 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
470 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
1756cb66 471 reg_class_contents[aclass]);
a49ae217 472 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
1756cb66 473 reg_class_contents[aclass]);
a49ae217
BS
474 OBJECT_MIN (obj) = INT_MAX;
475 OBJECT_MAX (obj) = -1;
9140d27b 476 OBJECT_LIVE_RANGES (obj) = NULL;
a49ae217 477
9771b263 478 ira_object_id_map_vec.safe_push (obj);
a49ae217 479 ira_object_id_map
9771b263
DN
480 = ira_object_id_map_vec.address ();
481 ira_objects_num = ira_object_id_map_vec.length ();
ac0ab4f7 482
a49ae217
BS
483 return obj;
484}
485
058e97ec
VM
486/* Create and return the allocno corresponding to REGNO in
487 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
488 same regno if CAP_P is FALSE. */
489ira_allocno_t
1756cb66
VM
490ira_create_allocno (int regno, bool cap_p,
491 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
492{
493 ira_allocno_t a;
494
495 a = (ira_allocno_t) pool_alloc (allocno_pool);
496 ALLOCNO_REGNO (a) = regno;
497 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
498 if (! cap_p)
499 {
500 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
501 ira_regno_allocno_map[regno] = a;
502 if (loop_tree_node->regno_allocno_map[regno] == NULL)
503 /* Remember that we can create temporary allocnos to break
504 cycles in register shuffle on region borders (see
505 ira-emit.c). */
506 loop_tree_node->regno_allocno_map[regno] = a;
507 }
508 ALLOCNO_CAP (a) = NULL;
509 ALLOCNO_CAP_MEMBER (a) = NULL;
510 ALLOCNO_NUM (a) = ira_allocnos_num;
49d988e7 511 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
058e97ec 512 ALLOCNO_NREFS (a) = 0;
854bd721 513 ALLOCNO_FREQ (a) = 0;
058e97ec
VM
514 ALLOCNO_HARD_REGNO (a) = -1;
515 ALLOCNO_CALL_FREQ (a) = 0;
516 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
e384e6b5 517 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a) = 0;
058e97ec
VM
518#ifdef STACK_REGS
519 ALLOCNO_NO_STACK_REG_P (a) = false;
520 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
521#endif
058e97ec 522 ALLOCNO_DONT_REASSIGN_P (a) = false;
927425df 523 ALLOCNO_BAD_SPILL_P (a) = false;
058e97ec 524 ALLOCNO_ASSIGNED_P (a) = false;
058e97ec 525 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
3b6d1699 526 ALLOCNO_PREFS (a) = NULL;
058e97ec
VM
527 ALLOCNO_COPIES (a) = NULL;
528 ALLOCNO_HARD_REG_COSTS (a) = NULL;
529 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
530 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
531 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1756cb66
VM
532 ALLOCNO_CLASS (a) = NO_REGS;
533 ALLOCNO_UPDATED_CLASS_COST (a) = 0;
534 ALLOCNO_CLASS_COST (a) = 0;
058e97ec
VM
535 ALLOCNO_MEMORY_COST (a) = 0;
536 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
537 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
ac0ab4f7 538 ALLOCNO_NUM_OBJECTS (a) = 0;
a49ae217 539
1756cb66 540 ALLOCNO_ADD_DATA (a) = NULL;
9771b263
DN
541 allocno_vec.safe_push (a);
542 ira_allocnos = allocno_vec.address ();
543 ira_allocnos_num = allocno_vec.length ();
ac0ab4f7 544
058e97ec
VM
545 return a;
546}
547
1756cb66
VM
548/* Set up register class for A and update its conflict hard
549 registers. */
058e97ec 550void
1756cb66 551ira_set_allocno_class (ira_allocno_t a, enum reg_class aclass)
058e97ec 552{
1756cb66
VM
553 ira_allocno_object_iterator oi;
554 ira_object_t obj;
555
556 ALLOCNO_CLASS (a) = aclass;
557 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
558 {
559 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
560 reg_class_contents[aclass]);
561 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
562 reg_class_contents[aclass]);
563 }
a49ae217
BS
564}
565
ac0ab4f7
BS
566/* Determine the number of objects we should associate with allocno A
567 and allocate them. */
a49ae217 568void
ac0ab4f7 569ira_create_allocno_objects (ira_allocno_t a)
a49ae217 570{
ac0ab4f7 571 enum machine_mode mode = ALLOCNO_MODE (a);
1756cb66
VM
572 enum reg_class aclass = ALLOCNO_CLASS (a);
573 int n = ira_reg_class_max_nregs[aclass][mode];
ac0ab4f7
BS
574 int i;
575
576 if (GET_MODE_SIZE (mode) != 2 * UNITS_PER_WORD || n != 2)
577 n = 1;
578
579 ALLOCNO_NUM_OBJECTS (a) = n;
580 for (i = 0; i < n; i++)
581 ALLOCNO_OBJECT (a, i) = ira_create_object (a, i);
a49ae217
BS
582}
583
ac0ab4f7 584/* For each allocno, set ALLOCNO_NUM_OBJECTS and create the
1756cb66 585 ALLOCNO_OBJECT structures. This must be called after the allocno
ac0ab4f7 586 classes are known. */
a49ae217
BS
587static void
588create_allocno_objects (void)
589{
590 ira_allocno_t a;
591 ira_allocno_iterator ai;
592
593 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7 594 ira_create_allocno_objects (a);
058e97ec
VM
595}
596
ac0ab4f7
BS
597/* Merge hard register conflict information for all objects associated with
598 allocno TO into the corresponding objects associated with FROM.
599 If TOTAL_ONLY is true, we only merge OBJECT_TOTAL_CONFLICT_HARD_REGS. */
3c55880a
BS
600static void
601merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
602 bool total_only)
603{
ac0ab4f7
BS
604 int i;
605 gcc_assert (ALLOCNO_NUM_OBJECTS (to) == ALLOCNO_NUM_OBJECTS (from));
606 for (i = 0; i < ALLOCNO_NUM_OBJECTS (to); i++)
607 {
608 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
609 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
1756cb66 610
ac0ab4f7
BS
611 if (!total_only)
612 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
613 OBJECT_CONFLICT_HARD_REGS (from_obj));
614 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
615 OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
616 }
3c55880a
BS
617#ifdef STACK_REGS
618 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
619 ALLOCNO_NO_STACK_REG_P (to) = true;
620 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
621 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
622#endif
623}
624
ac0ab4f7
BS
625/* Update hard register conflict information for all objects associated with
626 A to include the regs in SET. */
627void
628ior_hard_reg_conflicts (ira_allocno_t a, HARD_REG_SET *set)
629{
630 ira_allocno_object_iterator i;
631 ira_object_t obj;
1756cb66 632
ac0ab4f7
BS
633 FOR_EACH_ALLOCNO_OBJECT (a, obj, i)
634 {
635 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), *set);
636 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), *set);
637 }
638}
639
a49ae217
BS
640/* Return TRUE if a conflict vector with NUM elements is more
641 profitable than a conflict bit vector for OBJ. */
058e97ec 642bool
a49ae217 643ira_conflict_vector_profitable_p (ira_object_t obj, int num)
058e97ec
VM
644{
645 int nw;
a49ae217
BS
646 int max = OBJECT_MAX (obj);
647 int min = OBJECT_MIN (obj);
058e97ec 648
a49ae217
BS
649 if (max < min)
650 /* We prefer a bit vector in such case because it does not result
651 in allocation. */
058e97ec
VM
652 return false;
653
a49ae217
BS
654 nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
655 return (2 * sizeof (ira_object_t) * (num + 1)
058e97ec
VM
656 < 3 * nw * sizeof (IRA_INT_TYPE));
657}
658
a49ae217
BS
659/* Allocates and initialize the conflict vector of OBJ for NUM
660 conflicting objects. */
058e97ec 661void
a49ae217 662ira_allocate_conflict_vec (ira_object_t obj, int num)
058e97ec
VM
663{
664 int size;
a49ae217 665 ira_object_t *vec;
058e97ec 666
a49ae217 667 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
058e97ec 668 num++; /* for NULL end marker */
a49ae217
BS
669 size = sizeof (ira_object_t) * num;
670 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
671 vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
058e97ec 672 vec[0] = NULL;
a49ae217
BS
673 OBJECT_NUM_CONFLICTS (obj) = 0;
674 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
675 OBJECT_CONFLICT_VEC_P (obj) = true;
058e97ec
VM
676}
677
a49ae217 678/* Allocate and initialize the conflict bit vector of OBJ. */
058e97ec 679static void
a49ae217 680allocate_conflict_bit_vec (ira_object_t obj)
058e97ec
VM
681{
682 unsigned int size;
683
a49ae217
BS
684 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
685 size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
058e97ec 686 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
a49ae217
BS
687 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
688 memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
689 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
690 OBJECT_CONFLICT_VEC_P (obj) = false;
058e97ec
VM
691}
692
693/* Allocate and initialize the conflict vector or conflict bit vector
ac0ab4f7 694 of OBJ for NUM conflicting allocnos whatever is more profitable. */
058e97ec 695void
ac0ab4f7 696ira_allocate_object_conflicts (ira_object_t obj, int num)
058e97ec 697{
ac0ab4f7
BS
698 if (ira_conflict_vector_profitable_p (obj, num))
699 ira_allocate_conflict_vec (obj, num);
058e97ec 700 else
ac0ab4f7 701 allocate_conflict_bit_vec (obj);
058e97ec
VM
702}
703
a49ae217 704/* Add OBJ2 to the conflicts of OBJ1. */
058e97ec 705static void
a49ae217 706add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
058e97ec
VM
707{
708 int num;
709 unsigned int size;
710
a49ae217 711 if (OBJECT_CONFLICT_VEC_P (obj1))
058e97ec 712 {
a49ae217
BS
713 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
714 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
715 num = curr_num + 2;
716 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
058e97ec 717 {
a49ae217 718 ira_object_t *newvec;
058e97ec 719 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
a49ae217
BS
720 newvec = (ira_object_t *) ira_allocate (size);
721 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
722 ira_free (vec);
723 vec = newvec;
724 OBJECT_CONFLICT_ARRAY (obj1) = vec;
725 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 726 }
a49ae217 727 vec[num - 2] = obj2;
058e97ec 728 vec[num - 1] = NULL;
a49ae217 729 OBJECT_NUM_CONFLICTS (obj1)++;
058e97ec
VM
730 }
731 else
732 {
733 int nw, added_head_nw, id;
a49ae217 734 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
058e97ec 735
a49ae217
BS
736 id = OBJECT_CONFLICT_ID (obj2);
737 if (OBJECT_MIN (obj1) > id)
058e97ec
VM
738 {
739 /* Expand head of the bit vector. */
a49ae217
BS
740 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
741 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 742 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
a49ae217 743 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
058e97ec
VM
744 {
745 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
746 vec, nw * sizeof (IRA_INT_TYPE));
747 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
748 }
749 else
750 {
751 size
752 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
753 vec = (IRA_INT_TYPE *) ira_allocate (size);
754 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
a49ae217 755 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
756 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
757 memset ((char *) vec
758 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
759 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
a49ae217
BS
760 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
761 OBJECT_CONFLICT_ARRAY (obj1) = vec;
762 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 763 }
a49ae217 764 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
058e97ec 765 }
a49ae217 766 else if (OBJECT_MAX (obj1) < id)
058e97ec 767 {
a49ae217 768 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
058e97ec 769 size = nw * sizeof (IRA_INT_TYPE);
a49ae217 770 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
058e97ec
VM
771 {
772 /* Expand tail of the bit vector. */
773 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
774 vec = (IRA_INT_TYPE *) ira_allocate (size);
a49ae217
BS
775 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
776 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
777 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
778 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
779 OBJECT_CONFLICT_ARRAY (obj1) = vec;
780 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
058e97ec 781 }
a49ae217 782 OBJECT_MAX (obj1) = id;
058e97ec 783 }
a49ae217 784 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
058e97ec
VM
785 }
786}
787
a49ae217
BS
788/* Add OBJ1 to the conflicts of OBJ2 and vice versa. */
789static void
790ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
058e97ec 791{
a49ae217
BS
792 add_to_conflicts (obj1, obj2);
793 add_to_conflicts (obj2, obj1);
058e97ec
VM
794}
795
a49ae217 796/* Clear all conflicts of OBJ. */
058e97ec 797static void
a49ae217 798clear_conflicts (ira_object_t obj)
058e97ec 799{
a49ae217 800 if (OBJECT_CONFLICT_VEC_P (obj))
058e97ec 801 {
a49ae217
BS
802 OBJECT_NUM_CONFLICTS (obj) = 0;
803 OBJECT_CONFLICT_VEC (obj)[0] = NULL;
058e97ec 804 }
a49ae217 805 else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
058e97ec
VM
806 {
807 int nw;
808
a49ae217
BS
809 nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
810 memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
058e97ec
VM
811 }
812}
813
814/* The array used to find duplications in conflict vectors of
815 allocnos. */
a49ae217 816static int *conflict_check;
058e97ec
VM
817
818/* The value used to mark allocation presence in conflict vector of
819 the current allocno. */
a49ae217 820static int curr_conflict_check_tick;
058e97ec 821
a49ae217 822/* Remove duplications in conflict vector of OBJ. */
058e97ec 823static void
a49ae217 824compress_conflict_vec (ira_object_t obj)
058e97ec 825{
a49ae217 826 ira_object_t *vec, conflict_obj;
058e97ec
VM
827 int i, j;
828
a49ae217
BS
829 ira_assert (OBJECT_CONFLICT_VEC_P (obj));
830 vec = OBJECT_CONFLICT_VEC (obj);
831 curr_conflict_check_tick++;
832 for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
058e97ec 833 {
a49ae217
BS
834 int id = OBJECT_CONFLICT_ID (conflict_obj);
835 if (conflict_check[id] != curr_conflict_check_tick)
058e97ec 836 {
a49ae217
BS
837 conflict_check[id] = curr_conflict_check_tick;
838 vec[j++] = conflict_obj;
058e97ec
VM
839 }
840 }
a49ae217 841 OBJECT_NUM_CONFLICTS (obj) = j;
058e97ec
VM
842 vec[j] = NULL;
843}
844
845/* Remove duplications in conflict vectors of all allocnos. */
846static void
847compress_conflict_vecs (void)
848{
ac0ab4f7
BS
849 ira_object_t obj;
850 ira_object_iterator oi;
058e97ec 851
a49ae217
BS
852 conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
853 memset (conflict_check, 0, sizeof (int) * ira_objects_num);
854 curr_conflict_check_tick = 0;
ac0ab4f7 855 FOR_EACH_OBJECT (obj, oi)
a49ae217 856 {
a49ae217
BS
857 if (OBJECT_CONFLICT_VEC_P (obj))
858 compress_conflict_vec (obj);
859 }
860 ira_free (conflict_check);
058e97ec
VM
861}
862
863/* This recursive function outputs allocno A and if it is a cap the
864 function outputs its members. */
865void
866ira_print_expanded_allocno (ira_allocno_t a)
867{
868 basic_block bb;
869
870 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
871 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
872 fprintf (ira_dump_file, ",b%d", bb->index);
873 else
2608d841 874 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop_num);
058e97ec
VM
875 if (ALLOCNO_CAP_MEMBER (a) != NULL)
876 {
877 fprintf (ira_dump_file, ":");
878 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
879 }
880 fprintf (ira_dump_file, ")");
881}
882
/* Create and return the cap representing allocno A in the
   parent loop.  A cap summarizes A's costs and conflicts so that the
   parent loop can allocate a register for the whole nested region.  */
static ira_allocno_t
create_cap_allocno (ira_allocno_t a)
{
  ira_allocno_t cap;
  ira_loop_tree_node_t parent;
  enum reg_class aclass;

  /* The cap lives in the parent of A's loop tree node and carries the
     same pseudo register number and mode.  */
  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
  ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
  aclass = ALLOCNO_CLASS (a);
  ira_set_allocno_class (cap, aclass);
  ira_create_allocno_objects (cap);
  /* Link the cap and its member in both directions.  */
  ALLOCNO_CAP_MEMBER (cap) = a;
  ALLOCNO_CAP (a) = cap;
  /* Copy cost information; the hard register cost vectors are
     duplicated, not shared.  */
  ALLOCNO_CLASS_COST (cap) = ALLOCNO_CLASS_COST (a);
  ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
  ira_allocate_and_copy_costs
    (&ALLOCNO_HARD_REG_COSTS (cap), aclass, ALLOCNO_HARD_REG_COSTS (a));
  ira_allocate_and_copy_costs
    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), aclass,
     ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
  ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
  ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
  ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
  ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);

  /* The cap inherits A's hard register conflicts.  */
  merge_hard_reg_conflicts (a, cap, false);

  ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
  ALLOCNO_CHEAP_CALLS_CROSSED_NUM (cap) = ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    {
      fprintf (ira_dump_file, "    Creating cap ");
      ira_print_expanded_allocno (cap);
      fprintf (ira_dump_file, "\n");
    }
  return cap;
}
924
ac0ab4f7 925/* Create and return a live range for OBJECT with given attributes. */
b14151b5 926live_range_t
9140d27b
BS
927ira_create_live_range (ira_object_t obj, int start, int finish,
928 live_range_t next)
058e97ec 929{
b14151b5 930 live_range_t p;
058e97ec 931
b14151b5 932 p = (live_range_t) pool_alloc (live_range_pool);
9140d27b 933 p->object = obj;
058e97ec
VM
934 p->start = start;
935 p->finish = finish;
936 p->next = next;
937 return p;
938}
939
ac0ab4f7
BS
940/* Create a new live range for OBJECT and queue it at the head of its
941 live range list. */
942void
943ira_add_live_range_to_object (ira_object_t object, int start, int finish)
944{
945 live_range_t p;
946 p = ira_create_live_range (object, start, finish,
947 OBJECT_LIVE_RANGES (object));
948 OBJECT_LIVE_RANGES (object) = p;
949}
950
058e97ec 951/* Copy allocno live range R and return the result. */
b14151b5 952static live_range_t
9140d27b 953copy_live_range (live_range_t r)
058e97ec 954{
b14151b5 955 live_range_t p;
058e97ec 956
b14151b5 957 p = (live_range_t) pool_alloc (live_range_pool);
058e97ec
VM
958 *p = *r;
959 return p;
960}
961
962/* Copy allocno live range list given by its head R and return the
963 result. */
b14151b5 964live_range_t
9140d27b 965ira_copy_live_range_list (live_range_t r)
058e97ec 966{
b14151b5 967 live_range_t p, first, last;
058e97ec
VM
968
969 if (r == NULL)
970 return NULL;
971 for (first = last = NULL; r != NULL; r = r->next)
972 {
9140d27b 973 p = copy_live_range (r);
058e97ec
VM
974 if (first == NULL)
975 first = p;
976 else
977 last->next = p;
978 last = p;
979 }
980 return first;
981}
982
/* Merge ranges R1 and R2 and returns the result.  The function
   maintains the order of ranges and tries to minimize number of the
   result ranges.  Both input lists are consumed; overlapping or
   adjacent ranges are coalesced and the absorbed range cells are
   returned to the pool.  */
live_range_t
ira_merge_live_ranges (live_range_t r1, live_range_t r2)
{
  live_range_t first, last, temp;

  if (r1 == NULL)
    return r2;
  if (r2 == NULL)
    return r1;
  for (first = last = NULL; r1 != NULL && r2 != NULL;)
    {
      /* Keep the invariant r1->start >= r2->start by swapping the
	 list heads when necessary.  */
      if (r1->start < r2->start)
	{
	  temp = r1;
	  r1 = r2;
	  r2 = temp;
	}
      if (r1->start <= r2->finish + 1)
	{
	  /* Intersected ranges: merge r1 and r2 into r1.  (The "+ 1"
	     also coalesces ranges that merely touch.)  */
	  r1->start = r2->start;
	  if (r1->finish < r2->finish)
	    r1->finish = r2->finish;
	  temp = r2;
	  r2 = r2->next;
	  ira_finish_live_range (temp);
	  if (r2 == NULL)
	    {
	      /* To try to merge with subsequent ranges in r1.  */
	      r2 = r1->next;
	      r1->next = NULL;
	    }
	}
      else
	{
	  /* Add r1 to the result.  */
	  if (first == NULL)
	    first = last = r1;
	  else
	    {
	      last->next = r1;
	      last = r1;
	    }
	  r1 = r1->next;
	  if (r1 == NULL)
	    {
	      /* To try to merge with subsequent ranges in r2.  */
	      r1 = r2->next;
	      r2->next = NULL;
	    }
	}
    }
  /* Append whichever tail remains; at most one list is non-empty
     here and its head is a single detached range.  */
  if (r1 != NULL)
    {
      if (first == NULL)
	first = r1;
      else
	last->next = r1;
      ira_assert (r1->next == NULL);
    }
  else if (r2 != NULL)
    {
      if (first == NULL)
	first = r2;
      else
	last->next = r2;
      ira_assert (r2->next == NULL);
    }
  else
    {
      ira_assert (last->next == NULL);
    }
  return first;
}
1060
1061/* Return TRUE if live ranges R1 and R2 intersect. */
1062bool
9140d27b 1063ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
3553f0bb
VM
1064{
1065 /* Remember the live ranges are always kept ordered. */
1066 while (r1 != NULL && r2 != NULL)
1067 {
1068 if (r1->start > r2->finish)
1069 r1 = r1->next;
1070 else if (r2->start > r1->finish)
1071 r2 = r2->next;
1072 else
1073 return true;
1074 }
1075 return false;
1076}
1077
058e97ec
VM
1078/* Free allocno live range R. */
1079void
9140d27b 1080ira_finish_live_range (live_range_t r)
058e97ec 1081{
b14151b5 1082 pool_free (live_range_pool, r);
058e97ec
VM
1083}
1084
3553f0bb
VM
1085/* Free list of allocno live ranges starting with R. */
1086void
9140d27b 1087ira_finish_live_range_list (live_range_t r)
3553f0bb 1088{
b14151b5 1089 live_range_t next_r;
3553f0bb
VM
1090
1091 for (; r != NULL; r = next_r)
1092 {
1093 next_r = r->next;
9140d27b 1094 ira_finish_live_range (r);
3553f0bb
VM
1095 }
1096}
1097
058e97ec
VM
1098/* Free updated register costs of allocno A. */
1099void
1100ira_free_allocno_updated_costs (ira_allocno_t a)
1101{
1756cb66 1102 enum reg_class aclass;
058e97ec 1103
1756cb66 1104 aclass = ALLOCNO_CLASS (a);
058e97ec 1105 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1756cb66 1106 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
058e97ec
VM
1107 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1108 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1109 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1756cb66 1110 aclass);
058e97ec
VM
1111 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1112}
1113
/* Free and nullify all cost vectors allocated earlier for allocno
   A.  Also releases A's objects (their live ranges, conflict arrays
   and id-map slots) and A's slot in ira_allocnos.  */
static void
ira_free_allocno_costs (ira_allocno_t a)
{
  enum reg_class aclass = ALLOCNO_CLASS (a);
  ira_object_t obj;
  ira_allocno_object_iterator oi;

  /* Tear down each object belonging to A before the allocno's own
     data: live ranges, conflict storage, id map entry, the object
     itself.  */
  FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
    {
      ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
      ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
      if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
	ira_free (OBJECT_CONFLICT_ARRAY (obj));
      pool_free (object_pool, obj);
    }

  /* Drop A from the global allocno array.  */
  ira_allocnos[ALLOCNO_NUM (a)] = NULL;
  /* Free every cost vector that was allocated, then null all four
     pointers so repeated calls are safe.  */
  if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), aclass);
  if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
    ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
			  aclass);
  ALLOCNO_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
  ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
}
1147
1148/* Free the memory allocated for allocno A. */
1149static void
1150finish_allocno (ira_allocno_t a)
1151{
1152 ira_free_allocno_costs (a);
058e97ec
VM
1153 pool_free (allocno_pool, a);
1154}
1155
1156/* Free the memory allocated for all allocnos. */
1157static void
1158finish_allocnos (void)
1159{
1160 ira_allocno_t a;
1161 ira_allocno_iterator ai;
1162
1163 FOR_EACH_ALLOCNO (a, ai)
1164 finish_allocno (a);
1165 ira_free (ira_regno_allocno_map);
9771b263
DN
1166 ira_object_id_map_vec.release ();
1167 allocno_vec.release ();
058e97ec 1168 free_alloc_pool (allocno_pool);
a49ae217 1169 free_alloc_pool (object_pool);
b14151b5 1170 free_alloc_pool (live_range_pool);
058e97ec
VM
1171}
1172
1173\f
1174
3b6d1699
VM
1175/* Pools for allocno preferences. */
1176static alloc_pool pref_pool;
1177
1178/* Vec containing references to all created preferences. It is a
1179 container of array ira_prefs. */
1180static vec<ira_pref_t> pref_vec;
1181
1182/* The function initializes data concerning allocno prefs. */
1183static void
1184initiate_prefs (void)
1185{
1186 pref_pool
1187 = create_alloc_pool ("prefs", sizeof (struct ira_allocno_pref), 100);
1188 pref_vec.create (get_max_uid ());
1189 ira_prefs = NULL;
1190 ira_prefs_num = 0;
1191}
1192
1193/* Return pref for A and HARD_REGNO if any. */
1194static ira_pref_t
1195find_allocno_pref (ira_allocno_t a, int hard_regno)
1196{
1197 ira_pref_t pref;
1198
1199 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1200 if (pref->allocno == a && pref->hard_regno == hard_regno)
1201 return pref;
1202 return NULL;
1203}
1204
1205/* Create and return pref with given attributes A, HARD_REGNO, and FREQ. */
1206ira_pref_t
1207ira_create_pref (ira_allocno_t a, int hard_regno, int freq)
1208{
1209 ira_pref_t pref;
1210
1211 pref = (ira_pref_t) pool_alloc (pref_pool);
1212 pref->num = ira_prefs_num;
1213 pref->allocno = a;
1214 pref->hard_regno = hard_regno;
1215 pref->freq = freq;
1216 pref_vec.safe_push (pref);
1217 ira_prefs = pref_vec.address ();
1218 ira_prefs_num = pref_vec.length ();
1219 return pref;
1220}
1221
1222/* Attach a pref PREF to the cooresponding allocno. */
1223static void
1224add_allocno_pref_to_list (ira_pref_t pref)
1225{
1226 ira_allocno_t a = pref->allocno;
1227
1228 pref->next_pref = ALLOCNO_PREFS (a);
1229 ALLOCNO_PREFS (a) = pref;
1230}
1231
1232/* Create (or update frequency if the pref already exists) the pref of
1233 allocnos A preferring HARD_REGNO with frequency FREQ. */
1234void
1235ira_add_allocno_pref (ira_allocno_t a, int hard_regno, int freq)
1236{
1237 ira_pref_t pref;
1238
1239 if (freq <= 0)
1240 return;
1241 if ((pref = find_allocno_pref (a, hard_regno)) != NULL)
1242 {
1243 pref->freq += freq;
1244 return;
1245 }
1246 pref = ira_create_pref (a, hard_regno, freq);
1247 ira_assert (a != NULL);
1248 add_allocno_pref_to_list (pref);
1249}
1250
1251/* Print info about PREF into file F. */
1252static void
1253print_pref (FILE *f, ira_pref_t pref)
1254{
1255 fprintf (f, " pref%d:a%d(r%d)<-hr%d@%d\n", pref->num,
1256 ALLOCNO_NUM (pref->allocno), ALLOCNO_REGNO (pref->allocno),
1257 pref->hard_regno, pref->freq);
1258}
1259
1260/* Print info about PREF into stderr. */
1261void
1262ira_debug_pref (ira_pref_t pref)
1263{
1264 print_pref (stderr, pref);
1265}
1266
1267/* Print info about all prefs into file F. */
1268static void
1269print_prefs (FILE *f)
1270{
1271 ira_pref_t pref;
1272 ira_pref_iterator pi;
1273
1274 FOR_EACH_PREF (pref, pi)
1275 print_pref (f, pref);
1276}
1277
1278/* Print info about all prefs into stderr. */
1279void
1280ira_debug_prefs (void)
1281{
1282 print_prefs (stderr);
1283}
1284
1285/* Print info about prefs involving allocno A into file F. */
1286static void
1287print_allocno_prefs (FILE *f, ira_allocno_t a)
1288{
1289 ira_pref_t pref;
1290
1291 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1292 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = pref->next_pref)
1293 fprintf (f, " pref%d:hr%d@%d", pref->num, pref->hard_regno, pref->freq);
1294 fprintf (f, "\n");
1295}
1296
1297/* Print info about prefs involving allocno A into stderr. */
1298void
1299ira_debug_allocno_prefs (ira_allocno_t a)
1300{
1301 print_allocno_prefs (stderr, a);
1302}
1303
1304/* The function frees memory allocated for PREF. */
1305static void
1306finish_pref (ira_pref_t pref)
1307{
1308 ira_prefs[pref->num] = NULL;
1309 pool_free (pref_pool, pref);
1310}
1311
1312/* Remove PREF from the list of allocno prefs and free memory for
1313 it. */
1314void
1315ira_remove_pref (ira_pref_t pref)
1316{
1317 ira_pref_t cpref, prev;
1318
1319 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1320 fprintf (ira_dump_file, " Removing pref%d:hr%d@%d\n",
1321 pref->num, pref->hard_regno, pref->freq);
1322 for (prev = NULL, cpref = ALLOCNO_PREFS (pref->allocno);
1323 cpref != NULL;
1324 prev = cpref, cpref = cpref->next_pref)
1325 if (cpref == pref)
1326 break;
1327 ira_assert (cpref != NULL);
1328 if (prev == NULL)
1329 ALLOCNO_PREFS (pref->allocno) = pref->next_pref;
1330 else
1331 prev->next_pref = pref->next_pref;
1332 finish_pref (pref);
1333}
1334
1335/* Remove all prefs of allocno A. */
1336void
1337ira_remove_allocno_prefs (ira_allocno_t a)
1338{
1339 ira_pref_t pref, next_pref;
1340
1341 for (pref = ALLOCNO_PREFS (a); pref != NULL; pref = next_pref)
1342 {
1343 next_pref = pref->next_pref;
1344 finish_pref (pref);
1345 }
1346 ALLOCNO_PREFS (a) = NULL;
1347}
1348
1349/* Free memory allocated for all prefs. */
1350static void
1351finish_prefs (void)
1352{
1353 ira_pref_t pref;
1354 ira_pref_iterator pi;
1355
1356 FOR_EACH_PREF (pref, pi)
1357 finish_pref (pref);
1358 pref_vec.release ();
1359 free_alloc_pool (pref_pool);
1360}
1361
1362\f
1363
058e97ec
VM
1364/* Pools for copies. */
1365static alloc_pool copy_pool;
1366
1367/* Vec containing references to all created copies. It is a
1368 container of array ira_copies. */
9771b263 1369static vec<ira_copy_t> copy_vec;
058e97ec
VM
1370
1371/* The function initializes data concerning allocno copies. */
1372static void
1373initiate_copies (void)
1374{
1375 copy_pool
1376 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
9771b263 1377 copy_vec.create (get_max_uid ());
058e97ec
VM
1378 ira_copies = NULL;
1379 ira_copies_num = 0;
1380}
1381
1382/* Return copy connecting A1 and A2 and originated from INSN of
1383 LOOP_TREE_NODE if any. */
1384static ira_copy_t
1385find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
1386 ira_loop_tree_node_t loop_tree_node)
1387{
1388 ira_copy_t cp, next_cp;
1389 ira_allocno_t another_a;
1390
1391 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1392 {
1393 if (cp->first == a1)
1394 {
1395 next_cp = cp->next_first_allocno_copy;
1396 another_a = cp->second;
1397 }
1398 else if (cp->second == a1)
1399 {
1400 next_cp = cp->next_second_allocno_copy;
1401 another_a = cp->first;
1402 }
1403 else
1404 gcc_unreachable ();
1405 if (another_a == a2 && cp->insn == insn
1406 && cp->loop_tree_node == loop_tree_node)
1407 return cp;
1408 }
1409 return NULL;
1410}
1411
1412/* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
548a6322 1413 SECOND, FREQ, CONSTRAINT_P, and INSN. */
058e97ec 1414ira_copy_t
548a6322
VM
1415ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1416 bool constraint_p, rtx insn,
058e97ec
VM
1417 ira_loop_tree_node_t loop_tree_node)
1418{
1419 ira_copy_t cp;
1420
1421 cp = (ira_copy_t) pool_alloc (copy_pool);
1422 cp->num = ira_copies_num;
1423 cp->first = first;
1424 cp->second = second;
1425 cp->freq = freq;
548a6322 1426 cp->constraint_p = constraint_p;
058e97ec
VM
1427 cp->insn = insn;
1428 cp->loop_tree_node = loop_tree_node;
9771b263
DN
1429 copy_vec.safe_push (cp);
1430 ira_copies = copy_vec.address ();
1431 ira_copies_num = copy_vec.length ();
058e97ec
VM
1432 return cp;
1433}
1434
/* Attach a copy CP to allocnos involved into the copy.  Each allocno
   keeps a single intrusive doubly-linked list of all its copies; a
   copy therefore participates in two lists (via its first/second link
   fields) and is pushed at the head of both.  */
static void
add_allocno_copy_to_list (ira_copy_t cp)
{
  ira_allocno_t first = cp->first, second = cp->second;

  cp->prev_first_allocno_copy = NULL;
  cp->prev_second_allocno_copy = NULL;
  cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
  if (cp->next_first_allocno_copy != NULL)
    {
      /* The old head may be linked into FIRST's list through either
	 its "first" or its "second" link fields, depending on which
	 end of that copy FIRST is; fix the matching prev pointer.  */
      if (cp->next_first_allocno_copy->first == first)
	cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
      else
	cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
    }
  cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
  if (cp->next_second_allocno_copy != NULL)
    {
      /* Same disambiguation for SECOND's list head.  */
      if (cp->next_second_allocno_copy->second == second)
	cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
      else
	cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
    }
  ALLOCNO_COPIES (first) = cp;
  ALLOCNO_COPIES (second) = cp;
}
1462
058e97ec
VM
1463/* Make a copy CP a canonical copy where number of the
1464 first allocno is less than the second one. */
3b6d1699
VM
1465static void
1466swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
058e97ec
VM
1467{
1468 ira_allocno_t temp;
1469 ira_copy_t temp_cp;
1470
1471 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1472 return;
1473
1474 temp = cp->first;
1475 cp->first = cp->second;
1476 cp->second = temp;
1477
1478 temp_cp = cp->prev_first_allocno_copy;
1479 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1480 cp->prev_second_allocno_copy = temp_cp;
1481
1482 temp_cp = cp->next_first_allocno_copy;
1483 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1484 cp->next_second_allocno_copy = temp_cp;
1485}
1486
1487/* Create (or update frequency if the copy already exists) and return
1488 the copy of allocnos FIRST and SECOND with frequency FREQ
1489 corresponding to move insn INSN (if any) and originated from
1490 LOOP_TREE_NODE. */
1491ira_copy_t
1492ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
548a6322
VM
1493 bool constraint_p, rtx insn,
1494 ira_loop_tree_node_t loop_tree_node)
058e97ec
VM
1495{
1496 ira_copy_t cp;
1497
1498 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1499 {
1500 cp->freq += freq;
1501 return cp;
1502 }
548a6322
VM
1503 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1504 loop_tree_node);
058e97ec 1505 ira_assert (first != NULL && second != NULL);
3b6d1699
VM
1506 add_allocno_copy_to_list (cp);
1507 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
1508 return cp;
1509}
1510
4cda38d5
VM
1511/* Print info about copy CP into file F. */
1512static void
1513print_copy (FILE *f, ira_copy_t cp)
1514{
548a6322 1515 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
4cda38d5 1516 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
548a6322
VM
1517 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1518 cp->insn != NULL
1519 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
4cda38d5
VM
1520}
1521
7b3b6ae4
LC
1522DEBUG_FUNCTION void
1523debug (ira_allocno_copy &ref)
1524{
1525 print_copy (stderr, &ref);
1526}
1527
1528DEBUG_FUNCTION void
1529debug (ira_allocno_copy *ptr)
1530{
1531 if (ptr)
1532 debug (*ptr);
1533 else
1534 fprintf (stderr, "<nil>\n");
1535}
1536
4cda38d5
VM
1537/* Print info about copy CP into stderr. */
1538void
1539ira_debug_copy (ira_copy_t cp)
1540{
1541 print_copy (stderr, cp);
1542}
1543
1544/* Print info about all copies into file F. */
1545static void
1546print_copies (FILE *f)
1547{
1548 ira_copy_t cp;
1549 ira_copy_iterator ci;
1550
1551 FOR_EACH_COPY (cp, ci)
1552 print_copy (f, cp);
1553}
1554
1555/* Print info about all copies into stderr. */
1556void
1557ira_debug_copies (void)
1558{
1559 print_copies (stderr);
1560}
1561
058e97ec
VM
1562/* Print info about copies involving allocno A into file F. */
1563static void
1564print_allocno_copies (FILE *f, ira_allocno_t a)
1565{
1566 ira_allocno_t another_a;
1567 ira_copy_t cp, next_cp;
1568
1569 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1570 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1571 {
1572 if (cp->first == a)
1573 {
1574 next_cp = cp->next_first_allocno_copy;
1575 another_a = cp->second;
1576 }
1577 else if (cp->second == a)
1578 {
1579 next_cp = cp->next_second_allocno_copy;
1580 another_a = cp->first;
1581 }
1582 else
1583 gcc_unreachable ();
1584 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1585 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1586 }
1587 fprintf (f, "\n");
1588}
1589
7b3b6ae4
LC
1590DEBUG_FUNCTION void
1591debug (ira_allocno &ref)
1592{
1593 print_allocno_copies (stderr, &ref);
1594}
1595
1596DEBUG_FUNCTION void
1597debug (ira_allocno *ptr)
1598{
1599 if (ptr)
1600 debug (*ptr);
1601 else
1602 fprintf (stderr, "<nil>\n");
1603}
1604
1605
058e97ec
VM
1606/* Print info about copies involving allocno A into stderr. */
1607void
1608ira_debug_allocno_copies (ira_allocno_t a)
1609{
1610 print_allocno_copies (stderr, a);
1611}
1612
1613/* The function frees memory allocated for copy CP. */
1614static void
1615finish_copy (ira_copy_t cp)
1616{
1617 pool_free (copy_pool, cp);
1618}
1619
1620
1621/* Free memory allocated for all copies. */
1622static void
1623finish_copies (void)
1624{
1625 ira_copy_t cp;
1626 ira_copy_iterator ci;
1627
1628 FOR_EACH_COPY (cp, ci)
1629 finish_copy (cp);
9771b263 1630 copy_vec.release ();
058e97ec
VM
1631 free_alloc_pool (copy_pool);
1632}
1633
1634\f
1635
1756cb66 1636/* Pools for cost vectors. It is defined only for allocno classes. */
058e97ec
VM
1637static alloc_pool cost_vector_pool[N_REG_CLASSES];
1638
1639/* The function initiates work with hard register cost vectors. It
1756cb66 1640 creates allocation pool for each allocno class. */
058e97ec
VM
1641static void
1642initiate_cost_vectors (void)
1643{
1644 int i;
1756cb66 1645 enum reg_class aclass;
058e97ec 1646
1756cb66 1647 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1648 {
1756cb66
VM
1649 aclass = ira_allocno_classes[i];
1650 cost_vector_pool[aclass]
058e97ec 1651 = create_alloc_pool ("cost vectors",
1756cb66 1652 sizeof (int) * ira_class_hard_regs_num[aclass],
058e97ec
VM
1653 100);
1654 }
1655}
1656
1756cb66 1657/* Allocate and return a cost vector VEC for ACLASS. */
058e97ec 1658int *
6f76a878 1659ira_allocate_cost_vector (reg_class_t aclass)
058e97ec 1660{
6f76a878 1661 return (int *) pool_alloc (cost_vector_pool[(int) aclass]);
058e97ec
VM
1662}
1663
1756cb66 1664/* Free a cost vector VEC for ACLASS. */
058e97ec 1665void
6f76a878 1666ira_free_cost_vector (int *vec, reg_class_t aclass)
058e97ec
VM
1667{
1668 ira_assert (vec != NULL);
6f76a878 1669 pool_free (cost_vector_pool[(int) aclass], vec);
058e97ec
VM
1670}
1671
1672/* Finish work with hard register cost vectors. Release allocation
1756cb66 1673 pool for each allocno class. */
058e97ec
VM
1674static void
1675finish_cost_vectors (void)
1676{
1677 int i;
1756cb66 1678 enum reg_class aclass;
058e97ec 1679
1756cb66 1680 for (i = 0; i < ira_allocno_classes_num; i++)
058e97ec 1681 {
1756cb66
VM
1682 aclass = ira_allocno_classes[i];
1683 free_alloc_pool (cost_vector_pool[aclass]);
058e97ec
VM
1684 }
1685}
1686
1687\f
1688
/* Compute a post-ordering of the reverse control flow of the loop body
   designated by the children nodes of LOOP_NODE, whose body nodes in
   pre-order are input as LOOP_PREORDER.  Return a VEC with a post-order
   of the reverse loop body.

   For the post-order of the reverse CFG, we visit the basic blocks in
   LOOP_PREORDER array in the reverse order of where they appear.
   This is important: We do not just want to compute a post-order of
   the reverse CFG, we want to make a best-guess for a visiting order that
   minimizes the number of chain elements per allocno live range.  If the
   blocks would be visited in a different order, we would still compute a
   correct post-ordering but it would be less likely that two nodes
   connected by an edge in the CFG are neighbours in the topsort.  */

static vec<ira_loop_tree_node_t>
ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
				  vec<ira_loop_tree_node_t> loop_preorder)
{
  vec<ira_loop_tree_node_t> topsort_nodes = vNULL;
  unsigned int n_loop_preorder;

  n_loop_preorder = loop_preorder.length ();
  if (n_loop_preorder != 0)
    {
      ira_loop_tree_node_t subloop_node;
      unsigned int i;
      auto_vec<ira_loop_tree_node_t> dfs_stack;

      /* This is a bit of strange abuse of the BB_VISITED flag:  We use
	 the flag to mark blocks we still have to visit to add them to
	 our post-order.  Define an alias to avoid confusion.  */
#define BB_TO_VISIT BB_VISITED

      /* Mark every body block as pending.  */
      FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
	{
	  gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
	  subloop_node->bb->flags |= BB_TO_VISIT;
	}

      topsort_nodes.create (n_loop_preorder);
      dfs_stack.create (n_loop_preorder);

      /* Iterative DFS over the reverse CFG (following predecessor
	 edges), seeded from the blocks in reverse pre-order.  */
      FOR_EACH_VEC_ELT_REVERSE (loop_preorder, i, subloop_node)
	{
	  if (! (subloop_node->bb->flags & BB_TO_VISIT))
	    continue;

	  subloop_node->bb->flags &= ~BB_TO_VISIT;
	  dfs_stack.quick_push (subloop_node);
	  while (! dfs_stack.is_empty ())
	    {
	      edge e;
	      edge_iterator ei;

	      ira_loop_tree_node_t n = dfs_stack.last ();
	      FOR_EACH_EDGE (e, ei, n->bb->preds)
		{
		  ira_loop_tree_node_t pred_node;
		  basic_block pred_bb = e->src;

		  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		    continue;

		  pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
		  if (pred_node != n
		      && (pred_node->bb->flags & BB_TO_VISIT))
		    {
		      pred_node->bb->flags &= ~BB_TO_VISIT;
		      dfs_stack.quick_push (pred_node);
		    }
		}
	      /* N is finished (no pending predecessor was pushed on
		 top of it): emit it to the post-order.  */
	      if (n == dfs_stack.last ())
		{
		  dfs_stack.pop ();
		  topsort_nodes.quick_push (n);
		}
	    }
	}

#undef BB_TO_VISIT
    }

  gcc_assert (topsort_nodes.length () == n_loop_preorder);
  return topsort_nodes;
}
1774
/* The current loop tree node and its regno allocno map.  */
ira_loop_tree_node_t ira_curr_loop_tree_node;
ira_allocno_t *ira_curr_regno_allocno_map;

/* This recursive function traverses loop tree with root LOOP_NODE
   calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
   correspondingly in preorder and postorder.  The function sets up
   IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP.  If BB_P,
   basic block nodes of LOOP_NODE is also processed (before its
   subloop nodes).

   If BB_P is set and POSTORDER_FUNC is given, the basic blocks in
   the loop are passed in the *reverse* post-order of the *reverse*
   CFG.  This is only used by ira_create_allocno_live_ranges, which
   wants to visit basic blocks in this order to minimize the number
   of elements per live range chain.
   Note that the loop tree nodes are still visited in the normal,
   forward post-order of the loop tree.  */

void
ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
			void (*preorder_func) (ira_loop_tree_node_t),
			void (*postorder_func) (ira_loop_tree_node_t))
{
  ira_loop_tree_node_t subloop_node;

  ira_assert (loop_node->bb == NULL);
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (preorder_func != NULL)
    (*preorder_func) (loop_node);

  if (bb_p)
    {
      auto_vec<ira_loop_tree_node_t> loop_preorder;
      unsigned int i;

      /* Add all nodes to the set of nodes to visit.  The IRA loop tree
	 is set up such that nodes in the loop body appear in a pre-order
	 of their place in the CFG.  */
      for (subloop_node = loop_node->children;
	   subloop_node != NULL;
	   subloop_node = subloop_node->next)
	if (subloop_node->bb != NULL)
	  loop_preorder.safe_push (subloop_node);

      if (preorder_func != NULL)
	FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
	  (*preorder_func) (subloop_node);

      if (postorder_func != NULL)
	{
	  vec<ira_loop_tree_node_t> loop_rev_postorder =
	    ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
	  FOR_EACH_VEC_ELT_REVERSE (loop_rev_postorder, i, subloop_node)
	    (*postorder_func) (subloop_node);
	  loop_rev_postorder.release ();
	}
    }

  for (subloop_node = loop_node->subloops;
       subloop_node != NULL;
       subloop_node = subloop_node->subloop_next)
    {
      ira_assert (subloop_node->bb == NULL);
      ira_traverse_loop_tree (bb_p, subloop_node,
			      preorder_func, postorder_func);
    }

  /* The recursive calls changed the current-node globals; restore
     them before running POSTORDER_FUNC on this node.  */
  ira_curr_loop_tree_node = loop_node;
  ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;

  if (postorder_func != NULL)
    (*postorder_func) (loop_node);
}
1851
1852\f
1853
/* The basic block currently being processed.  */
static basic_block curr_bb;

/* This recursive function creates allocnos corresponding to
   pseudo-registers containing in X.  True OUTPUT_P means that X is
   an lvalue.  */
static void
create_insn_allocnos (rtx x, bool output_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      int regno;

      /* Only pseudo registers get allocnos.  */
      if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
	{
	  ira_allocno_t a;

	  if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
	    a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);

	  ALLOCNO_NREFS (a)++;
	  ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
	  if (output_p)
	    bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
	}
      return;
    }
  else if (code == SET)
    {
      create_insn_allocnos (SET_DEST (x), true);
      create_insn_allocnos (SET_SRC (x), false);
      return;
    }
  else if (code == CLOBBER)
    {
      create_insn_allocnos (XEXP (x, 0), true);
      return;
    }
  else if (code == MEM)
    {
      /* The address inside a MEM is always a use, even when the MEM
	 itself is a destination.  */
      create_insn_allocnos (XEXP (x, 0), false);
      return;
    }
  else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
	   code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
    {
      /* Auto-inc/dec both reads and writes the address register.  */
      create_insn_allocnos (XEXP (x, 0), true);
      create_insn_allocnos (XEXP (x, 0), false);
      return;
    }

  /* Generic walk over the remaining rtx operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	create_insn_allocnos (XEXP (x, i), output_p);
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  create_insn_allocnos (XVECEXP (x, i, j), output_p);
    }
}
1919
/* Create allocnos corresponding to pseudo-registers living in the
   basic block represented by the corresponding loop tree node
   BB_NODE.  Insns are scanned in reverse order.  */
static void
create_bb_allocnos (ira_loop_tree_node_t bb_node)
{
  basic_block bb;
  rtx insn;
  unsigned int i;
  bitmap_iterator bi;

  curr_bb = bb = bb_node->bb;
  ira_assert (bb != NULL);
  FOR_BB_INSNS_REVERSE (bb, insn)
    if (NONDEBUG_INSN_P (insn))
      create_insn_allocnos (PATTERN (insn), false);
  /* It might be an allocno living through from one subloop to
     another: create allocnos for pseudos live on entry that no insn
     in this block mentions.  */
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb), FIRST_PSEUDO_REGISTER, i, bi)
    if (ira_curr_regno_allocno_map[i] == NULL)
      ira_create_allocno (i, false, ira_curr_loop_tree_node);
}
1942
/* Create allocnos corresponding to pseudo-registers living on edge E
   (a loop entry or exit).  Also mark the allocnos as living on the
   loop border.  A pseudo counts only if it is live out of E's source
   AND live into E's destination.  */
static void
create_loop_allocnos (edge e)
{
  unsigned int i;
  bitmap live_in_regs, border_allocnos;
  bitmap_iterator bi;
  ira_loop_tree_node_t parent;

  live_in_regs = df_get_live_in (e->dest);
  border_allocnos = ira_curr_loop_tree_node->border_allocnos;
  EXECUTE_IF_SET_IN_REG_SET (df_get_live_out (e->src),
			     FIRST_PSEUDO_REGISTER, i, bi)
    if (bitmap_bit_p (live_in_regs, i))
      {
	if (ira_curr_regno_allocno_map[i] == NULL)
	  {
	    /* The order of creations is important for right
	       ira_regno_allocno_map.  */
	    if ((parent = ira_curr_loop_tree_node->parent) != NULL
		&& parent->regno_allocno_map[i] == NULL)
	      ira_create_allocno (i, false, parent);
	    ira_create_allocno (i, false, ira_curr_loop_tree_node);
	  }
	bitmap_set_bit (border_allocnos,
			ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
      }
}
1973
1974/* Create allocnos corresponding to pseudo-registers living in loop
1975 represented by the corresponding loop tree node LOOP_NODE. This
1976 function is called by ira_traverse_loop_tree. */
1977static void
1978create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1979{
1980 if (loop_node->bb != NULL)
1981 create_bb_allocnos (loop_node);
1982 else if (loop_node != ira_loop_tree_root)
1983 {
1984 int i;
1985 edge_iterator ei;
1986 edge e;
9771b263 1987 vec<edge> edges;
058e97ec 1988
2608d841 1989 ira_assert (current_loops != NULL);
058e97ec
VM
1990 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1991 if (e->src != loop_node->loop->latch)
1992 create_loop_allocnos (e);
b8698a0f 1993
058e97ec 1994 edges = get_loop_exit_edges (loop_node->loop);
9771b263 1995 FOR_EACH_VEC_ELT (edges, i, e)
058e97ec 1996 create_loop_allocnos (e);
9771b263 1997 edges.release ();
058e97ec
VM
1998 }
1999}
2000
2001/* Propagate information about allocnos modified inside the loop given
2002 by its LOOP_TREE_NODE to its parent. */
2003static void
2004propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
2005{
2006 if (loop_tree_node == ira_loop_tree_root)
2007 return;
2008 ira_assert (loop_tree_node->bb == NULL);
2009 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
2010 loop_tree_node->modified_regnos);
2011}
2012
2013/* Propagate new info about allocno A (see comments about accumulated
2014 info in allocno definition) to the corresponding allocno on upper
2015 loop tree level. So allocnos on upper levels accumulate
2016 information about the corresponding allocnos in nested regions.
2017 The new info means allocno info finally calculated in this
2018 file. */
2019static void
2020propagate_allocno_info (void)
2021{
2022 int i;
2023 ira_allocno_t a, parent_a;
2024 ira_loop_tree_node_t parent;
1756cb66 2025 enum reg_class aclass;
058e97ec 2026
7db7ed3c
VM
2027 if (flag_ira_region != IRA_REGION_ALL
2028 && flag_ira_region != IRA_REGION_MIXED)
058e97ec
VM
2029 return;
2030 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2031 for (a = ira_regno_allocno_map[i];
2032 a != NULL;
2033 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2034 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
2035 && (parent_a = parent->regno_allocno_map[i]) != NULL
2036 /* There are no caps yet at this point. So use
2037 border_allocnos to find allocnos for the propagation. */
2038 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
2039 ALLOCNO_NUM (a)))
2040 {
927425df
VM
2041 if (! ALLOCNO_BAD_SPILL_P (a))
2042 ALLOCNO_BAD_SPILL_P (parent_a) = false;
058e97ec
VM
2043 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
2044 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
2045 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
3c55880a 2046 merge_hard_reg_conflicts (a, parent_a, true);
058e97ec
VM
2047 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2048 += ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
2049 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
2050 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
058e97ec
VM
2051 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2052 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1756cb66
VM
2053 aclass = ALLOCNO_CLASS (a);
2054 ira_assert (aclass == ALLOCNO_CLASS (parent_a));
058e97ec 2055 ira_allocate_and_accumulate_costs
1756cb66 2056 (&ALLOCNO_HARD_REG_COSTS (parent_a), aclass,
058e97ec
VM
2057 ALLOCNO_HARD_REG_COSTS (a));
2058 ira_allocate_and_accumulate_costs
2059 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1756cb66 2060 aclass,
058e97ec 2061 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1756cb66
VM
2062 ALLOCNO_CLASS_COST (parent_a)
2063 += ALLOCNO_CLASS_COST (a);
058e97ec 2064 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
058e97ec
VM
2065 }
2066}
2067
2068/* Create allocnos corresponding to pseudo-registers in the current
2069 function. Traverse the loop tree for this. */
2070static void
2071create_allocnos (void)
2072{
2073 /* We need to process BB first to correctly link allocnos by member
2074 next_regno_allocno. */
2075 ira_traverse_loop_tree (true, ira_loop_tree_root,
2076 create_loop_tree_node_allocnos, NULL);
2077 if (optimize)
2078 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
2079 propagate_modified_regnos);
2080}
2081
2082\f
2083
/* This page contains functions to remove some regions from separate
   register allocation.  We remove regions whose separate allocation
   will hardly improve the result.  As a result we speed up regional
   register allocation.  */
2088
9140d27b 2089/* The function changes the object in range list given by R to OBJ. */
058e97ec 2090static void
9140d27b 2091change_object_in_range_list (live_range_t r, ira_object_t obj)
058e97ec
VM
2092{
2093 for (; r != NULL; r = r->next)
9140d27b 2094 r->object = obj;
058e97ec
VM
2095}
2096
3c55880a
BS
2097/* Move all live ranges associated with allocno FROM to allocno TO. */
2098static void
2099move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
2100{
ac0ab4f7
BS
2101 int i;
2102 int n = ALLOCNO_NUM_OBJECTS (from);
2103
2104 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
3c55880a 2105
ac0ab4f7 2106 for (i = 0; i < n; i++)
3c55880a 2107 {
ac0ab4f7
BS
2108 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
2109 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
2110 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
2111
2112 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2113 {
2114 fprintf (ira_dump_file,
2115 " Moving ranges of a%dr%d to a%dr%d: ",
2116 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
2117 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
2118 ira_print_live_range_list (ira_dump_file, lr);
2119 }
2120 change_object_in_range_list (lr, to_obj);
2121 OBJECT_LIVE_RANGES (to_obj)
2122 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
2123 OBJECT_LIVE_RANGES (from_obj) = NULL;
3c55880a 2124 }
3c55880a
BS
2125}
2126
3c55880a
BS
2127static void
2128copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
2129{
ac0ab4f7
BS
2130 int i;
2131 int n = ALLOCNO_NUM_OBJECTS (from);
3c55880a 2132
ac0ab4f7
BS
2133 gcc_assert (n == ALLOCNO_NUM_OBJECTS (to));
2134
2135 for (i = 0; i < n; i++)
3c55880a 2136 {
ac0ab4f7
BS
2137 ira_object_t from_obj = ALLOCNO_OBJECT (from, i);
2138 ira_object_t to_obj = ALLOCNO_OBJECT (to, i);
2139 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
2140
2141 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2142 {
2143 fprintf (ira_dump_file, " Copying ranges of a%dr%d to a%dr%d: ",
2144 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
2145 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
2146 ira_print_live_range_list (ira_dump_file, lr);
2147 }
2148 lr = ira_copy_live_range_list (lr);
2149 change_object_in_range_list (lr, to_obj);
2150 OBJECT_LIVE_RANGES (to_obj)
2151 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
3c55880a 2152 }
3c55880a
BS
2153}
2154
058e97ec
VM
2155/* Return TRUE if NODE represents a loop with low register
2156 pressure. */
2157static bool
2158low_pressure_loop_node_p (ira_loop_tree_node_t node)
2159{
2160 int i;
1756cb66 2161 enum reg_class pclass;
b8698a0f 2162
058e97ec
VM
2163 if (node->bb != NULL)
2164 return false;
b8698a0f 2165
1756cb66 2166 for (i = 0; i < ira_pressure_classes_num; i++)
058e97ec 2167 {
1756cb66 2168 pclass = ira_pressure_classes[i];
f508f827
RS
2169 if (node->reg_pressure[pclass] > ira_class_hard_regs_num[pclass]
2170 && ira_class_hard_regs_num[pclass] > 1)
058e97ec
VM
2171 return false;
2172 }
2173 return true;
2174}
2175
#ifdef STACK_REGS
/* Return TRUE if LOOP has a complex enter or exit edge.  We do not
   form a region from such a loop if the target uses stack registers
   because reg-stack.c can not deal with such edges.  */
static bool
loop_with_complex_edge_p (struct loop *loop)
{
  int i;
  edge_iterator ei;
  edge e;
  vec<edge> exit_edges;
  bool complex_p = false;

  /* An EH edge into the header makes the loop complex.  */
  FOR_EACH_EDGE (e, ei, loop->header->preds)
    if (e->flags & EDGE_EH)
      return true;
  /* So does any complex exit edge.  */
  exit_edges = get_loop_exit_edges (loop);
  FOR_EACH_VEC_ELT (exit_edges, i, e)
    if (e->flags & EDGE_COMPLEX)
      {
	complex_p = true;
	break;
      }
  exit_edges.release ();
  return complex_p;
}
#endif
8c5fdaae 2204
30ea859e
VM
2205/* Sort loops for marking them for removal. We put already marked
2206 loops first, then less frequent loops next, and then outer loops
2207 next. */
2208static int
2209loop_compare_func (const void *v1p, const void *v2p)
2210{
2211 int diff;
2212 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
2213 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
2214
2215 ira_assert (l1->parent != NULL && l2->parent != NULL);
2216 if (l1->to_remove_p && ! l2->to_remove_p)
2217 return -1;
2218 if (! l1->to_remove_p && l2->to_remove_p)
2219 return 1;
2220 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
2221 return diff;
2222 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
2223 return diff;
2224 /* Make sorting stable. */
2608d841 2225 return l1->loop_num - l2->loop_num;
30ea859e
VM
2226}
2227
30ea859e
VM
2228/* Mark loops which should be removed from regional allocation. We
2229 remove a loop with low register pressure inside another loop with
2230 register pressure. In this case a separate allocation of the loop
2231 hardly helps (for irregular register file architecture it could
2232 help by choosing a better hard register in the loop but we prefer
2233 faster allocation even in this case). We also remove cheap loops
8c5fdaae
VM
2234 if there are more than IRA_MAX_LOOPS_NUM of them. Loop with EH
2235 exit or enter edges are removed too because the allocation might
2236 require put pseudo moves on the EH edges (we could still do this
2237 for pseudos with caller saved hard registers in some cases but it
2238 is impossible to say here or during top-down allocation pass what
2239 hard register the pseudos get finally). */
30ea859e
VM
2240static void
2241mark_loops_for_removal (void)
058e97ec 2242{
30ea859e
VM
2243 int i, n;
2244 ira_loop_tree_node_t *sorted_loops;
2245 loop_p loop;
2246
2608d841 2247 ira_assert (current_loops != NULL);
30ea859e
VM
2248 sorted_loops
2249 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
0fc822d0
RB
2250 * number_of_loops (cfun));
2251 for (n = i = 0; vec_safe_iterate (get_loops (cfun), i, &loop); i++)
30ea859e
VM
2252 if (ira_loop_nodes[i].regno_allocno_map != NULL)
2253 {
2254 if (ira_loop_nodes[i].parent == NULL)
2255 {
2256 /* Don't remove the root. */
2257 ira_loop_nodes[i].to_remove_p = false;
2258 continue;
2259 }
2260 sorted_loops[n++] = &ira_loop_nodes[i];
2261 ira_loop_nodes[i].to_remove_p
8c5fdaae
VM
2262 = ((low_pressure_loop_node_p (ira_loop_nodes[i].parent)
2263 && low_pressure_loop_node_p (&ira_loop_nodes[i]))
30a435d8
VM
2264#ifdef STACK_REGS
2265 || loop_with_complex_edge_p (ira_loop_nodes[i].loop)
2266#endif
2267 );
30ea859e
VM
2268 }
2269 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
2270 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
2271 {
2272 sorted_loops[i]->to_remove_p = true;
2273 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2274 fprintf
2275 (ira_dump_file,
2276 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
2608d841 2277 sorted_loops[i]->loop_num, sorted_loops[i]->loop->header->index,
30ea859e
VM
2278 sorted_loops[i]->loop->header->frequency,
2279 loop_depth (sorted_loops[i]->loop),
2280 low_pressure_loop_node_p (sorted_loops[i]->parent)
2281 && low_pressure_loop_node_p (sorted_loops[i])
2282 ? "low pressure" : "cheap loop");
2283 }
2284 ira_free (sorted_loops);
058e97ec
VM
2285}
2286
311aab06
VM
2287/* Mark all loops but root for removing. */
2288static void
2289mark_all_loops_for_removal (void)
2290{
2291 int i;
2292 loop_p loop;
2293
2608d841 2294 ira_assert (current_loops != NULL);
0fc822d0 2295 FOR_EACH_VEC_SAFE_ELT (get_loops (cfun), i, loop)
311aab06
VM
2296 if (ira_loop_nodes[i].regno_allocno_map != NULL)
2297 {
2298 if (ira_loop_nodes[i].parent == NULL)
2299 {
2300 /* Don't remove the root. */
2301 ira_loop_nodes[i].to_remove_p = false;
2302 continue;
2303 }
2304 ira_loop_nodes[i].to_remove_p = true;
2305 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2306 fprintf
2307 (ira_dump_file,
2308 " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
2608d841 2309 ira_loop_nodes[i].loop_num,
311aab06
VM
2310 ira_loop_nodes[i].loop->header->index,
2311 ira_loop_nodes[i].loop->header->frequency,
2312 loop_depth (ira_loop_nodes[i].loop));
2313 }
2314}
30ea859e 2315
058e97ec 2316/* Definition of vector of loop tree nodes. */
058e97ec
VM
2317
2318/* Vec containing references to all removed loop tree nodes. */
9771b263 2319static vec<ira_loop_tree_node_t> removed_loop_vec;
058e97ec
VM
2320
2321/* Vec containing references to all children of loop tree nodes. */
9771b263 2322static vec<ira_loop_tree_node_t> children_vec;
058e97ec
VM
2323
2324/* Remove subregions of NODE if their separate allocation will not
2325 improve the result. */
2326static void
2327remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
2328{
2329 unsigned int start;
2330 bool remove_p;
2331 ira_loop_tree_node_t subnode;
2332
30ea859e 2333 remove_p = node->to_remove_p;
058e97ec 2334 if (! remove_p)
9771b263
DN
2335 children_vec.safe_push (node);
2336 start = children_vec.length ();
058e97ec
VM
2337 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
2338 if (subnode->bb == NULL)
2339 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
2340 else
9771b263 2341 children_vec.safe_push (subnode);
058e97ec
VM
2342 node->children = node->subloops = NULL;
2343 if (remove_p)
2344 {
9771b263 2345 removed_loop_vec.safe_push (node);
058e97ec
VM
2346 return;
2347 }
9771b263 2348 while (children_vec.length () > start)
058e97ec 2349 {
9771b263 2350 subnode = children_vec.pop ();
058e97ec
VM
2351 subnode->parent = node;
2352 subnode->next = node->children;
2353 node->children = subnode;
2354 if (subnode->bb == NULL)
2355 {
2356 subnode->subloop_next = node->subloops;
2357 node->subloops = subnode;
2358 }
2359 }
2360}
2361
c6bb4c93
VM
2362/* Return TRUE if NODE is inside PARENT. */
2363static bool
2364loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
2365{
2366 for (node = node->parent; node != NULL; node = node->parent)
2367 if (node == parent)
2368 return true;
2369 return false;
2370}
2371
2372/* Sort allocnos according to their order in regno allocno list. */
2373static int
2374regno_allocno_order_compare_func (const void *v1p, const void *v2p)
2375{
2376 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2377 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2378 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
2379 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
2380
2381 if (loop_is_inside_p (n1, n2))
2382 return -1;
2383 else if (loop_is_inside_p (n2, n1))
2384 return 1;
2385 /* If allocnos are equally good, sort by allocno numbers, so that
2386 the results of qsort leave nothing to chance. We put allocnos
2387 with higher number first in the list because it is the original
2388 order for allocnos from loops on the same levels. */
2389 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
2390}
2391
2392/* This array is used to sort allocnos to restore allocno order in
2393 the regno allocno list. */
2394static ira_allocno_t *regno_allocnos;
2395
2396/* Restore allocno order for REGNO in the regno allocno list. */
2397static void
2398ira_rebuild_regno_allocno_list (int regno)
2399{
2400 int i, n;
2401 ira_allocno_t a;
2402
2403 for (n = 0, a = ira_regno_allocno_map[regno];
2404 a != NULL;
2405 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2406 regno_allocnos[n++] = a;
2407 ira_assert (n > 0);
b8698a0f 2408 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
c6bb4c93
VM
2409 regno_allocno_order_compare_func);
2410 for (i = 1; i < n; i++)
2411 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
2412 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
2413 ira_regno_allocno_map[regno] = regno_allocnos[0];
2414 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
2415 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
2416}
2417
311aab06
VM
2418/* Propagate info from allocno FROM_A to allocno A. */
2419static void
2420propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
2421{
1756cb66 2422 enum reg_class aclass;
311aab06 2423
3c55880a 2424 merge_hard_reg_conflicts (from_a, a, false);
311aab06
VM
2425 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
2426 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
2427 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
311aab06 2428 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
e384e6b5
BS
2429 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a)
2430 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (from_a);
311aab06
VM
2431 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
2432 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
2433 if (! ALLOCNO_BAD_SPILL_P (from_a))
2434 ALLOCNO_BAD_SPILL_P (a) = false;
1756cb66
VM
2435 aclass = ALLOCNO_CLASS (from_a);
2436 ira_assert (aclass == ALLOCNO_CLASS (a));
2437 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), aclass,
311aab06
VM
2438 ALLOCNO_HARD_REG_COSTS (from_a));
2439 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 2440 aclass,
311aab06 2441 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
1756cb66 2442 ALLOCNO_CLASS_COST (a) += ALLOCNO_CLASS_COST (from_a);
311aab06
VM
2443 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
2444}
2445
058e97ec
VM
2446/* Remove allocnos from loops removed from the allocation
2447 consideration. */
2448static void
2449remove_unnecessary_allocnos (void)
2450{
2451 int regno;
c6bb4c93 2452 bool merged_p, rebuild_p;
058e97ec
VM
2453 ira_allocno_t a, prev_a, next_a, parent_a;
2454 ira_loop_tree_node_t a_node, parent;
058e97ec
VM
2455
2456 merged_p = false;
c6bb4c93 2457 regno_allocnos = NULL;
058e97ec 2458 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
c6bb4c93
VM
2459 {
2460 rebuild_p = false;
2461 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
2462 a != NULL;
2463 a = next_a)
2464 {
2465 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
2466 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2467 if (! a_node->to_remove_p)
2468 prev_a = a;
2469 else
2470 {
2471 for (parent = a_node->parent;
2472 (parent_a = parent->regno_allocno_map[regno]) == NULL
2473 && parent->to_remove_p;
2474 parent = parent->parent)
2475 ;
2476 if (parent_a == NULL)
2477 {
311aab06
VM
2478 /* There are no allocnos with the same regno in
2479 upper region -- just move the allocno to the
2480 upper region. */
c6bb4c93
VM
2481 prev_a = a;
2482 ALLOCNO_LOOP_TREE_NODE (a) = parent;
2483 parent->regno_allocno_map[regno] = a;
2484 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
2485 rebuild_p = true;
2486 }
2487 else
2488 {
2489 /* Remove the allocno and update info of allocno in
2490 the upper region. */
2491 if (prev_a == NULL)
2492 ira_regno_allocno_map[regno] = next_a;
2493 else
2494 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
3c55880a 2495 move_allocno_live_ranges (a, parent_a);
c6bb4c93 2496 merged_p = true;
311aab06 2497 propagate_some_info_from_allocno (parent_a, a);
46044dd9
L
2498 /* Remove it from the corresponding regno allocno
2499 map to avoid info propagation of subsequent
2500 allocno into this already removed allocno. */
2501 a_node->regno_allocno_map[regno] = NULL;
3b6d1699 2502 ira_remove_allocno_prefs (a);
c6bb4c93
VM
2503 finish_allocno (a);
2504 }
2505 }
2506 }
2507 if (rebuild_p)
2508 /* We need to restore the order in regno allocno list. */
2509 {
2510 if (regno_allocnos == NULL)
2511 regno_allocnos
2512 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
2513 * ira_allocnos_num);
2514 ira_rebuild_regno_allocno_list (regno);
2515 }
2516 }
058e97ec
VM
2517 if (merged_p)
2518 ira_rebuild_start_finish_chains ();
c6bb4c93
VM
2519 if (regno_allocnos != NULL)
2520 ira_free (regno_allocnos);
058e97ec
VM
2521}
2522
311aab06 2523/* Remove allocnos from all loops but the root. */
058e97ec 2524static void
311aab06 2525remove_low_level_allocnos (void)
058e97ec 2526{
311aab06
VM
2527 int regno;
2528 bool merged_p, propagate_p;
2529 ira_allocno_t a, top_a;
2530 ira_loop_tree_node_t a_node, parent;
311aab06
VM
2531 ira_allocno_iterator ai;
2532
2533 merged_p = false;
2534 FOR_EACH_ALLOCNO (a, ai)
2535 {
2536 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2537 if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
2538 continue;
2539 regno = ALLOCNO_REGNO (a);
2540 if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
2541 {
2542 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2543 ira_loop_tree_root->regno_allocno_map[regno] = a;
2544 continue;
2545 }
2546 propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
2547 /* Remove the allocno and update info of allocno in the upper
2548 region. */
3c55880a 2549 move_allocno_live_ranges (a, top_a);
311aab06 2550 merged_p = true;
311aab06
VM
2551 if (propagate_p)
2552 propagate_some_info_from_allocno (top_a, a);
2553 }
2554 FOR_EACH_ALLOCNO (a, ai)
2555 {
2556 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2557 if (a_node == ira_loop_tree_root)
2558 continue;
2559 parent = a_node->parent;
2560 regno = ALLOCNO_REGNO (a);
2561 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2562 ira_assert (ALLOCNO_CAP (a) != NULL);
2563 else if (ALLOCNO_CAP (a) == NULL)
2564 ira_assert (parent->regno_allocno_map[regno] != NULL);
2565 }
2566 FOR_EACH_ALLOCNO (a, ai)
2567 {
2568 regno = ALLOCNO_REGNO (a);
2569 if (ira_loop_tree_root->regno_allocno_map[regno] == a)
2570 {
ac0ab4f7
BS
2571 ira_object_t obj;
2572 ira_allocno_object_iterator oi;
a49ae217 2573
311aab06
VM
2574 ira_regno_allocno_map[regno] = a;
2575 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
2576 ALLOCNO_CAP_MEMBER (a) = NULL;
ac0ab4f7
BS
2577 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2578 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
2579 OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
311aab06
VM
2580#ifdef STACK_REGS
2581 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2582 ALLOCNO_NO_STACK_REG_P (a) = true;
2583#endif
2584 }
2585 else
3b6d1699
VM
2586 {
2587 ira_remove_allocno_prefs (a);
2588 finish_allocno (a);
2589 }
311aab06
VM
2590 }
2591 if (merged_p)
2592 ira_rebuild_start_finish_chains ();
2593}
2594
2595/* Remove loops from consideration. We remove all loops except for
2596 root if ALL_P or loops for which a separate allocation will not
2597 improve the result. We have to do this after allocno creation and
1756cb66
VM
2598 their costs and allocno class evaluation because only after that
2599 the register pressure can be known and is calculated. */
311aab06
VM
2600static void
2601remove_unnecessary_regions (bool all_p)
2602{
2608d841
VM
2603 if (current_loops == NULL)
2604 return;
311aab06
VM
2605 if (all_p)
2606 mark_all_loops_for_removal ();
2607 else
2608 mark_loops_for_removal ();
8b1c6fd7
DM
2609 children_vec.create (last_basic_block_for_fn (cfun)
2610 + number_of_loops (cfun));
2611 removed_loop_vec.create (last_basic_block_for_fn (cfun)
2612 + number_of_loops (cfun));
9771b263
DN
2613 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
2614 children_vec.release ();
311aab06
VM
2615 if (all_p)
2616 remove_low_level_allocnos ();
2617 else
2618 remove_unnecessary_allocnos ();
9771b263
DN
2619 while (removed_loop_vec.length () > 0)
2620 finish_loop_tree_node (removed_loop_vec.pop ());
2621 removed_loop_vec.release ();
058e97ec
VM
2622}
2623
2624\f
2625
927425df
VM
2626/* At this point true value of allocno attribute bad_spill_p means
2627 that there is an insn where allocno occurs and where the allocno
2628 can not be used as memory. The function updates the attribute, now
2629 it can be true only for allocnos which can not be used as memory in
2630 an insn and in whose live ranges there is other allocno deaths.
2631 Spilling allocnos with true value will not improve the code because
2632 it will not make other allocnos colorable and additional reloads
2633 for the corresponding pseudo will be generated in reload pass for
2634 each insn it occurs.
2635
2636 This is a trick mentioned in one classic article of Chaitin etc
2637 which is frequently omitted in other implementations of RA based on
2638 graph coloring. */
2639static void
2640update_bad_spill_attribute (void)
2641{
2642 int i;
2643 ira_allocno_t a;
2644 ira_allocno_iterator ai;
ac0ab4f7
BS
2645 ira_allocno_object_iterator aoi;
2646 ira_object_t obj;
b14151b5 2647 live_range_t r;
1756cb66 2648 enum reg_class aclass;
927425df
VM
2649 bitmap_head dead_points[N_REG_CLASSES];
2650
1756cb66 2651 for (i = 0; i < ira_allocno_classes_num; i++)
927425df 2652 {
1756cb66
VM
2653 aclass = ira_allocno_classes[i];
2654 bitmap_initialize (&dead_points[aclass], &reg_obstack);
927425df
VM
2655 }
2656 FOR_EACH_ALLOCNO (a, ai)
2657 {
1756cb66
VM
2658 aclass = ALLOCNO_CLASS (a);
2659 if (aclass == NO_REGS)
927425df 2660 continue;
ac0ab4f7
BS
2661 FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
2662 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1756cb66 2663 bitmap_set_bit (&dead_points[aclass], r->finish);
927425df
VM
2664 }
2665 FOR_EACH_ALLOCNO (a, ai)
2666 {
1756cb66
VM
2667 aclass = ALLOCNO_CLASS (a);
2668 if (aclass == NO_REGS)
927425df
VM
2669 continue;
2670 if (! ALLOCNO_BAD_SPILL_P (a))
2671 continue;
ac0ab4f7 2672 FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
927425df 2673 {
ac0ab4f7
BS
2674 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
2675 {
2676 for (i = r->start + 1; i < r->finish; i++)
1756cb66 2677 if (bitmap_bit_p (&dead_points[aclass], i))
ac0ab4f7
BS
2678 break;
2679 if (i < r->finish)
2680 break;
2681 }
2682 if (r != NULL)
2683 {
2684 ALLOCNO_BAD_SPILL_P (a) = false;
927425df 2685 break;
ac0ab4f7 2686 }
927425df 2687 }
927425df 2688 }
1756cb66 2689 for (i = 0; i < ira_allocno_classes_num; i++)
927425df 2690 {
1756cb66
VM
2691 aclass = ira_allocno_classes[i];
2692 bitmap_clear (&dead_points[aclass]);
927425df
VM
2693 }
2694}
2695
2696\f
2697
058e97ec
VM
2698/* Set up minimal and maximal live range points for allocnos. */
2699static void
2700setup_min_max_allocno_live_range_point (void)
2701{
2702 int i;
2703 ira_allocno_t a, parent_a, cap;
2704 ira_allocno_iterator ai;
ac0ab4f7
BS
2705#ifdef ENABLE_IRA_CHECKING
2706 ira_object_iterator oi;
2707 ira_object_t obj;
2708#endif
b14151b5 2709 live_range_t r;
058e97ec
VM
2710 ira_loop_tree_node_t parent;
2711
2712 FOR_EACH_ALLOCNO (a, ai)
2713 {
ac0ab4f7 2714 int n = ALLOCNO_NUM_OBJECTS (a);
1756cb66 2715
ac0ab4f7
BS
2716 for (i = 0; i < n; i++)
2717 {
2718 ira_object_t obj = ALLOCNO_OBJECT (a, i);
2719 r = OBJECT_LIVE_RANGES (obj);
2720 if (r == NULL)
2721 continue;
2722 OBJECT_MAX (obj) = r->finish;
2723 for (; r->next != NULL; r = r->next)
2724 ;
2725 OBJECT_MIN (obj) = r->start;
2726 }
058e97ec
VM
2727 }
2728 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2729 for (a = ira_regno_allocno_map[i];
2730 a != NULL;
2731 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2732 {
ac0ab4f7
BS
2733 int j;
2734 int n = ALLOCNO_NUM_OBJECTS (a);
1756cb66 2735
ac0ab4f7 2736 for (j = 0; j < n; j++)
058e97ec 2737 {
ac0ab4f7
BS
2738 ira_object_t obj = ALLOCNO_OBJECT (a, j);
2739 ira_object_t parent_obj;
2740
2741 if (OBJECT_MAX (obj) < 0)
2742 continue;
2743 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2744 /* Accumulation of range info. */
2745 if (ALLOCNO_CAP (a) != NULL)
058e97ec 2746 {
ac0ab4f7
BS
2747 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
2748 {
2749 ira_object_t cap_obj = ALLOCNO_OBJECT (cap, j);
2750 if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
2751 OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
2752 if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
2753 OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
2754 }
2755 continue;
058e97ec 2756 }
ac0ab4f7
BS
2757 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
2758 continue;
2759 parent_a = parent->regno_allocno_map[i];
2760 parent_obj = ALLOCNO_OBJECT (parent_a, j);
2761 if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
2762 OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
2763 if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
2764 OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
058e97ec 2765 }
058e97ec
VM
2766 }
2767#ifdef ENABLE_IRA_CHECKING
ac0ab4f7 2768 FOR_EACH_OBJECT (obj, oi)
058e97ec 2769 {
a49ae217
BS
2770 if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
2771 && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
058e97ec
VM
2772 continue;
2773 gcc_unreachable ();
2774 }
2775#endif
2776}
2777
2778/* Sort allocnos according to their live ranges. Allocnos with
1756cb66
VM
2779 smaller allocno class are put first unless we use priority
2780 coloring. Allocnos with the same class are ordered according
2781 their start (min). Allocnos with the same start are ordered
2782 according their finish (max). */
058e97ec 2783static int
ac0ab4f7 2784object_range_compare_func (const void *v1p, const void *v2p)
058e97ec
VM
2785{
2786 int diff;
a49ae217
BS
2787 ira_object_t obj1 = *(const ira_object_t *) v1p;
2788 ira_object_t obj2 = *(const ira_object_t *) v2p;
2789 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2790 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
058e97ec 2791
a49ae217 2792 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
058e97ec 2793 return diff;
a49ae217 2794 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
058e97ec
VM
2795 return diff;
2796 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2797}
2798
a49ae217 2799/* Sort ira_object_id_map and set up conflict id of allocnos. */
058e97ec 2800static void
a49ae217 2801sort_conflict_id_map (void)
058e97ec
VM
2802{
2803 int i, num;
2804 ira_allocno_t a;
2805 ira_allocno_iterator ai;
2806
2807 num = 0;
2808 FOR_EACH_ALLOCNO (a, ai)
ac0ab4f7
BS
2809 {
2810 ira_allocno_object_iterator oi;
2811 ira_object_t obj;
2812
2813 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
2814 ira_object_id_map[num++] = obj;
2815 }
a49ae217 2816 qsort (ira_object_id_map, num, sizeof (ira_object_t),
ac0ab4f7 2817 object_range_compare_func);
058e97ec 2818 for (i = 0; i < num; i++)
a49ae217
BS
2819 {
2820 ira_object_t obj = ira_object_id_map[i];
1756cb66 2821
a49ae217
BS
2822 gcc_assert (obj != NULL);
2823 OBJECT_CONFLICT_ID (obj) = i;
2824 }
2825 for (i = num; i < ira_objects_num; i++)
2826 ira_object_id_map[i] = NULL;
058e97ec
VM
2827}
2828
2829/* Set up minimal and maximal conflict ids of allocnos with which
2830 given allocno can conflict. */
2831static void
2832setup_min_max_conflict_allocno_ids (void)
2833{
1756cb66 2834 int aclass;
058e97ec
VM
2835 int i, j, min, max, start, finish, first_not_finished, filled_area_start;
2836 int *live_range_min, *last_lived;
ac0ab4f7 2837 int word0_min, word0_max;
058e97ec 2838 ira_allocno_t a;
ac0ab4f7 2839 ira_allocno_iterator ai;
058e97ec 2840
a49ae217 2841 live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
1756cb66 2842 aclass = -1;
058e97ec 2843 first_not_finished = -1;
a49ae217 2844 for (i = 0; i < ira_objects_num; i++)
058e97ec 2845 {
a49ae217 2846 ira_object_t obj = ira_object_id_map[i];
1756cb66 2847
a49ae217 2848 if (obj == NULL)
058e97ec 2849 continue;
a49ae217
BS
2850
2851 a = OBJECT_ALLOCNO (obj);
2852
1756cb66 2853 if (aclass < 0)
058e97ec 2854 {
1756cb66 2855 aclass = ALLOCNO_CLASS (a);
058e97ec
VM
2856 min = i;
2857 first_not_finished = i;
2858 }
2859 else
2860 {
a49ae217 2861 start = OBJECT_MIN (obj);
058e97ec
VM
2862 /* If we skip an allocno, the allocno with smaller ids will
2863 be also skipped because of the secondary sorting the
2864 range finishes (see function
ac0ab4f7 2865 object_range_compare_func). */
058e97ec 2866 while (first_not_finished < i
a49ae217 2867 && start > OBJECT_MAX (ira_object_id_map
ac0ab4f7 2868 [first_not_finished]))
058e97ec
VM
2869 first_not_finished++;
2870 min = first_not_finished;
b8698a0f 2871 }
058e97ec
VM
2872 if (min == i)
2873 /* We could increase min further in this case but it is good
2874 enough. */
2875 min++;
a49ae217
BS
2876 live_range_min[i] = OBJECT_MIN (obj);
2877 OBJECT_MIN (obj) = min;
058e97ec
VM
2878 }
2879 last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
1756cb66 2880 aclass = -1;
058e97ec 2881 filled_area_start = -1;
a49ae217 2882 for (i = ira_objects_num - 1; i >= 0; i--)
058e97ec 2883 {
a49ae217 2884 ira_object_t obj = ira_object_id_map[i];
1756cb66 2885
a49ae217 2886 if (obj == NULL)
058e97ec 2887 continue;
a49ae217
BS
2888
2889 a = OBJECT_ALLOCNO (obj);
1756cb66 2890 if (aclass < 0)
058e97ec 2891 {
1756cb66 2892 aclass = ALLOCNO_CLASS (a);
058e97ec
VM
2893 for (j = 0; j < ira_max_point; j++)
2894 last_lived[j] = -1;
2895 filled_area_start = ira_max_point;
2896 }
2897 min = live_range_min[i];
a49ae217 2898 finish = OBJECT_MAX (obj);
058e97ec
VM
2899 max = last_lived[finish];
2900 if (max < 0)
2901 /* We could decrease max further in this case but it is good
2902 enough. */
a49ae217
BS
2903 max = OBJECT_CONFLICT_ID (obj) - 1;
2904 OBJECT_MAX (obj) = max;
058e97ec
VM
2905 /* In filling, we can go further A range finish to recognize
2906 intersection quickly because if the finish of subsequently
2907 processed allocno (it has smaller conflict id) range is
2908 further A range finish than they are definitely intersected
2909 (the reason for this is the allocnos with bigger conflict id
2910 have their range starts not smaller than allocnos with
2911 smaller ids. */
2912 for (j = min; j < filled_area_start; j++)
2913 last_lived[j] = i;
2914 filled_area_start = min;
2915 }
2916 ira_free (last_lived);
2917 ira_free (live_range_min);
ac0ab4f7
BS
2918
2919 /* For allocnos with more than one object, we may later record extra conflicts in
2920 subobject 0 that we cannot really know about here.
2921 For now, simply widen the min/max range of these subobjects. */
2922
2923 word0_min = INT_MAX;
2924 word0_max = INT_MIN;
2925
2926 FOR_EACH_ALLOCNO (a, ai)
2927 {
2928 int n = ALLOCNO_NUM_OBJECTS (a);
2929 ira_object_t obj0;
1756cb66 2930
ac0ab4f7
BS
2931 if (n < 2)
2932 continue;
2933 obj0 = ALLOCNO_OBJECT (a, 0);
2934 if (OBJECT_CONFLICT_ID (obj0) < word0_min)
2935 word0_min = OBJECT_CONFLICT_ID (obj0);
2936 if (OBJECT_CONFLICT_ID (obj0) > word0_max)
2937 word0_max = OBJECT_CONFLICT_ID (obj0);
2938 }
2939 FOR_EACH_ALLOCNO (a, ai)
2940 {
2941 int n = ALLOCNO_NUM_OBJECTS (a);
2942 ira_object_t obj0;
1756cb66 2943
ac0ab4f7
BS
2944 if (n < 2)
2945 continue;
2946 obj0 = ALLOCNO_OBJECT (a, 0);
2947 if (OBJECT_MIN (obj0) > word0_min)
2948 OBJECT_MIN (obj0) = word0_min;
2949 if (OBJECT_MAX (obj0) < word0_max)
2950 OBJECT_MAX (obj0) = word0_max;
2951 }
058e97ec
VM
2952}
2953
2954\f
2955
2956static void
2957create_caps (void)
2958{
2959 ira_allocno_t a;
2960 ira_allocno_iterator ai;
2961 ira_loop_tree_node_t loop_tree_node;
2962
2963 FOR_EACH_ALLOCNO (a, ai)
2964 {
2965 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2966 continue;
2967 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2968 create_cap_allocno (a);
2969 else if (ALLOCNO_CAP (a) == NULL)
2970 {
2971 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2972 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2973 create_cap_allocno (a);
2974 }
2975 }
2976}
2977
2978\f
2979
2980/* The page contains code transforming more one region internal
2981 representation (IR) to one region IR which is necessary for reload.
2982 This transformation is called IR flattening. We might just rebuild
2983 the IR for one region but we don't do it because it takes a lot of
2984 time. */
2985
82b33628
VM
2986/* Map: regno -> allocnos which will finally represent the regno for
2987 IR with one region. */
2988static ira_allocno_t *regno_top_level_allocno_map;
2989
029da7d4
BS
2990/* Find the allocno that corresponds to A at a level one higher up in the
2991 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2992ira_allocno_t
2993ira_parent_allocno (ira_allocno_t a)
2994{
2995 ira_loop_tree_node_t parent;
2996
2997 if (ALLOCNO_CAP (a) != NULL)
2998 return NULL;
2999
3000 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
3001 if (parent == NULL)
3002 return NULL;
3003
3004 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
3005}
3006
3007/* Find the allocno that corresponds to A at a level one higher up in the
3008 loop tree. If ALLOCNO_CAP is set for A, return that. */
3009ira_allocno_t
3010ira_parent_or_cap_allocno (ira_allocno_t a)
3011{
3012 if (ALLOCNO_CAP (a) != NULL)
3013 return ALLOCNO_CAP (a);
3014
3015 return ira_parent_allocno (a);
3016}
3017
82b33628 3018/* Process all allocnos originated from pseudo REGNO and copy live
801f03e3
VM
3019 ranges, hard reg conflicts, and allocno stack reg attributes from
3020 low level allocnos to final allocnos which are destinations of
3021 removed stores at a loop exit. Return true if we copied live
3022 ranges. */
82b33628 3023static bool
801f03e3 3024copy_info_to_removed_store_destinations (int regno)
82b33628 3025{
504b33d8
ILT
3026 ira_allocno_t a;
3027 ira_allocno_t parent_a = NULL;
82b33628 3028 ira_loop_tree_node_t parent;
82b33628
VM
3029 bool merged_p;
3030
3031 merged_p = false;
3032 for (a = ira_regno_allocno_map[regno];
3033 a != NULL;
3034 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
3035 {
1756cb66 3036 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))])
82b33628
VM
3037 /* This allocno will be removed. */
3038 continue;
ac0ab4f7 3039
82b33628
VM
3040 /* Caps will be removed. */
3041 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
3042 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
3043 parent != NULL;
3044 parent = parent->parent)
3045 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
1756cb66
VM
3046 || (parent_a
3047 == regno_top_level_allocno_map[REGNO
3048 (allocno_emit_reg (parent_a))]
3049 && ALLOCNO_EMIT_DATA (parent_a)->mem_optimized_dest_p))
82b33628
VM
3050 break;
3051 if (parent == NULL || parent_a == NULL)
3052 continue;
ac0ab4f7 3053
3c55880a
BS
3054 copy_allocno_live_ranges (a, parent_a);
3055 merge_hard_reg_conflicts (a, parent_a, true);
ac0ab4f7 3056
ea1c67e6
VM
3057 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
3058 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
3059 += ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
3060 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
3061 += ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
ea1c67e6
VM
3062 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
3063 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
82b33628
VM
3064 merged_p = true;
3065 }
3066 return merged_p;
058e97ec
VM
3067}
3068
3069/* Flatten the IR. In other words, this function transforms IR as if
3070 it were built with one region (without loops). We could make it
3071 much simpler by rebuilding IR with one region, but unfortunately it
3072 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
3073 IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
3074 IRA_MAX_POINT before emitting insns on the loop borders. */
3075void
3076ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
3077{
9140d27b 3078 int i, j;
ea1c67e6 3079 bool keep_p;
058e97ec 3080 int hard_regs_num;
82b33628 3081 bool new_pseudos_p, merged_p, mem_dest_p;
058e97ec 3082 unsigned int n;
1756cb66 3083 enum reg_class aclass;
058e97ec 3084 ira_allocno_t a, parent_a, first, second, node_first, node_second;
058e97ec 3085 ira_copy_t cp;
029da7d4 3086 ira_loop_tree_node_t node;
b14151b5 3087 live_range_t r;
058e97ec
VM
3088 ira_allocno_iterator ai;
3089 ira_copy_iterator ci;
058e97ec
VM
3090
3091 regno_top_level_allocno_map
1756cb66
VM
3092 = (ira_allocno_t *) ira_allocate (max_reg_num ()
3093 * sizeof (ira_allocno_t));
058e97ec
VM
3094 memset (regno_top_level_allocno_map, 0,
3095 max_reg_num () * sizeof (ira_allocno_t));
058e97ec 3096 new_pseudos_p = merged_p = false;
0ca9fa56
VM
3097 FOR_EACH_ALLOCNO (a, ai)
3098 {
ac0ab4f7
BS
3099 ira_allocno_object_iterator oi;
3100 ira_object_t obj;
1756cb66 3101
0ca9fa56
VM
3102 if (ALLOCNO_CAP_MEMBER (a) != NULL)
3103 /* Caps are not in the regno allocno maps and they are never
3104 will be transformed into allocnos existing after IR
3105 flattening. */
3106 continue;
ac0ab4f7
BS
3107 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
3108 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
3109 OBJECT_CONFLICT_HARD_REGS (obj));
0ca9fa56
VM
3110#ifdef STACK_REGS
3111 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
3112#endif
3113 }
058e97ec
VM
3114 /* Fix final allocno attributes. */
3115 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
3116 {
0ca9fa56 3117 mem_dest_p = false;
058e97ec
VM
3118 for (a = ira_regno_allocno_map[i];
3119 a != NULL;
3120 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
3121 {
1756cb66
VM
3122 ira_emit_data_t parent_data, data = ALLOCNO_EMIT_DATA (a);
3123
058e97ec 3124 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
1756cb66 3125 if (data->somewhere_renamed_p)
058e97ec 3126 new_pseudos_p = true;
029da7d4
BS
3127 parent_a = ira_parent_allocno (a);
3128 if (parent_a == NULL)
058e97ec
VM
3129 {
3130 ALLOCNO_COPIES (a) = NULL;
1756cb66 3131 regno_top_level_allocno_map[REGNO (data->reg)] = a;
058e97ec
VM
3132 continue;
3133 }
3134 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
b8698a0f 3135
1756cb66 3136 if (data->mem_optimized_dest != NULL)
82b33628 3137 mem_dest_p = true;
1756cb66
VM
3138 parent_data = ALLOCNO_EMIT_DATA (parent_a);
3139 if (REGNO (data->reg) == REGNO (parent_data->reg))
058e97ec 3140 {
3c55880a
BS
3141 merge_hard_reg_conflicts (a, parent_a, true);
3142 move_allocno_live_ranges (a, parent_a);
058e97ec 3143 merged_p = true;
1756cb66
VM
3144 parent_data->mem_optimized_dest_p
3145 = (parent_data->mem_optimized_dest_p
3146 || data->mem_optimized_dest_p);
058e97ec
VM
3147 continue;
3148 }
3149 new_pseudos_p = true;
058e97ec
VM
3150 for (;;)
3151 {
058e97ec
VM
3152 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
3153 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
ea1c67e6
VM
3154 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
3155 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
3156 -= ALLOCNO_CALLS_CROSSED_NUM (a);
e384e6b5
BS
3157 ALLOCNO_CHEAP_CALLS_CROSSED_NUM (parent_a)
3158 -= ALLOCNO_CHEAP_CALLS_CROSSED_NUM (a);
ea1c67e6
VM
3159 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
3160 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
058e97ec
VM
3161 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
3162 && ALLOCNO_NREFS (parent_a) >= 0
3163 && ALLOCNO_FREQ (parent_a) >= 0);
1756cb66
VM
3164 aclass = ALLOCNO_CLASS (parent_a);
3165 hard_regs_num = ira_class_hard_regs_num[aclass];
058e97ec
VM
3166 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
3167 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
3168 for (j = 0; j < hard_regs_num; j++)
3169 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
3170 -= ALLOCNO_HARD_REG_COSTS (a)[j];
3171 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
3172 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
3173 for (j = 0; j < hard_regs_num; j++)
3174 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
3175 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
1756cb66
VM
3176 ALLOCNO_CLASS_COST (parent_a)
3177 -= ALLOCNO_CLASS_COST (a);
058e97ec 3178 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
029da7d4
BS
3179 parent_a = ira_parent_allocno (parent_a);
3180 if (parent_a == NULL)
058e97ec
VM
3181 break;
3182 }
058e97ec 3183 ALLOCNO_COPIES (a) = NULL;
1756cb66 3184 regno_top_level_allocno_map[REGNO (data->reg)] = a;
058e97ec 3185 }
801f03e3 3186 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
82b33628 3187 merged_p = true;
058e97ec 3188 }
058e97ec
VM
3189 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
3190 if (merged_p || ira_max_point_before_emit != ira_max_point)
3191 ira_rebuild_start_finish_chains ();
3192 if (new_pseudos_p)
3193 {
9140d27b
BS
3194 sparseset objects_live;
3195
058e97ec
VM
3196 /* Rebuild conflicts. */
3197 FOR_EACH_ALLOCNO (a, ai)
3198 {
ac0ab4f7
BS
3199 ira_allocno_object_iterator oi;
3200 ira_object_t obj;
1756cb66
VM
3201
3202 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3203 || ALLOCNO_CAP_MEMBER (a) != NULL)
3204 continue;
ac0ab4f7
BS
3205 FOR_EACH_ALLOCNO_OBJECT (a, obj, oi)
3206 {
3207 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
3208 ira_assert (r->object == obj);
3209 clear_conflicts (obj);
3210 }
058e97ec 3211 }
9140d27b 3212 objects_live = sparseset_alloc (ira_objects_num);
058e97ec
VM
3213 for (i = 0; i < ira_max_point; i++)
3214 {
3215 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
3216 {
9140d27b 3217 ira_object_t obj = r->object;
1756cb66 3218
9140d27b 3219 a = OBJECT_ALLOCNO (obj);
1756cb66 3220 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3221 || ALLOCNO_CAP_MEMBER (a) != NULL)
3222 continue;
ac0ab4f7 3223
1756cb66 3224 aclass = ALLOCNO_CLASS (a);
9140d27b
BS
3225 sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
3226 EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
058e97ec 3227 {
9140d27b
BS
3228 ira_object_t live_obj = ira_object_id_map[n];
3229 ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
1756cb66
VM
3230 enum reg_class live_aclass = ALLOCNO_CLASS (live_a);
3231
3232 if (ira_reg_classes_intersect_p[aclass][live_aclass]
058e97ec 3233 /* Don't set up conflict for the allocno with itself. */
9140d27b
BS
3234 && live_a != a)
3235 ira_add_conflict (obj, live_obj);
058e97ec
VM
3236 }
3237 }
b8698a0f 3238
058e97ec 3239 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
9140d27b 3240 sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
058e97ec 3241 }
9140d27b 3242 sparseset_free (objects_live);
058e97ec
VM
3243 compress_conflict_vecs ();
3244 }
3245 /* Mark some copies for removing and change allocnos in the rest
3246 copies. */
3247 FOR_EACH_COPY (cp, ci)
3248 {
3249 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
3250 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
3251 {
3252 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3253 fprintf
3254 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
3255 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
1756cb66
VM
3256 ALLOCNO_NUM (cp->first),
3257 REGNO (allocno_emit_reg (cp->first)),
058e97ec 3258 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
1756cb66
VM
3259 ALLOCNO_NUM (cp->second),
3260 REGNO (allocno_emit_reg (cp->second)));
058e97ec
VM
3261 cp->loop_tree_node = NULL;
3262 continue;
3263 }
1756cb66
VM
3264 first
3265 = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->first))];
3266 second
3267 = regno_top_level_allocno_map[REGNO (allocno_emit_reg (cp->second))];
058e97ec
VM
3268 node = cp->loop_tree_node;
3269 if (node == NULL)
3270 keep_p = true; /* It copy generated in ira-emit.c. */
3271 else
3272 {
3273 /* Check that the copy was not propagated from level on
3274 which we will have different pseudos. */
3275 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
3276 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
1756cb66
VM
3277 keep_p = ((REGNO (allocno_emit_reg (first))
3278 == REGNO (allocno_emit_reg (node_first)))
3279 && (REGNO (allocno_emit_reg (second))
3280 == REGNO (allocno_emit_reg (node_second))));
058e97ec
VM
3281 }
3282 if (keep_p)
3283 {
3284 cp->loop_tree_node = ira_loop_tree_root;
3285 cp->first = first;
3286 cp->second = second;
3287 }
3288 else
3289 {
3290 cp->loop_tree_node = NULL;
3291 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3292 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
3293 cp->num, ALLOCNO_NUM (cp->first),
1756cb66
VM
3294 REGNO (allocno_emit_reg (cp->first)),
3295 ALLOCNO_NUM (cp->second),
3296 REGNO (allocno_emit_reg (cp->second)));
058e97ec
VM
3297 }
3298 }
3299 /* Remove unnecessary allocnos on lower levels of the loop tree. */
3300 FOR_EACH_ALLOCNO (a, ai)
3301 {
1756cb66 3302 if (a != regno_top_level_allocno_map[REGNO (allocno_emit_reg (a))]
058e97ec
VM
3303 || ALLOCNO_CAP_MEMBER (a) != NULL)
3304 {
3305 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
3306 fprintf (ira_dump_file, " Remove a%dr%d\n",
1756cb66 3307 ALLOCNO_NUM (a), REGNO (allocno_emit_reg (a)));
3b6d1699 3308 ira_remove_allocno_prefs (a);
058e97ec
VM
3309 finish_allocno (a);
3310 continue;
3311 }
3312 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
1756cb66 3313 ALLOCNO_REGNO (a) = REGNO (allocno_emit_reg (a));
058e97ec 3314 ALLOCNO_CAP (a) = NULL;
cb1ca6ac 3315 /* Restore updated costs for assignments from reload. */
058e97ec 3316 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
1756cb66 3317 ALLOCNO_UPDATED_CLASS_COST (a) = ALLOCNO_CLASS_COST (a);
058e97ec
VM
3318 if (! ALLOCNO_ASSIGNED_P (a))
3319 ira_free_allocno_updated_costs (a);
3320 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
3321 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
3322 }
3323 /* Remove unnecessary copies. */
3324 FOR_EACH_COPY (cp, ci)
3325 {
3326 if (cp->loop_tree_node == NULL)
3327 {
3328 ira_copies[cp->num] = NULL;
3329 finish_copy (cp);
3330 continue;
3331 }
3332 ira_assert
3333 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
3334 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
3b6d1699
VM
3335 add_allocno_copy_to_list (cp);
3336 swap_allocno_copy_ends_if_necessary (cp);
058e97ec
VM
3337 }
3338 rebuild_regno_allocno_maps ();
b15a7ae6
VM
3339 if (ira_max_point != ira_max_point_before_emit)
3340 ira_compress_allocno_live_ranges ();
058e97ec
VM
3341 ira_free (regno_top_level_allocno_map);
3342}
3343
3344\f
3345
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
				ALLOCNO_NUM (a)));
      if (loop_tree_node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	ira_assert (loop_tree_node->parent
		    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (loop_tree_node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
3373
4ac293e2
VM
3374/* Identify allocnos which prefer a register class with a single hard register.
3375 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
3376 less likely to use the preferred singleton register. */
3377static void
3378update_conflict_hard_reg_costs (void)
3379{
3380 ira_allocno_t a;
3381 ira_allocno_iterator ai;
3382 int i, index, min;
3383
3384 FOR_EACH_ALLOCNO (a, ai)
3385 {
6f76a878
AS
3386 reg_class_t aclass = ALLOCNO_CLASS (a);
3387 reg_class_t pref = reg_preferred_class (ALLOCNO_REGNO (a));
c9d74da6
RS
3388 int singleton = ira_class_singleton[pref][ALLOCNO_MODE (a)];
3389 if (singleton < 0)
4ac293e2 3390 continue;
c9d74da6 3391 index = ira_class_hard_reg_index[(int) aclass][singleton];
4ac293e2
VM
3392 if (index < 0)
3393 continue;
3394 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
3395 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
3396 continue;
3397 min = INT_MAX;
6f76a878 3398 for (i = ira_class_hard_regs_num[(int) aclass] - 1; i >= 0; i--)
1756cb66 3399 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_CLASS_COST (a)
4ac293e2
VM
3400 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
3401 min = ALLOCNO_HARD_REG_COSTS (a)[i];
3402 if (min == INT_MAX)
3403 continue;
3404 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1756cb66 3405 aclass, 0);
4ac293e2 3406 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
1756cb66 3407 -= min - ALLOCNO_CLASS_COST (a);
4ac293e2
VM
3408 }
3409}
3410
058e97ec 3411/* Create a internal representation (IR) for IRA (allocnos, copies,
2608d841
VM
3412 loop tree nodes). The function returns TRUE if we generate loop
3413 structure (besides nodes representing all function and the basic
3414 blocks) for regional allocation. A true return means that we
3415 really need to flatten IR before the reload. */
058e97ec 3416bool
2608d841 3417ira_build (void)
058e97ec 3418{
2608d841 3419 bool loops_p;
058e97ec 3420
2608d841 3421 df_analyze ();
058e97ec
VM
3422 initiate_cost_vectors ();
3423 initiate_allocnos ();
3b6d1699 3424 initiate_prefs ();
058e97ec 3425 initiate_copies ();
2608d841 3426 create_loop_tree_nodes ();
058e97ec
VM
3427 form_loop_tree ();
3428 create_allocnos ();
3429 ira_costs ();
a49ae217 3430 create_allocno_objects ();
058e97ec 3431 ira_create_allocno_live_ranges ();
311aab06 3432 remove_unnecessary_regions (false);
b15a7ae6 3433 ira_compress_allocno_live_ranges ();
927425df 3434 update_bad_spill_attribute ();
058e97ec
VM
3435 loops_p = more_one_region_p ();
3436 if (loops_p)
3437 {
3438 propagate_allocno_info ();
3439 create_caps ();
3440 }
1756cb66 3441 ira_tune_allocno_costs ();
058e97ec
VM
3442#ifdef ENABLE_IRA_CHECKING
3443 check_allocno_creation ();
3444#endif
3445 setup_min_max_allocno_live_range_point ();
a49ae217 3446 sort_conflict_id_map ();
058e97ec
VM
3447 setup_min_max_conflict_allocno_ids ();
3448 ira_build_conflicts ();
4ac293e2 3449 update_conflict_hard_reg_costs ();
311aab06
VM
3450 if (! ira_conflicts_p)
3451 {
3452 ira_allocno_t a;
3453 ira_allocno_iterator ai;
3454
3455 /* Remove all regions but root one. */
3456 if (loops_p)
3457 {
3458 remove_unnecessary_regions (true);
3459 loops_p = false;
3460 }
3461 /* We don't save hard registers around calls for fast allocation
3462 -- add caller clobbered registers as conflicting ones to
3463 allocno crossing calls. */
3464 FOR_EACH_ALLOCNO (a, ai)
3465 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
ac0ab4f7 3466 ior_hard_reg_conflicts (a, &call_used_reg_set);
311aab06 3467 }
4cda38d5
VM
3468 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
3469 print_copies (ira_dump_file);
3b6d1699
VM
3470 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
3471 print_prefs (ira_dump_file);
058e97ec
VM
3472 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
3473 {
ac0ab4f7 3474 int n, nr, nr_big;
058e97ec 3475 ira_allocno_t a;
b14151b5 3476 live_range_t r;
058e97ec
VM
3477 ira_allocno_iterator ai;
3478
3479 n = 0;
ac0ab4f7
BS
3480 nr = 0;
3481 nr_big = 0;
058e97ec 3482 FOR_EACH_ALLOCNO (a, ai)
a49ae217 3483 {
ac0ab4f7 3484 int j, nobj = ALLOCNO_NUM_OBJECTS (a);
1756cb66 3485
ac0ab4f7
BS
3486 if (nobj > 1)
3487 nr_big++;
3488 for (j = 0; j < nobj; j++)
3489 {
3490 ira_object_t obj = ALLOCNO_OBJECT (a, j);
3491 n += OBJECT_NUM_CONFLICTS (obj);
3492 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
3493 nr++;
3494 }
a49ae217 3495 }
058e97ec 3496 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
0fc822d0 3497 current_loops == NULL ? 1 : number_of_loops (cfun),
0cae8d31 3498 n_basic_blocks_for_fn (cfun), ira_max_point);
058e97ec 3499 fprintf (ira_dump_file,
ac0ab4f7
BS
3500 " allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
3501 ira_allocnos_num, nr_big, ira_copies_num, n, nr);
058e97ec
VM
3502 }
3503 return loops_p;
3504}
3505
/* Release the data created by function ira_build.  */
void
ira_destroy (void)
{
  finish_loop_tree_nodes ();
  finish_prefs ();
  finish_copies ();
  finish_allocnos ();
  finish_cost_vectors ();
  ira_finish_allocno_live_ranges ();
}